Mirror of https://git.yoctoproject.org/poky
Synced 2026-01-30 05:18:43 +01:00

Compare commits: uninative-...fido (562 commits)
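
The same comparison can be reproduced against a local clone of the mirror. A minimal sketch, assuming the truncated ref above is completed to a full tag or branch name (shown here as the placeholder <uninative-ref>, since only the "uninative-" prefix was captured by this mirror):

    # Clone the mirrored repository
    git clone https://git.yoctoproject.org/poky
    cd poky

    # List the commits the compare view enumerates; swap the two refs if the
    # compare direction is the other way around
    git log --oneline fido..<uninative-ref>

    # Sanity-check against the commit count shown above (562)
    git rev-list --count fido..<uninative-ref>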
[Commit table: 562 commits listed by abbreviated SHA1 only, from 8763191981 down to 36ac2c6dfd; the author, date, and commit-message columns were not captured in this mirror.]
.gitignore (vendored): 6 lines changed
@@ -1,7 +1,7 @@
*.pyc
*.pyo
/*.patch
/build*/
build*/
pyshtables.py
pstage/
scripts/oe-git-proxy-socks
@@ -14,7 +14,6 @@ hob-image-*.bb
*.orig
*.rej
*~
!meta-poky
!meta-yocto
!meta-yocto-bsp
!meta-yocto-imported
@@ -22,6 +21,3 @@ documentation/user-manual/user-manual.html
documentation/user-manual/user-manual.pdf
documentation/user-manual/user-manual.tgz
pull-*/
bitbake/lib/toaster/contrib/tts/backlog.txt
bitbake/lib/toaster/contrib/tts/log/*
bitbake/lib/toaster/contrib/tts/.cache/*
.templateconf
@@ -1,2 +1,2 @@
# Template settings
TEMPLATECONF=${TEMPLATECONF:-meta-poky/conf}
TEMPLATECONF=${TEMPLATECONF:-meta-yocto/conf}

README: 2 lines changed
@@ -42,7 +42,7 @@ documentation:
Git repository: http://git.yoctoproject.org/cgit/cgit.cgi/yocto-docs/
Mailing list: yocto@yoctoproject.org

meta-poky, meta-yocto-bsp:
meta-yocto(-bsp):
Git repository: http://git.yoctoproject.org/cgit/cgit.cgi/meta-yocto(-bsp)
Mailing list: poky@yoctoproject.org

@@ -105,7 +105,9 @@ Intel Atom platforms:

and is likely to work on many unlisted Atom/Core/Xeon based devices. The MACHINE
type supports ethernet, wifi, sound, and Intel/vesa graphics by default in
addition to common PC input devices, busses, and so on.
addition to common PC input devices, busses, and so on. Note that it does not
included the binary-only graphic drivers used on some Atom platforms, for
accelerated graphics on these machines please refer to meta-intel.

Depending on the device, it can boot from a traditional hard-disk, a USB device,
or over the network. Writing generated images to physical media is

@@ -5,11 +5,6 @@ The following external components are distributed with this software:
* The Toaster Simple UI application is based upon the Django project template, the files of which are covered by the BSD license and are copyright (c) Django Software
Foundation and individual contributors.

* Twitter Bootstrap (including Glyphicons), redistributed under the MIT license
* Twitter Bootstrap (including Glyphicons), redistributed under the Apache License 2.0.

* jQuery is redistributed under the MIT license.

* Twitter typeahead.js redistributed under the MIT license. Note that the JS source has one small modification, so the full unminified file is currently included to make it obvious where this is.

* jsrender is redistributed under the MIT license.

* QUnit is redistributed under the MIT license.

@@ -1,4 +1,4 @@
#!/usr/bin/env python3
#!/usr/bin/env python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
@@ -33,24 +33,17 @@ except RuntimeError as exc:
sys.exit(str(exc))

from bb import cookerdata
from bb.main import bitbake_main, BitBakeConfigParameters, BBMainException

if sys.getfilesystemencoding() != "utf-8":
sys.exit("Please use a locale setting which supports utf-8.\nPython can't change the filesystem locale after loading so we need a utf-8 when python starts or things won't work.")

__version__ = "1.31.1"
from bb.main import bitbake_main, BitBakeConfigParameters

if __name__ == "__main__":
if __version__ != bb.__version__:
sys.exit("Bitbake core version and program version mismatch!")
try:
sys.exit(bitbake_main(BitBakeConfigParameters(sys.argv),
cookerdata.CookerConfiguration()))
except BBMainException as err:
sys.exit(err)
ret = bitbake_main(BitBakeConfigParameters(sys.argv),
cookerdata.CookerConfiguration())
except bb.BBHandledException:
sys.exit(1)
ret = 1
except Exception:
ret = 1
import traceback
traceback.print_exc()
sys.exit(1)
sys.exit(ret)

@@ -1,4 +1,4 @@
#!/usr/bin/env python3
#!/usr/bin/env python

# bitbake-diffsigs
# BitBake task signature data comparison utility
@@ -24,7 +24,6 @@ import warnings
import fnmatch
import optparse
import logging
import pickle

sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))

@@ -96,7 +95,7 @@ def find_compare_task(bbhandler, pn, taskname):
# Recurse into signature comparison
output = bb.siggen.compare_sigfiles(latestfiles[0], latestfiles[1], recursecb)
if output:
print('\n'.join(output))
print '\n'.join(output)
sys.exit(0)

@@ -115,13 +114,14 @@ parser.add_option("-t", "--task",
options, args = parser.parse_args(sys.argv)

if options.taskargs:
with bb.tinfoil.Tinfoil() as tinfoil:
tinfoil.prepare(config_only=True)
find_compare_task(tinfoil, options.taskargs[0], options.taskargs[1])
tinfoil = bb.tinfoil.Tinfoil()
tinfoil.prepare(config_only = True)
find_compare_task(tinfoil, options.taskargs[0], options.taskargs[1])
else:
if len(args) == 1:
parser.print_help()
else:
import cPickle
try:
if len(args) == 2:
output = bb.siggen.dump_sigfile(sys.argv[1])
@@ -130,9 +130,9 @@ else:
except IOError as e:
logger.error(str(e))
sys.exit(1)
except (pickle.UnpicklingError, EOFError):
except cPickle.UnpicklingError, EOFError:
logger.error('Invalid signature data - ensure you are specifying sigdata/siginfo files')
sys.exit(1)

if output:
print('\n'.join(output))
print '\n'.join(output)

@@ -1,4 +1,4 @@
#!/usr/bin/env python3
#!/usr/bin/env python

# bitbake-dumpsig
# BitBake task signature dump utility
@@ -23,7 +23,6 @@ import sys
import warnings
import optparse
import logging
import pickle

sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))

@@ -52,14 +51,15 @@ options, args = parser.parse_args(sys.argv)
if len(args) == 1:
parser.print_help()
else:
import cPickle
try:
output = bb.siggen.dump_sigfile(args[1])
except IOError as e:
logger.error(str(e))
sys.exit(1)
except (pickle.UnpicklingError, EOFError):
except cPickle.UnpicklingError, EOFError:
logger.error('Invalid signature data - ensure you are specifying a sigdata/siginfo file')
sys.exit(1)

if output:
print('\n'.join(output))
print '\n'.join(output)

[File diff suppressed because it is too large.]
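
The suppressed diff can still be inspected from a local clone. A hedged sketch; the suppressed file's path is not captured here, so <base>, <head>, and <path> are placeholders for the actual refs and file:

    # Dump the full diff for one file between the two compare points,
    # bypassing the pager so nothing is truncated
    git --no-pager diff <base>...<head> -- <path>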
@@ -1,4 +1,4 @@
#!/usr/bin/env python3
#!/usr/bin/env python
import os
import sys,logging
import optparse
@@ -50,6 +50,6 @@ if __name__ == "__main__":
except Exception:
ret = 1
import traceback
traceback.print_exc()
traceback.print_exc(5)
sys.exit(ret)

@@ -1,4 +1,4 @@
#!/usr/bin/env python3
#!/usr/bin/env python
#
# Copyright (C) 2012 Richard Purdie
#
@@ -25,48 +25,25 @@ try:
except RuntimeError as exc:
sys.exit(str(exc))

tests = ["bb.tests.codeparser",
"bb.tests.cow",
"bb.tests.data",
"bb.tests.fetch",
"bb.tests.parse",
"bb.tests.utils"]
def usage():
print('usage: %s [testname1 [testname2]...]' % os.path.basename(sys.argv[0]))

if len(sys.argv) > 1:
if '--help' in sys.argv[1:]:
usage()
sys.exit(0)

tests = sys.argv[1:]
else:
tests = ["bb.tests.codeparser",
"bb.tests.cow",
"bb.tests.data",
"bb.tests.fetch",
"bb.tests.utils"]

for t in tests:
t = '.'.join(t.split('.')[:3])
__import__(t)

unittest.main(argv=["bitbake-selftest"] + tests)

# Set-up logging
class StdoutStreamHandler(logging.StreamHandler):
"""Special handler so that unittest is able to capture stdout"""
def __init__(self):
# Override __init__() because we don't want to set self.stream here
logging.Handler.__init__(self)

@property
def stream(self):
# We want to dynamically write wherever sys.stdout is pointing to
return sys.stdout

handler = StdoutStreamHandler()
bb.logger.addHandler(handler)
bb.logger.setLevel(logging.DEBUG)

ENV_HELP = """\
Environment variables:
BB_SKIP_NETTESTS set to 'yes' in order to skip tests using network
connection
BB_TMPDIR_NOCLEAN set to 'yes' to preserve test tmp directories
"""

class main(unittest.main):
def _print_help(self, *args, **kwargs):
super(main, self)._print_help(*args, **kwargs)
print(ENV_HELP)

if __name__ == '__main__':
main(defaultTest=tests, buffer=True)

@@ -1,4 +1,4 @@
#!/usr/bin/env python3
#!/usr/bin/env python

import os
import sys
@@ -10,11 +10,6 @@ import bb
import select
import errno
import signal
import pickle
from multiprocessing import Lock

if sys.getfilesystemencoding() != "utf-8":
sys.exit("Please use a locale setting which supports utf-8.\nPython can't change the filesystem locale after loading so we need a utf-8 when python starts or things won't work.")

# Users shouldn't be running this code directly
if len(sys.argv) != 2 or not sys.argv[1].startswith("decafbad"):
@@ -22,33 +17,24 @@ if len(sys.argv) != 2 or not sys.argv[1].startswith("decafbad"):
sys.exit(1)

profiling = False
if sys.argv[1].startswith("decafbadbad"):
if sys.argv[1] == "decafbadbad":
profiling = True
try:
import cProfile as profile
except:
import profile

# Unbuffer stdout to avoid log truncation in the event
# of an unorderly exit as well as to provide timely
# updates to log files for use with tail
try:
if sys.stdout.name == '<stdout>':
import fcntl
fl = fcntl.fcntl(sys.stdout.fileno(), fcntl.F_GETFL)
fl |= os.O_SYNC
fcntl.fcntl(sys.stdout.fileno(), fcntl.F_SETFL, fl)
#sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
except:
pass

logger = logging.getLogger("BitBake")

try:
import cPickle as pickle
except ImportError:
import pickle
bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.")

worker_pipe = sys.stdout.fileno()
bb.utils.nonblockingfd(worker_pipe)
# Need to guard against multiprocessing being used in child processes
# and multiple processes trying to write to the parent at the same time
worker_pipe_lock = None

handler = bb.event.LogHandler()
logger.addHandler(handler)
@@ -63,10 +49,10 @@ if 0:
consolelog.setFormatter(conlogformat)
logger.addHandler(consolelog)

worker_queue = b""
worker_queue = ""

def worker_fire(event, d):
data = b"<event>" + pickle.dumps(event) + b"</event>"
data = "<event>" + pickle.dumps(event) + "</event>"
worker_fire_prepickled(data)

def worker_fire_prepickled(event):
@@ -85,21 +71,14 @@ def worker_flush():
written = os.write(worker_pipe, worker_queue)
worker_queue = worker_queue[written:]
except (IOError, OSError) as e:
if e.errno != errno.EAGAIN and e.errno != errno.EPIPE:
if e.errno != errno.EAGAIN:
raise

def worker_child_fire(event, d):
global worker_pipe
global worker_pipe_lock

data = b"<event>" + pickle.dumps(event) + b"</event>"
try:
worker_pipe_lock.acquire()
worker_pipe.write(data)
worker_pipe_lock.release()
except IOError:
sigterm_handler(None, None)
raise
data = "<event>" + pickle.dumps(event) + "</event>"
worker_pipe.write(data)

bb.event.worker_fire = worker_fire

@@ -115,7 +94,7 @@ def sigterm_handler(signum, frame):
os.killpg(0, signal.SIGTERM)
sys.exit()

def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, appends, taskdepdata, quieterrors=False):
def fork_off_task(cfg, data, workerdata, fn, task, taskname, appends, taskdepdata, quieterrors=False):
# We need to setup the environment BEFORE the fork, since
# a fork() or exec*() activates PSEUDO...

@@ -160,26 +139,22 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, append
pipeout = os.fdopen(pipeout, 'wb', 0)
pid = os.fork()
except OSError as e:
logger.critical("fork failed: %d (%s)" % (e.errno, e.strerror))
sys.exit(1)
bb.msg.fatal("RunQueue", "fork failed: %d (%s)" % (e.errno, e.strerror))

if pid == 0:
def child():
global worker_pipe
global worker_pipe_lock
pipein.close()

signal.signal(signal.SIGTERM, sigterm_handler)
# Let SIGHUP exit as SIGTERM
signal.signal(signal.SIGHUP, sigterm_handler)
bb.utils.signal_on_parent_exit("SIGTERM")

# Save out the PID so that the event can include it the
# events
bb.event.worker_pid = os.getpid()
bb.event.worker_fire = worker_child_fire
worker_pipe = pipeout
worker_pipe_lock = Lock()

# Make the child the process group leader and ensure no
# child process will be controlled by the current terminal
@@ -193,44 +168,28 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, append
if umask:
os.umask(umask)

data.setVar("BB_WORKERCONTEXT", "1")
data.setVar("BB_TASKDEPDATA", taskdepdata)
data.setVar("BUILDNAME", workerdata["buildname"])
data.setVar("DATE", workerdata["date"])
data.setVar("TIME", workerdata["time"])
bb.parse.siggen.set_taskdata(workerdata["sigdata"])
ret = 0
try:
bb_cache = bb.cache.NoCache(databuilder)
(realfn, virtual, mc) = bb.cache.virtualfn2realfn(fn)
the_data = databuilder.mcdata[mc]
the_data.setVar("BB_WORKERCONTEXT", "1")
the_data.setVar("BB_TASKDEPDATA", taskdepdata)
the_data.setVar("BUILDNAME", workerdata["buildname"])
the_data.setVar("DATE", workerdata["date"])
the_data.setVar("TIME", workerdata["time"])
bb.parse.siggen.set_taskdata(workerdata["sigdata"])
ret = 0

the_data = bb_cache.loadDataFull(fn, appends)
the_data = bb.cache.Cache.loadDataFull(fn, appends, data)
the_data.setVar('BB_TASKHASH', workerdata["runq_hash"][task])

bb.utils.set_process_name("%s:%s" % (the_data.getVar("PN", True), taskname.replace("do_", "")))

# exported_vars() returns a generator which *cannot* be passed to os.environ.update()
# successfully. We also need to unset anything from the environment which shouldn't be there
exports = bb.data.exported_vars(the_data)

bb.utils.empty_environment()
for e, v in exports:
os.environ[e] = v

for e in fakeenv:
os.environ[e] = fakeenv[e]
the_data.setVar(e, fakeenv[e])
the_data.setVarFlag(e, 'export', "1")

task_exports = the_data.getVarFlag(taskname, 'exports', True)
if task_exports:
for e in task_exports.split():
the_data.setVarFlag(e, 'export', '1')
v = the_data.getVar(e, True)
if v is not None:
os.environ[e] = v

if quieterrors:
the_data.setVarFlag(taskname, "quieterrors", "1")

@@ -256,7 +215,7 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, append
bb.utils.process_profilelog(profname)
os._exit(ret)
else:
for key, value in iter(envbackup.items()):
for key, value in envbackup.iteritems():
if value is None:
del os.environ[key]
else:
@@ -273,22 +232,22 @@ class runQueueWorkerPipe():
if pipeout:
pipeout.close()
bb.utils.nonblockingfd(self.input)
self.queue = b""
self.queue = ""

def read(self):
start = len(self.queue)
try:
self.queue = self.queue + (self.input.read(102400) or b"")
self.queue = self.queue + self.input.read(102400)
except (OSError, IOError) as e:
if e.errno != errno.EAGAIN:
raise

end = len(self.queue)
index = self.queue.find(b"</event>")
index = self.queue.find("</event>")
while index != -1:
worker_fire_prepickled(self.queue[:index+8])
self.queue = self.queue[index+8:]
index = self.queue.find(b"</event>")
index = self.queue.find("</event>")
return (end > start)

def close(self):
@@ -304,7 +263,7 @@ class BitbakeWorker(object):
def __init__(self, din):
self.input = din
bb.utils.nonblockingfd(self.input)
self.queue = b""
self.queue = ""
self.cookercfg = None
self.databuilder = None
self.data = None
@@ -314,16 +273,12 @@ class BitbakeWorker(object):
signal.signal(signal.SIGTERM, self.sigterm_exception)
# Let SIGHUP exit as SIGTERM
signal.signal(signal.SIGHUP, self.sigterm_exception)
if "beef" in sys.argv[1]:
bb.utils.set_process_name("Worker (Fakeroot)")
else:
bb.utils.set_process_name("Worker")

def sigterm_exception(self, signum, stackframe):
if signum == signal.SIGTERM:
bb.warn("Worker received SIGTERM, shutting down...")
bb.warn("Worker recieved SIGTERM, shutting down...")
elif signum == signal.SIGHUP:
bb.warn("Worker received SIGHUP, shutting down...")
bb.warn("Worker recieved SIGHUP, shutting down...")
self.handle_finishnow(None)
signal.signal(signal.SIGTERM, signal.SIG_DFL)
os.kill(os.getpid(), signal.SIGTERM)
@@ -331,22 +286,19 @@ class BitbakeWorker(object):
def serve(self):
while True:
(ready, _, _) = select.select([self.input] + [i.input for i in self.build_pipes.values()], [] , [], 1)
if self.input in ready:
if self.input in ready or len(self.queue):
start = len(self.queue)
try:
r = self.input.read()
if len(r) == 0:
# EOF on pipe, server must have terminated
self.sigterm_exception(signal.SIGTERM, None)
self.queue = self.queue + r
self.queue = self.queue + self.input.read()
except (OSError, IOError):
pass
if len(self.queue):
self.handle_item(b"cookerconfig", self.handle_cookercfg)
self.handle_item(b"workerdata", self.handle_workerdata)
self.handle_item(b"runtask", self.handle_runtask)
self.handle_item(b"finishnow", self.handle_finishnow)
self.handle_item(b"ping", self.handle_ping)
self.handle_item(b"quit", self.handle_quit)
end = len(self.queue)
self.handle_item("cookerconfig", self.handle_cookercfg)
self.handle_item("workerdata", self.handle_workerdata)
self.handle_item("runtask", self.handle_runtask)
self.handle_item("finishnow", self.handle_finishnow)
self.handle_item("ping", self.handle_ping)
self.handle_item("quit", self.handle_quit)

for pipe in self.build_pipes:
self.build_pipes[pipe].read()
@@ -356,12 +308,12 @@ class BitbakeWorker(object):

def handle_item(self, item, func):
if self.queue.startswith(b"<" + item + b">"):
index = self.queue.find(b"</" + item + b">")
if self.queue.startswith("<" + item + ">"):
index = self.queue.find("</" + item + ">")
while index != -1:
func(self.queue[(len(item) + 2):index])
self.queue = self.queue[(index + len(item) + 3):]
index = self.queue.find(b"</" + item + b">")
index = self.queue.find("</" + item + ">")

def handle_cookercfg(self, data):
self.cookercfg = pickle.loads(data)
@@ -375,13 +327,12 @@ class BitbakeWorker(object):
bb.msg.loggerDefaultVerbose = self.workerdata["logdefaultverbose"]
bb.msg.loggerVerboseLogs = self.workerdata["logdefaultverboselogs"]
bb.msg.loggerDefaultDomains = self.workerdata["logdefaultdomain"]
for mc in self.databuilder.mcdata:
self.databuilder.mcdata[mc].setVar("PRSERV_HOST", self.workerdata["prhost"])
self.data.setVar("PRSERV_HOST", self.workerdata["prhost"])

def handle_ping(self, _):
workerlog_write("Handling ping\n")

logger.warning("Pong from bitbake-worker!")
logger.warn("Pong from bitbake-worker!")

def handle_quit(self, data):
workerlog_write("Handling quit\n")
@@ -394,7 +345,7 @@ class BitbakeWorker(object):
fn, task, taskname, quieterrors, appends, taskdepdata = pickle.loads(data)
workerlog_write("Handling runtask %s %s %s\n" % (task, fn, taskname))

pid, pipein, pipeout = fork_off_task(self.cookercfg, self.data, self.databuilder, self.workerdata, fn, task, taskname, appends, taskdepdata, quieterrors)
pid, pipein, pipeout = fork_off_task(self.cookercfg, self.data, self.workerdata, fn, task, taskname, appends, taskdepdata, quieterrors)

self.build_pids[pid] = task
self.build_pipes[pid] = runQueueWorkerPipe(pipein, pipeout)
@@ -426,12 +377,12 @@ class BitbakeWorker(object):
self.build_pipes[pid].close()
del self.build_pipes[pid]

worker_fire_prepickled(b"<exitcode>" + pickle.dumps((task, status)) + b"</exitcode>")
worker_fire_prepickled("<exitcode>" + pickle.dumps((task, status)) + "</exitcode>")

def handle_finishnow(self, _):
if self.build_pids:
logger.info("Sending SIGTERM to remaining %s tasks", len(self.build_pids))
for k, v in iter(self.build_pids.items()):
for k, v in self.build_pids.iteritems():
try:
os.kill(-k, signal.SIGTERM)
os.waitpid(-1, 0)
@@ -441,7 +392,7 @@ class BitbakeWorker(object):
self.build_pipes[pipe].read()

try:
worker = BitbakeWorker(os.fdopen(sys.stdin.fileno(), 'rb'))
worker = BitbakeWorker(sys.stdin)
if not profiling:
worker.serve()
else:

@@ -1,4 +1,4 @@
#!/usr/bin/env python3
#!/usr/bin/env python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
@@ -462,7 +462,7 @@ def main():
state_group = 2

for key in bb.data.keys(documentation):
data = documentation.getVarFlag(key, "doc", False)
data = documentation.getVarFlag(key, "doc")
if not data:
continue

bitbake/bin/image-writer (new executable file): 122 lines
@@ -0,0 +1,122 @@
#!/usr/bin/env python

# Copyright (c) 2012 Wind River Systems, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname( \
os.path.abspath(__file__))), 'lib'))
try:
import bb
except RuntimeError as exc:
sys.exit(str(exc))

import gtk
import optparse
import pygtk

from bb.ui.crumbs.hobwidget import HobAltButton, HobButton
from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog
from bb.ui.crumbs.hig.deployimagedialog import DeployImageDialog
from bb.ui.crumbs.hig.imageselectiondialog import ImageSelectionDialog

# I put all the fs bitbake supported here. Need more test.
DEPLOYABLE_IMAGE_TYPES = ["jffs2", "cramfs", "ext2", "ext3", "ext4", "btrfs", "squashfs", "ubi", "vmdk"]
Title = "USB Image Writer"

class DeployWindow(gtk.Window):
def __init__(self, image_path=''):
super(DeployWindow, self).__init__()

if len(image_path) > 0:
valid = True
if not os.path.exists(image_path):
valid = False
lbl = "<b>Invalid image file path: %s.</b>\nPress <b>Select Image</b> to select an image." % image_path
else:
image_path = os.path.abspath(image_path)
extend_name = os.path.splitext(image_path)[1][1:]
if extend_name not in DEPLOYABLE_IMAGE_TYPES:
valid = False
lbl = "<b>Undeployable imge type: %s</b>\nPress <b>Select Image</b> to select an image." % extend_name

if not valid:
image_path = ''
crumbs_dialog = CrumbsMessageDialog(self, lbl, gtk.STOCK_DIALOG_INFO)
button = crumbs_dialog.add_button("Close", gtk.RESPONSE_OK)
HobButton.style_button(button)
crumbs_dialog.run()
crumbs_dialog.destroy()

self.deploy_dialog = DeployImageDialog(Title, image_path, self,
gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT
| gtk.DIALOG_NO_SEPARATOR, None, standalone=True)
close_button = self.deploy_dialog.add_button("Close", gtk.RESPONSE_NO)
HobAltButton.style_button(close_button)
close_button.connect('clicked', gtk.main_quit)

write_button = self.deploy_dialog.add_button("Write USB image", gtk.RESPONSE_YES)
HobAltButton.style_button(write_button)

self.deploy_dialog.connect('select_image_clicked', self.select_image_clicked_cb)
self.deploy_dialog.connect('destroy', gtk.main_quit)
response = self.deploy_dialog.show()

def select_image_clicked_cb(self, dialog):
cwd = os.getcwd()
dialog = ImageSelectionDialog(cwd, DEPLOYABLE_IMAGE_TYPES, Title, self, gtk.FILE_CHOOSER_ACTION_SAVE )
button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
HobAltButton.style_button(button)
button = dialog.add_button("Open", gtk.RESPONSE_YES)
HobAltButton.style_button(button)
response = dialog.run()

if response == gtk.RESPONSE_YES:
if not dialog.image_names:
lbl = "<b>No selections made</b>\nClicked the radio button to select a image."
crumbs_dialog = CrumbsMessageDialog(self, lbl, gtk.STOCK_DIALOG_INFO)
button = crumbs_dialog.add_button("Close", gtk.RESPONSE_OK)
HobButton.style_button(button)
crumbs_dialog.run()
crumbs_dialog.destroy()
dialog.destroy()
return

# get the full path of image
image_path = os.path.join(dialog.image_folder, dialog.image_names[0])
self.deploy_dialog.set_image_text_buffer(image_path)
self.deploy_dialog.set_image_path(image_path)

dialog.destroy()

def main():
parser = optparse.OptionParser(
usage = """%prog [-h] [image_file]

%prog writes bootable images to USB devices. You can
provide the image file on the command line or select it using the GUI.""")

options, args = parser.parse_args(sys.argv)
image_file = args[1] if len(args) > 1 else ''
dw = DeployWindow(image_file)

if __name__ == '__main__':
try:
main()
gtk.main()
except Exception:
import traceback
traceback.print_exc(3)

@@ -1,8 +1,5 @@
#!/bin/echo ERROR: This script needs to be sourced. Please run as .

# toaster - shell script to start Toaster

# Copyright (C) 2013-2015 Intel Corp.
#!/bin/bash
# (c) 2013 Intel Corp.

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -15,251 +12,329 @@
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

HELP="
Usage: source toaster start|stop [webport=<address:port>] [noweb]
Optional arguments:
[noweb] Setup the environment for building with toaster but don't start the development server
[webport] Set the development server (default: localhost:8000)
"

webserverKillAll()
# This script can be run in two modes.

# When used with "source", from a build directory,
# it enables toaster event logging and starts the bitbake resident server.
# use as: source toaster [start|stop] [noweb] [noui]

# When it is called as a stand-alone script, it starts just the
# web server, and the building shall be done through the web interface.
# As script, it will not return to the command prompt. Stop with Ctrl-C.

# Helper function to kill a background toaster development server

function webserverKillAll()
{
local pidfile
for pidfile in ${BUILDDIR}/.toastermain.pid ${BUILDDIR}/.runbuilds.pid; do
if [ -f ${pidfile} ]; then
pid=`cat ${pidfile}`
while kill -0 $pid 2>/dev/null; do
kill -SIGTERM -$pid 2>/dev/null
sleep 1
done
rm ${pidfile}
fi
done
local pidfile
for pidfile in ${BUILDDIR}/.toastermain.pid; do
if [ -f ${pidfile} ]; then
while kill -0 $(< ${pidfile}) 2>/dev/null; do
kill -SIGTERM -$(< ${pidfile}) 2>/dev/null
sleep 1;
# Kill processes if they are still running - may happen in interactive shells
ps fux | grep "python.*manage.py runserver" | awk '{print $2}' | xargs kill
done;
rm ${pidfile}
fi
done
}

webserverStartAll()
function webserverStartAll()
{
# do not start if toastermain points to a valid process
if ! cat "${BUILDDIR}/.toastermain.pid" 2>/dev/null | xargs -I{} kill -0 {} ; then
retval=1
rm "${BUILDDIR}/.toastermain.pid"
fi
# do not start if toastermain points to a valid process
if ! cat "${BUILDDIR}/.toastermain.pid" 2>/dev/null | xargs -I{} kill -0 {} ; then
retval=1
rm "${BUILDDIR}/.toastermain.pid"
fi

retval=0
# you can always add a superuser later via
# ../bitbake/lib/toaster/manage.py createsuperuser --username=<ME>
$MANAGE migrate --noinput || retval=1

if [ $retval -eq 1 ]; then
echo "Failed migrations, aborting system start" 1>&2
retval=0
python $BBBASEDIR/lib/toaster/manage.py syncdb || retval=1
python $BBBASEDIR/lib/toaster/manage.py migrate orm || retval=2
if [ $retval -eq 1 ]; then
echo "Failed db sync, stopping system start" 1>&2
elif [ $retval -eq 2 ]; then
echo -e "\nError on migration, trying to recover... \n"
python $BBBASEDIR/lib/toaster/manage.py migrate orm 0001_initial --fake
retval=0
python $BBBASEDIR/lib/toaster/manage.py migrate orm || retval=1
fi
if [ "x$TOASTER_MANAGED" == "x1" ]; then
python $BBBASEDIR/lib/toaster/manage.py migrate bldcontrol || retval=1
python $BBBASEDIR/lib/toaster/manage.py checksettings --traceback || retval=1
fi
if [ $retval -eq 0 ]; then
echo "Starting webserver..."
python $BBBASEDIR/lib/toaster/manage.py runserver "0.0.0.0:$WEB_PORT" </dev/null >>${BUILDDIR}/toaster_web.log 2>&1 & echo $! >${BUILDDIR}/.toastermain.pid
sleep 1
if ! cat "${BUILDDIR}/.toastermain.pid" | xargs -I{} kill -0 {} ; then
retval=1
rm "${BUILDDIR}/.toastermain.pid"
else
echo "Webserver address: http://0.0.0.0:$WEB_PORT/"
fi
fi
return $retval
fi
# Make sure that checksettings can pick up any value for TEMPLATECONF
export TEMPLATECONF
$MANAGE checksettings --traceback || retval=1
}

if [ $retval -eq 1 ]; then
printf "\nError while checking settings; aborting\n"
return $retval
fi
# Helper functions to add a special configuration file

echo "Starting webserver..."

$MANAGE runserver "$ADDR_PORT" \
</dev/null >>${BUILDDIR}/toaster_web.log 2>&1 \
& echo $! >${BUILDDIR}/.toastermain.pid

sleep 1

if ! cat "${BUILDDIR}/.toastermain.pid" | xargs -I{} kill -0 {} ; then
retval=1
rm "${BUILDDIR}/.toastermain.pid"
else
echo "Toaster development webserver started at http://$ADDR_PORT"
echo -e "\nYou can now run 'bitbake <target>' on the command line and monitor your build in Toaster.\nYou can also use a Toaster project to configure and run a build.\n"
fi

return $retval
function addtoConfiguration()
{
file=$1
shift
echo "#Created by toaster start script" > ${BUILDDIR}/conf/$file
for var in "$@"; do echo $var >> ${BUILDDIR}/conf/$file; done
}

INSTOPSYSTEM=0

# define the stop command
stop_system()
function stop_system()
{
# prevent reentry
if [ $INSTOPSYSTEM -eq 1 ]; then return; fi
if [ $INSTOPSYSTEM == 1 ]; then return; fi
INSTOPSYSTEM=1
if [ -f ${BUILDDIR}/.toasterui.pid ]; then
kill $(< ${BUILDDIR}/.toasterui.pid ) 2>/dev/null
rm ${BUILDDIR}/.toasterui.pid
fi
BBSERVER=0.0.0.0:-1 bitbake -m
unset BBSERVER
webserverKillAll
# unset exported variables
unset TOASTER_DIR
unset BITBAKE_UI
unset BBBASEDIR
# force stop any misbehaving bitbake server
lsof bitbake.lock | awk '{print $2}' | grep "[0-9]\+" | xargs -n1 -r kill
trap - SIGHUP
#trap - SIGCHLD
INSTOPSYSTEM=0
}

verify_prereq() {
# Verify Django version
reqfile=$(python3 -c "import os; print(os.path.realpath('$BBBASEDIR/toaster-requirements.txt'))")
exp='s/Django\([><=]\+\)\([^,]\+\),\([><=]\+\)\(.\+\)/'
exp=$exp'import sys,django;version=django.get_version().split(".");'
exp=$exp'sys.exit(not (version \1 "\2".split(".") and version \3 "\4".split(".")))/p'
if ! sed -n "$exp" $reqfile | python3 - ; then
req=`grep ^Django $reqfile`
echo "This program needs $req"
echo "Please install with pip install -r $reqfile"
function check_pidbyfile() {
[ -e $1 ] && kill -0 $(< $1) 2>/dev/null
}

function notify_chldexit() {
if [ $NOTOASTERUI == 0 ]; then
check_pidbyfile ${BUILDDIR}/.toasterui.pid && return
stop_system
fi
}

function verify_prereq() {
# Verify prerequisites

if ! echo "import django; print (1,) == django.VERSION[0:1] and django.VERSION[1:2][0] in (6,)" | python 2>/dev/null | grep True >/dev/null; then
echo -e "This program needs Django 1.6. Please install with\n\npip install django==1.6\n"
return 2
fi

if ! echo "import south; print reduce(lambda x, y: 2 if x==2 else 0 if x == 0 else y, map(lambda x: 1+cmp(x[1]-x[0],0), zip([0,8,4], map(int,south.__version__.split(\".\"))))) > 0" | python 2>/dev/null | grep True >/dev/null; then
echo -e "This program needs South 0.8.4. Please install with\n\npip install south==0.8.4\n"
return 2
fi
return 0
}

# read command line parameters
if [ -n "$BASH_SOURCE" ] ; then
TOASTER=${BASH_SOURCE}
elif [ -n "$ZSH_NAME" ] ; then
TOASTER=${(%):-%x}
else
TOASTER=$0
fi

export BBBASEDIR=`dirname $TOASTER`/..
MANAGE="python3 $BBBASEDIR/lib/toaster/manage.py"
OEROOT=`dirname $TOASTER`/../..

# this is the configuraton file we are using for toaster
# we are using the same logic that oe-setup-builddir uses
# (based on TEMPLATECONF and .templateconf) to determine
# which toasterconf.json to use.
# note: There are a number of relative path assumptions
# in the local layers that currently make using an arbitrary
# toasterconf.json difficult.

. $OEROOT/.templateconf
if [ -n "$TEMPLATECONF" ]; then
if [ ! -d "$TEMPLATECONF" ]; then
# Allow TEMPLATECONF=meta-xyz/conf as a shortcut
if [ -d "$OEROOT/$TEMPLATECONF" ]; then
TEMPLATECONF="$OEROOT/$TEMPLATECONF"
fi
fi
fi

# this defines the dir toaster will use for
# 1) clones of layers (in _toaster_clones )
# 2) the build dir (in build)
# 3) the sqlite db if that is being used.
# 4) pid's we need to clean up on exit/shutdown
# note: for future. in order to make this an arbitrary directory, we need to
# make sure that the toaster.sqlite file doesn't default to `pwd` like it currently does.
export TOASTER_DIR=`pwd`
BBBASEDIR=`dirname ${BASH_SOURCE}`/..
RUNNING=0

NOTOASTERUI=0
WEBSERVER=1
ADDR_PORT="localhost:8000"
unset CMD
TOASTER_BRBE=""
WEB_PORT="8000"

for param in $*; do
case $param in
noui )
NOTOASTERUI=1
;;
noweb )
WEBSERVER=0
;;
start )
CMD=$param
;;
stop )
CMD=$param
brbe=* )
TOASTER_BRBE=$'\n'"TOASTER_BRBE=\""${param#*=}"\""
;;
webport=*)
ADDR_PORT="${param#*=}"
# Split the addr:port string
ADDR=`echo $ADDR_PORT | cut -f 1 -d ':'`
PORT=`echo $ADDR_PORT | cut -f 2 -d ':'`
# If only a port has been speified then set address to localhost.
if [ $ADDR = $PORT ] ; then
ADDR_PORT="localhost:$PORT"
fi
;;
--help)
echo "$HELP"
return 0
;;
*)
echo "$HELP"
return 1
;;

WEB_PORT="${param#*=}"
esac
done

if [ `basename \"$0\"` = `basename \"${TOASTER}\"` ]; then
echo "Error: This script needs to be sourced. Please run as . $TOASTER"
return 1

if [ -z "$ZSH_NAME" ] && [ `basename \"$0\"` = `basename \"$BASH_SOURCE\"` ]; then
# We are called as standalone. We refuse to run in a build environment - we need the interactive mode for that.
# Start just the web server, point the web browser to the interface, and start any Django services.

if ! verify_prereq; then
echo -e "Error: Could not verify that the needed dependencies are installed. Please use virtualenv and pip to install dependencies listed in toaster-requirements.txt" 1>&2;
exit 1;
fi

if [ -n "$BUILDDIR" ]; then
echo -e "Error: It looks like you sourced oe-init-build-env. Toaster cannot start in build mode from an oe-core build environment.\n You should be starting Toaster from a new terminal window." 1>&2;
exit 1;
fi

if [ "x`which daemon`" == "x" ]; then
echo -e "Failed dependency; toaster needs the 'daemon' program in order to be able to start builds'. Please install the 'daemon' program from your distribution repositories or http://www.libslack.org/daemon/" 1>&2;
exit 1;
fi

# Define a fake builddir where only the pid files are actually created. No real builds will take place here.
BUILDDIR=/tmp/toaster_$$
if [ -d "$BUILDDIR" ]; then
echo -e "Previous toaster run directory $BUILDDIR found, cowardly refusing to start. Please remove the directory when that toaster instance is over" 2>&1
exit 1;
fi

mkdir -p "$BUILDDIR"

RUNNING=1
function trap_ctrlc() {
echo "** Stopping system"
webserverKillAll
RUNNING=0
}

function do_cleanup() {
find "$BUILDDIR" -type f | xargs rm
rmdir "$BUILDDIR"
}
function cleanup() {
if grep -ir error "$BUILDDIR" >/dev/null; then
if grep -irn "That port is already in use" "$BUILDDIR"; then
echo "You can use the \"webport=PORTNUMBER\" parameter to start Toaster on a different port (port $WEB_PORT is already in use)"
do_cleanup
else
echo -e "\nErrors found in the Toaster log files present in '$BUILDDIR'. Directory will not be cleaned.\n Please review the errors and notify toaster@yoctoproject.org or submit a bug https://bugzilla.yoctoproject.org/enter_bug.cgi?product=Toaster"
fi
else
echo "No errors found, removing the run directory '$BUILDDIR'"
do_cleanup
fi;
}
TOASTER_MANAGED=1
export TOASTER_MANAGED=1
if [ $WEBSERVER -gt 0 ] && ! webserverStartAll; then
echo "Failed to start the web server, stopping" 1>&2;
cleanup
exit 1;
fi
if [ $WEBSERVER -gt 0 ]; then
echo "Starting browser..."
xdg-open http://127.0.0.1:$WEB_PORT/ >/dev/null 2>&1 &
fi
trap trap_ctrlc SIGINT
echo "Toaster is now running. You can stop it with Ctrl-C"
while [ $RUNNING -gt 0 ]; do
python $BBBASEDIR/lib/toaster/manage.py runbuilds 2>&1 | tee -a "$BUILDDIR/toaster.log"
sleep 1
done
cleanup
echo "**** Exit"
exit 0
fi

if ! verify_prereq; then
echo -e "Error: Could not verify that the needed dependencies are installed. Please use virtualenv and pip to install dependencies listed in toaster-requirements.txt" 1>&2;
return 1;
fi

verify_prereq || return 1

# We make sure we're running in the current shell and in a good environment
if [ -z "$BUILDDIR" ] || ! which bitbake >/dev/null 2>&1 ; then
echo "Error: Build environment is not setup or bitbake is not in path." 1>&2
if [ -z "$BUILDDIR" ] || [ -z `which bitbake` ]; then
echo "Error: Build environment is not setup or bitbake is not in path." 1>&2;
return 2
fi

# this defines the dir toaster will use for
# 1) clones of layers (in _toaster_clones )
# 2) the build dir (in build)
# 3) the sqlite db if that is being used.
# 4) pid's we need to clean up on exit/shutdown
# note: for future. in order to make this an arbitrary directory, we need to
# make sure that the toaster.sqlite file doesn't default to `pwd`
# like it currently does.
export TOASTER_DIR=`dirname $BUILDDIR`

# Determine the action. If specified by arguments, fine, if not, toggle it
if [ "$CMD" = "start" ] ; then
if [ -n "$BBSERVER" ]; then
echo " Toaster is already running. Exiting..."
return 1
fi
elif [ "$CMD" = "" ]; then
echo "No command specified"
echo "$HELP"
return 1
if [ "x$1" == "xstart" ] || [ "x$1" == "xstop" ]; then
CMD="$1"
else
if [ -z "$BBSERVER" ]; then
CMD="start"
else
CMD="stop"
fi;
fi

echo "The system will $CMD."

# Make sure it's safe to run by checking bitbake lock

lock=1
if [ -e $BUILDDIR/bitbake.lock ]; then
(flock -n 200 ) 200<$BUILDDIR/bitbake.lock || lock=0
fi

if [ ${CMD} == "start" ] && [ $lock -eq 0 ]; then
echo "Error: bitbake lock state error. File locks show that the system is on." 1>&2
echo "Please wait for the current build to finish, stop and then start the system again." 1>&2
return 3
fi

if [ ${CMD} == "start" ] && [ -e $BUILDDIR/.toastermain.pid ] && kill -0 `cat $BUILDDIR/.toastermain.pid`; then
echo "Warning: bitbake appears to be dead, but the Toaster web server is running. Something fishy is going on." 1>&2
echo "Cleaning up the web server to start from a clean slate."
webserverKillAll
fi

# Execute the commands

case $CMD in
start )
# check if addr:port is not in use
if [ "$CMD" == 'start' ]; then
if [ $WEBSERVER -gt 0 ]; then
$MANAGE checksocket "$ADDR_PORT" || return 1
fi
fi

# Create configuration file
conf=${BUILDDIR}/conf/local.conf
line='INHERIT+="toaster buildhistory"'
grep -q "$line" $conf || echo $line >> $conf

start_success=1
addtoConfiguration toaster.conf "INHERIT+=\"toaster buildhistory\"" $TOASTER_BRBE
if [ $WEBSERVER -gt 0 ] && ! webserverStartAll; then
echo "Failed ${CMD}."
return 4
fi
export BITBAKE_UI='toasterui'
$MANAGE runbuilds & echo $! >${BUILDDIR}/.runbuilds.pid
# set fail safe stop system on terminal exit
unset BBSERVER
PREREAD=""
if [ -e ${BUILDDIR}/conf/toaster-pre.conf ]; then
rm ${BUILDDIR}/conf/toaster-pre.conf
fi
bitbake $PREREAD --postread conf/toaster.conf --server-only -t xmlrpc -B 0.0.0.0:0
if [ $? -ne 0 ]; then
start_success=0
echo "Bitbake server start failed"
else
export BBSERVER=0.0.0.0:-1
if [ $NOTOASTERUI == 0 ]; then # we start the TOASTERUI only if not inhibited
bitbake --observe-only -u toasterui >>${BUILDDIR}/toaster_ui.log 2>&1 & echo $! >${BUILDDIR}/.toasterui.pid
fi
fi
if [ $start_success -eq 1 ]; then
# set fail safe stop system on terminal exit
trap stop_system SIGHUP
echo "Successful ${CMD}."
return 0
else
# failed start, do stop
stop_system
echo "Failed ${CMD}."
return 1
fi
# stop system on terminal exit
set -o monitor
trap stop_system SIGHUP
echo "Successful ${CMD}."
return 0
#trap notify_chldexit SIGCHLD
;;
stop )
stop_system
echo "Successful ${CMD}."
;;
esac

@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env python3
|
||||
#!/usr/bin/env python
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
@@ -21,106 +21,159 @@
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
"""
|
||||
This command takes a filename as a single parameter. The filename is read
|
||||
as a build eventlog, and the ToasterUI is used to process events in the file
|
||||
and log data in the database
|
||||
"""
|
||||
|
||||
# This command takes a filename as a single parameter. The filename is read
|
||||
# as a build eventlog, and the ToasterUI is used to process events in the file
|
||||
# and log data in the database
|
||||
|
||||
import os
|
||||
import sys
|
||||
import json
|
||||
import pickle
|
||||
import codecs
|
||||
|
||||
from collections import namedtuple
|
||||
import sys, logging
|
||||
|
||||
# mangle syspath to allow easy import of modules
|
||||
from os.path import join, dirname, abspath
|
||||
sys.path.insert(0, join(dirname(dirname(abspath(__file__))), 'lib'))
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
|
||||
'lib'))
|
||||
|
||||
|
||||
import bb.cooker
|
||||
from bb.ui import toasterui
|
||||
import sys
|
||||
import logging
|
||||
|
||||
class EventPlayer:
|
||||
"""Emulate a connection to a bitbake server."""
|
||||
logger = logging.getLogger(__name__)
|
||||
console = logging.StreamHandler(sys.stdout)
|
||||
format_str = "%(levelname)s: %(message)s"
|
||||
logging.basicConfig(format=format_str)
|
||||
|
||||
def __init__(self, eventfile, variables):
|
||||
self.eventfile = eventfile
|
||||
self.variables = variables
|
||||
self.eventmask = []
|
||||
|
||||
def waitEvent(self, _timeout):
|
||||
"""Read event from the file."""
|
||||
line = self.eventfile.readline().strip()
|
||||
if not line:
|
||||
return
|
||||
try:
|
||||
event_str = json.loads(line)['vars'].encode('utf-8')
|
||||
event = pickle.loads(codecs.decode(event_str, 'base64'))
|
||||
event_name = "%s.%s" % (event.__module__, event.__class__.__name__)
|
||||
if event_name not in self.eventmask:
|
||||
return
|
||||
return event
|
||||
except ValueError as err:
|
||||
print("Failed loading ", line)
|
||||
raise err
|
||||
import json, pickle
|
||||
|
||||
def runCommand(self, command_line):
|
||||
"""Emulate running a command on the server."""
|
||||
name = command_line[0]
|
||||
|
||||
if name == "getVariable":
|
||||
var_name = command_line[1]
|
||||
variable = self.variables.get(var_name)
|
||||
if variable:
|
||||
return variable['v'], None
|
||||
return None, "Missing variable %s" % var_name
|
||||
class FileReadEventsServerConnection():
|
||||
""" Emulates a connection to a bitbake server that feeds
|
||||
events coming actually read from a saved log file.
|
||||
"""
|
||||
|
||||
elif name == "getAllKeysWithFlags":
|
||||
dump = {}
|
||||
flaglist = command_line[1]
|
||||
for key, val in self.variables.items():
|
||||
try:
|
||||
if not key.startswith("__"):
|
||||
dump[key] = {
|
||||
'v': val['v'],
|
||||
'history' : val['history'],
|
||||
}
|
||||
for flag in flaglist:
|
||||
dump[key][flag] = val[flag]
|
||||
except Exception as err:
|
||||
print(err)
|
||||
return (dump, None)
|
||||
|
||||
elif name == 'setEventMask':
|
||||
self.eventmask = command_line[-1]
|
||||
return True, None
|
||||
|
||||
else:
|
||||
raise Exception("Command %s not implemented" % command_line[0])
|
||||
|
||||
def getEventHandle(self):
|
||||
class MockConnection():
|
||||
""" fill-in for the proxy to the server. we just return generic data
|
||||
"""
|
||||
This method is called by toasterui.
|
||||
The return value is passed to self.runCommand but not used there.
|
||||
"""
|
||||
pass
|
||||
def __init__(self, sc):
|
||||
self._sc = sc
|
||||
|
||||
def main(argv):
|
||||
with open(argv[-1]) as eventfile:
|
||||
# load variables from the first line
|
||||
variables = json.loads(eventfile.readline().strip())['allvariables']
|
||||
def runCommand(self, commandArray):
|
||||
""" emulates running a command on the server; only read-only commands are accepted """
|
||||
command_name = commandArray[0]
|
||||
|
||||
params = namedtuple('ConfigParams', ['observe_only'])(True)
|
||||
player = EventPlayer(eventfile, variables)
|
||||
if command_name == "getVariable":
|
||||
if commandArray[1] in self._sc._variables:
|
||||
return (self._sc._variables[commandArray[1]]['v'], None)
|
||||
return (None, "Missing variable")
|
||||
|
||||
elif command_name == "getAllKeysWithFlags":
|
||||
dump = {}
|
||||
flaglist = commandArray[1]
|
||||
for k in self._sc._variables.keys():
|
||||
try:
|
||||
if not k.startswith("__"):
|
||||
v = self._sc._variables[k]['v']
|
||||
dump[k] = {
|
||||
'v' : v ,
|
||||
'history' : self._sc._variables[k]['history'],
|
||||
}
|
||||
for d in flaglist:
|
||||
dump[k][d] = self._sc._variables[k][d]
|
||||
except Exception as e:
|
||||
print(e)
|
||||
return (dump, None)
|
||||
else:
|
||||
raise Exception("Command %s not implemented" % commandArray[0])
|
||||
|
||||
def terminateServer(self):
|
||||
""" do not do anything """
|
||||
pass
|
||||
|
||||
|
||||
|
||||
class EventReader():
|
||||
def __init__(self, sc):
|
||||
self._sc = sc
|
||||
self.firstraise = 0
|
||||
|
||||
def _create_event(self, line):
|
||||
def _import_class(name):
|
||||
assert len(name) > 0
|
||||
assert "." in name, name
|
||||
|
||||
components = name.strip().split(".")
|
||||
modulename = ".".join(components[:-1])
|
||||
moduleklass = components[-1]
|
||||
|
||||
module = __import__(modulename, fromlist=[str(moduleklass)])
|
||||
return getattr(module, moduleklass)
|
||||
|
||||
# we build a toaster event out of current event log line
|
||||
try:
|
||||
event_data = json.loads(line.strip())
|
||||
event_class = _import_class(event_data['class'])
|
||||
event_object = pickle.loads(json.loads(event_data['vars']))
|
||||
except ValueError as e:
|
||||
print("Failed loading ", line)
|
||||
raise e
|
||||
|
||||
if not isinstance(event_object, event_class):
|
||||
raise Exception("Error loading objects %s class %s ", event_object, event_class)
|
||||
|
||||
return event_object
|
||||
|
||||
def waitEvent(self, timeout):
|
||||
|
||||
nextline = self._sc._eventfile.readline()
|
||||
if len(nextline) == 0:
|
||||
# the build data ended, while toasterui still waits for events.
|
||||
# this happens when the server was abruptly stopped, so we simulate this
|
||||
self.firstraise += 1
|
||||
if self.firstraise == 1:
|
||||
raise KeyboardInterrupt()
|
||||
else:
|
||||
return None
|
||||
else:
|
||||
self._sc.lineno += 1
|
||||
return self._create_event(nextline)
|
||||
|
||||
|
||||
def _readVariables(self, variableline):
|
||||
self._variables = json.loads(variableline.strip())['allvariables']
|
||||
|
||||
|
||||
def __init__(self, file_name):
|
||||
self.connection = FileReadEventsServerConnection.MockConnection(self)
|
||||
self._eventfile = open(file_name, "r")
|
||||
|
||||
# we expect to have the variable dump at the start of the file
|
||||
self.lineno = 1
|
||||
self._readVariables(self._eventfile.readline())
|
||||
|
||||
self.events = FileReadEventsServerConnection.EventReader(self)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
class MockConfigParameters():
|
||||
""" stand-in for cookerdata.ConfigParameters; as we don't really config a cooker, this
|
||||
serves just to supply needed interfaces for the toaster ui to work """
|
||||
def __init__(self):
|
||||
self.observe_only = True # we can only read files
|
||||
|
||||
return toasterui.main(player, player, params)
|
||||
|
||||
# run toaster ui on our mock bitbake class
|
||||
if __name__ == "__main__":
|
||||
if len(sys.argv) != 2:
|
||||
print("Usage: %s <event file>" % os.path.basename(sys.argv[0]))
|
||||
if len(sys.argv) < 2:
|
||||
logger.error("Usage: %s event.log " % sys.argv[0])
|
||||
sys.exit(1)
|
||||
|
||||
sys.exit(main(sys.argv))
|
||||
file_name = sys.argv[-1]
|
||||
mock_connection = FileReadEventsServerConnection(file_name)
|
||||
configParams = MockConfigParameters()
|
||||
|
||||
# run the main program
|
||||
toasterui.main(mock_connection.connection, mock_connection.events, configParams)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env python3
|
||||
#!/usr/bin/env python
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
@@ -29,14 +29,14 @@ import warnings
|
||||
sys.path.insert(0, os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), '../lib'))
|
||||
from bb.cache import CoreRecipeInfo
|
||||
|
||||
import pickle as pickle
|
||||
import cPickle as pickle
|
||||
|
||||
def main(argv=None):
|
||||
"""
|
||||
Get the mapping for the target recipe.
|
||||
"""
|
||||
if len(argv) != 1:
|
||||
print("Error, need one argument!", file=sys.stderr)
|
||||
print >>sys.stderr, "Error, need one argument!"
|
||||
return 2
|
||||
|
||||
cachefile = argv[0]
|
||||
@@ -56,7 +56,7 @@ def main(argv=None):
|
||||
continue
|
||||
|
||||
# 1.0 is the default version for a no PV recipe.
|
||||
if "pv" in val.__dict__:
|
||||
if val.__dict__.has_key("pv"):
|
||||
pv = val.pv
|
||||
else:
|
||||
pv = "1.0"
|
||||
|
||||
@@ -1,15 +1,7 @@
|
||||
<?xml version='1.0'?>
<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns="http://www.w3.org/1999/xhtml" xmlns:fo="http://www.w3.org/1999/XSL/Format" version="1.0">

<xsl:import href="http://downloads.yoctoproject.org/mirror/docbook-mirror/docbook-xsl-1.76.1/xhtml/docbook.xsl" />

<!--

<xsl:import href="../template/1.76.1/docbook-xsl-1.76.1/xhtml/docbook.xsl" />

<xsl:import href="http://docbook.sourceforge.net/release/xsl/1.76.1/xhtml/docbook.xsl" />

-->
<xsl:import href="http://docbook.sourceforge.net/release/xsl/current/xhtml/docbook.xsl" />

<xsl:include href="../template/permalinks.xsl"/>
<xsl:include href="../template/section.title.xsl"/>
|
||||
|
||||
@@ -21,7 +21,7 @@
|
||||
The execution process is launched using the following command
|
||||
form:
|
||||
<literallayout class='monospaced'>
|
||||
$ bitbake <replaceable>target</replaceable>
|
||||
$ bitbake <target>
|
||||
</literallayout>
|
||||
For information on the BitBake command and its options,
|
||||
see
|
||||
@@ -37,16 +37,14 @@
|
||||
</para>
|
||||
|
||||
<para>
|
||||
A common method to determine this value for your build host is to run
|
||||
the following:
|
||||
A common way to determine this value for your build host is to run:
|
||||
<literallayout class='monospaced'>
|
||||
$ grep processor /proc/cpuinfo
|
||||
</literallayout>
|
||||
This command returns the number of processors, which takes into
|
||||
account hyper-threading.
|
||||
Thus, a quad-core build host with hyper-threading most likely
|
||||
shows eight processors, which is the value you would then assign to
|
||||
<filename>BB_NUMBER_THREADS</filename>.
|
||||
and count the number of processors displayed. Note that the number of
|
||||
processors will take into account hyper-threading, so that a quad-core
|
||||
build host with hyper-threading will most likely show eight processors,
|
||||
which is the value you would then assign to that variable.
|
||||
</para>
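<para>
As a minimal illustration (the value shown is an assumption for a
quad-core, hyper-threaded host; adjust it to your own machine), you
would then set the variable in your configuration:
<literallayout class='monospaced'>
BB_NUMBER_THREADS = "8"
</literallayout>
</para>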
|
||||
|
||||
<para>
|
||||
@@ -116,35 +114,16 @@
|
||||
Prior to parsing configuration files, BitBake looks
|
||||
at certain variables, including:
|
||||
<itemizedlist>
|
||||
<listitem><para>
|
||||
<link linkend='var-BB_ENV_WHITELIST'><filename>BB_ENV_WHITELIST</filename></link>
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
<link linkend='var-BB_ENV_EXTRAWHITE'><filename>BB_ENV_EXTRAWHITE</filename></link>
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
<link linkend='var-BB_PRESERVE_ENV'><filename>BB_PRESERVE_ENV</filename></link>
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
<link linkend='var-BB_ORIGENV'><filename>BB_ORIGENV</filename></link>
|
||||
</para></listitem>
|
||||
<listitem><para><link linkend='var-BB_ENV_WHITELIST'><filename>BB_ENV_WHITELIST</filename></link></para></listitem>
|
||||
<listitem><para><link linkend='var-BB_PRESERVE_ENV'><filename>BB_PRESERVE_ENV</filename></link></para></listitem>
|
||||
<listitem><para><link linkend='var-BB_ENV_EXTRAWHITE'><filename>BB_ENV_EXTRAWHITE</filename></link></para></listitem>
|
||||
<listitem><para>
|
||||
<link linkend='var-BITBAKE_UI'><filename>BITBAKE_UI</filename></link>
|
||||
</para></listitem>
|
||||
</itemizedlist>
|
||||
The first four variables in this list relate to how BitBake treats shell
|
||||
environment variables during task execution.
|
||||
By default, BitBake cleans the environment variables and provides tight
|
||||
control over the shell execution environment.
|
||||
However, through the use of these first four variables, you can
control which environment variables BitBake allows into the shell
environment when it executes tasks.
|
||||
See the
|
||||
"<link linkend='passing-information-into-the-build-task-environment'>Passing Information Into the Build Task Environment</link>"
|
||||
section and the information about these variables in the
|
||||
variable glossary for more information on how they work and
|
||||
on how to use them.
|
||||
You can find information on how to pass environment variables into the BitBake
|
||||
execution environment in the
|
||||
"<link linkend='passing-information-into-the-build-task-environment'>Passing Information Into the Build Task Environment</link>" section.
|
||||
</para>
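<para>
As a short sketch of how these variables are commonly used (the
variable passed through here, <filename>http_proxy</filename>, is
just an illustrative choice), you can extend the list of environment
variables BitBake allows into the task environment before starting a
build:
<literallayout class='monospaced'>
$ export BB_ENV_EXTRAWHITE="$BB_ENV_EXTRAWHITE http_proxy"
$ bitbake <replaceable>target</replaceable>
</literallayout>
</para>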
|
||||
|
||||
<para>
|
||||
@@ -306,8 +285,8 @@
|
||||
<link linkend='var-PN'><filename>PN</filename></link> and
|
||||
<link linkend='var-PV'><filename>PV</filename></link>:
|
||||
<literallayout class='monospaced'>
|
||||
PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}"
|
||||
PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[1] or '1.0'}"
|
||||
PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[0] or 'defaultpkgname'}"
|
||||
PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[1] or '1.0'}"
|
||||
</literallayout>
|
||||
In this example, a recipe called "something_1.2.3.bb" would set
|
||||
<filename>PN</filename> to "something" and
|
||||
@@ -596,7 +575,7 @@
|
||||
"<link linkend='checksums'>Checksums (Signatures)</link>"
|
||||
section for information).
|
||||
It is also possible to append extra metadata to the stamp using
|
||||
the <filename>[stamp-extra-info]</filename> task flag.
|
||||
the "stamp-extra-info" task flag.
|
||||
For example, OpenEmbedded uses this flag to make some tasks machine-specific.
|
||||
</para>
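<para>
A minimal sketch of such a flag assignment (the task shown is
illustrative, not a required usage):
<literallayout class='monospaced'>
do_deploy[stamp-extra-info] = "${MACHINE}"
</literallayout>
With this flag set, the machine name becomes part of the stamp, so
the task is considered out of date when the build targets a different
machine.
</para>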
|
||||
|
||||
@@ -653,8 +632,7 @@
|
||||
</itemizedlist>
|
||||
It is possible to have functions run before and after a task's main
|
||||
function.
|
||||
This is done using the <filename>[prefuncs]</filename>
|
||||
and <filename>[postfuncs]</filename> flags of the task
|
||||
This is done using the "prefuncs" and "postfuncs" flags of the task
|
||||
that lists the functions to run.
|
||||
</para>
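<para>
A brief sketch (the function names here are hypothetical and must be
defined elsewhere in the metadata):
<literallayout class='monospaced'>
do_deploy[prefuncs] += "deploy_prepare"
do_deploy[postfuncs] += "deploy_verify"
</literallayout>
BitBake then runs <filename>deploy_prepare</filename> before and
<filename>deploy_verify</filename> after the main body of
<filename>do_deploy</filename>.
</para>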
|
||||
</section>
|
||||
@@ -804,13 +782,13 @@
|
||||
make some dependency and hash information available to the build.
|
||||
This information includes:
|
||||
<itemizedlist>
|
||||
<listitem><para><filename>BB_BASEHASH_task-</filename><replaceable>taskname</replaceable>:
|
||||
<listitem><para><filename>BB_BASEHASH_task-<taskname></filename>:
|
||||
The base hashes for each task in the recipe.
|
||||
</para></listitem>
|
||||
<listitem><para><filename>BB_BASEHASH_</filename><replaceable>filename</replaceable><filename>:</filename><replaceable>taskname</replaceable>:
|
||||
<listitem><para><filename>BB_BASEHASH_<filename:taskname></filename>:
|
||||
The base hashes for each dependent task.
|
||||
</para></listitem>
|
||||
<listitem><para><filename>BBHASHDEPS_</filename><replaceable>filename</replaceable><filename>:</filename><replaceable>taskname</replaceable>:
|
||||
<listitem><para><filename>BBHASHDEPS_<filename:taskname></filename>:
|
||||
The task dependencies for each task.
|
||||
</para></listitem>
|
||||
<listitem><para><filename>BB_TASKHASH</filename>:
|
||||
@@ -828,7 +806,7 @@
|
||||
itself.
|
||||
The simplest parameter to pass is "none", which causes a
|
||||
set of signature information to be written out into
|
||||
<filename>STAMPS_DIR</filename>
|
||||
<filename>STAMP_DIR</filename>
|
||||
corresponding to the targets specified.
|
||||
The other currently available parameter is "printdiff",
|
||||
which causes BitBake to try to establish the closest
|
||||
|
||||
@@ -157,8 +157,8 @@
|
||||
<filename>SRC_URI</filename> variable with the appropriate
|
||||
varflags as follows:
|
||||
<literallayout class='monospaced'>
|
||||
SRC_URI[md5sum] = "<replaceable>value</replaceable>"
|
||||
SRC_URI[sha256sum] = "<replaceable>value</replaceable>"
|
||||
SRC_URI[md5sum] = "value"
|
||||
SRC_URI[sha256sum] = "value"
|
||||
</literallayout>
|
||||
You can also specify the checksums as parameters on the
|
||||
<filename>SRC_URI</filename> as shown below:
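<literallayout class='monospaced'>
# illustrative sketch; the URL and checksum values are placeholders
SRC_URI = "http://example.com/foo.tar.bz2;md5sum=<replaceable>value</replaceable>;sha256sum=<replaceable>value</replaceable>"
</literallayout>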
|
||||
@@ -325,25 +325,6 @@
|
||||
SRC_URI = "ftp://you@oe.handhelds.org/home/you/secret.plan"
|
||||
</literallayout>
|
||||
</para>
|
||||
<note>
|
||||
Because URL parameters are delimited by semi-colons, this can
|
||||
introduce ambiguity when parsing URLs that also contain semi-colons,
|
||||
for example:
|
||||
<literallayout class='monospaced'>
|
||||
SRC_URI = "http://abc123.org/git/?p=gcc/gcc.git;a=snapshot;h=a5dd47"
|
||||
</literallayout>
|
||||
Such URLs should be modified by replacing semi-colons with '&' characters:
|
||||
<literallayout class='monospaced'>
|
||||
SRC_URI = "http://abc123.org/git/?p=gcc/gcc.git&a=snapshot&h=a5dd47"
|
||||
</literallayout>
|
||||
In most cases this should work. Treating semi-colons and '&' in queries
|
||||
identically is recommended by the World Wide Web Consortium (W3C).
|
||||
Note that due to the nature of the URL, you may have to specify the name
|
||||
of the downloaded file as well:
|
||||
<literallayout class='monospaced'>
|
||||
SRC_URI = "http://abc123.org/git/?p=gcc/gcc.git&a=snapshot&h=a5dd47;downloadfilename=myfile.bz2"
|
||||
</literallayout>
|
||||
</note>
|
||||
</section>
|
||||
|
||||
<section id='cvs-fetcher'>
|
||||
@@ -647,7 +628,7 @@
|
||||
<literallayout class='monospaced'>
|
||||
SRC_URI = "ccrc://cc.example.org/ccrc;vob=/example_vob;module=/example_module"
|
||||
SRCREV = "EXAMPLE_CLEARCASE_TAG"
|
||||
PV = "${@d.getVar("SRCREV", False).replace("/", "+")}"
|
||||
PV = "${@d.getVar("SRCREV").replace("/", "+")}"
|
||||
</literallayout>
|
||||
The fetcher uses the <filename>rcleartool</filename> or
|
||||
<filename>cleartool</filename> remote client, depending on
|
||||
@@ -665,19 +646,13 @@
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis><filename>module</filename></emphasis>:
|
||||
The module, which must include the
|
||||
prepending "/" character, in the selected VOB.
|
||||
<note>
|
||||
The <filename>module</filename> and <filename>vob</filename>
|
||||
options are combined to create the <filename>load</filename> rule in
|
||||
the view config spec.
|
||||
As an example, consider the <filename>vob</filename> and
|
||||
<filename>module</filename> values from the
|
||||
<filename>SRC_URI</filename> statement at the start of this section.
|
||||
Combining those values results in the following:
|
||||
<literallayout class='monospaced'>
|
||||
load /example_vob/example_module
|
||||
</literallayout>
|
||||
</note>
|
||||
prepending "/" character, in the selected VOB
|
||||
The <filename>module</filename> and <filename>vob</filename>
|
||||
options are combined to create the following load rule in
|
||||
the view config spec:
|
||||
<literallayout class='monospaced'>
|
||||
load <vob><module>
|
||||
</literallayout>
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis><filename>proto</filename></emphasis>:
|
||||
The protocol, which can be either <filename>http</filename> or
|
||||
@@ -716,68 +691,6 @@
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='perforce-fetcher'>
|
||||
<title>Perforce Fetcher (<filename>p4://</filename>)</title>
|
||||
|
||||
<para>
|
||||
This fetcher submodule fetches code from the
|
||||
<ulink url='https://www.perforce.com/'>Perforce</ulink>
|
||||
source control system.
|
||||
The executable used is specified by
|
||||
<filename>FETCHCMD_p4</filename>, which defaults
|
||||
to "p4".
|
||||
The fetcher's temporary working directory is set by
|
||||
<link linkend='var-P4DIR'><filename>P4DIR</filename></link>,
|
||||
which defaults to "DL_DIR/p4".
|
||||
</para>
|
||||
|
||||
<para>
|
||||
To use this fetcher, make sure your recipe has proper
|
||||
<link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>,
|
||||
<link linkend='var-SRCREV'><filename>SRCREV</filename></link>, and
|
||||
<link linkend='var-PV'><filename>PV</filename></link> values.
|
||||
The p4 executable is able to use the config file defined by your
|
||||
system's <filename>P4CONFIG</filename> environment variable in
|
||||
order to define the Perforce server URL and port, username, and
|
||||
password if you do not wish to keep those values in a recipe
|
||||
itself.
|
||||
If you choose not to use <filename>P4CONFIG</filename>,
|
||||
or to explicitly set variables that <filename>P4CONFIG</filename>
|
||||
can contain, you can specify the <filename>P4PORT</filename> value,
|
||||
which is the server's URL and port number, and you can
|
||||
specify a username and password directly in your recipe within
|
||||
<filename>SRC_URI</filename>.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Here is an example that relies on <filename>P4CONFIG</filename>
|
||||
to specify the server URL and port, username, and password, and
|
||||
fetches the Head Revision:
|
||||
<literallayout class='monospaced'>
|
||||
SRC_URI = "p4://example-depot/main/source/..."
|
||||
SRCREV = "${AUTOREV}"
|
||||
PV = "p4-${SRCPV}"
|
||||
S = "${WORKDIR}/p4"
|
||||
</literallayout>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Here is an example that specifies the server URL and port,
|
||||
username, and password, and fetches a Revision based on a Label:
|
||||
<literallayout class='monospaced'>
|
||||
P4PORT = "tcp:p4server.example.net:1666"
|
||||
SRC_URI = "p4://user:passwd@example-depot/main/source/..."
|
||||
SRCREV = "release-1.0"
|
||||
PV = "p4-${SRCPV}"
|
||||
S = "${WORKDIR}/p4"
|
||||
</literallayout>
|
||||
<note>
|
||||
You should always set <filename>S</filename>
|
||||
to <filename>"${WORKDIR}/p4"</filename> in your recipe.
|
||||
</note>
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='other-fetchers'>
|
||||
<title>Other Fetchers</title>
|
||||
|
||||
@@ -787,6 +700,9 @@
|
||||
<listitem><para>
|
||||
Bazaar (<filename>bzr://</filename>)
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
Perforce (<filename>p4://</filename>)
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
Trees using Git Annex (<filename>gitannex://</filename>)
|
||||
</para></listitem>
|
||||
|
||||
@@ -47,6 +47,7 @@
|
||||
-rw-rw-r--. 1 wmat wmat 849 Nov 26 04:55 HEADER
|
||||
drwxrwxr-x. 5 wmat wmat 4096 Jan 31 13:44 lib
|
||||
-rw-rw-r--. 1 wmat wmat 195 Nov 26 04:55 MANIFEST.in
|
||||
-rwxrwxr-x. 1 wmat wmat 3195 Jan 31 11:57 setup.py
|
||||
-rw-rw-r--. 1 wmat wmat 2887 Nov 26 04:55 TODO
|
||||
</literallayout>
|
||||
</para>
|
||||
@@ -134,7 +135,7 @@
|
||||
<ulink url="http://www.mail-archive.com/yocto@yoctoproject.org/msg09379.html">Mailing List post - The BitBake equivalent of "Hello, World!"</ulink>
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
<ulink url="http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/">Hambedded Linux blog post - From Bitbake Hello World to an Image</ulink>
|
||||
<ulink url="https://web.archive.org/web/20150325165911/http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/">Hambedded Linux blog post - From Bitbake Hello World to an Image</ulink>
|
||||
</para></listitem>
|
||||
</itemizedlist>
|
||||
</note>
|
||||
@@ -220,7 +221,7 @@
|
||||
<para>From your shell, enter the following commands to set and
|
||||
export the <filename>BBPATH</filename> variable:
|
||||
<literallayout class='monospaced'>
|
||||
$ BBPATH="<replaceable>projectdirectory</replaceable>"
|
||||
$ BBPATH="<projectdirectory>"
|
||||
$ export BBPATH
|
||||
</literallayout>
|
||||
Use your actual project directory in the command.
|
||||
@@ -269,7 +270,7 @@
|
||||
and define some key BitBake variables.
|
||||
For more information on the <filename>bitbake.conf</filename>,
|
||||
see
|
||||
<ulink url='http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/#an-overview-of-bitbakeconf'></ulink>
|
||||
<ulink url='https://web.archive.org/web/20150325165911/http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/#an-overview-of-bitbakeconf'></ulink>
|
||||
</para>
|
||||
<para>Use the following commands to create the <filename>conf</filename>
|
||||
directory in the project directory:
|
||||
@@ -354,7 +355,7 @@ ERROR: Unable to parse base: ParseError in configuration INHERITs: Could not inh
|
||||
supporting.
|
||||
For more information on the <filename>base.bbclass</filename> file,
|
||||
you can look at
|
||||
<ulink url='http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/#tasks'></ulink>.
|
||||
<ulink url='https://web.archive.org/web/20150325165911/http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/#tasks'></ulink>.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>Run Bitbake:</emphasis>
|
||||
After making sure that the <filename>classes/base.bbclass</filename>
|
||||
@@ -376,7 +377,7 @@ ERROR: Unable to parse base: ParseError in configuration INHERITs: Could not inh
|
||||
Thus, this example creates and uses a layer called "mylayer".
|
||||
<note>
|
||||
You can find additional information on adding a layer at
|
||||
<ulink url='http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/#adding-an-example-layer'></ulink>.
|
||||
<ulink url='https://web.archive.org/web/20150325165911/http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/#adding-an-example-layer'></ulink>.
|
||||
</note>
|
||||
</para>
|
||||
<para>Minimally, you need a recipe file and a layer configuration
|
||||
@@ -399,7 +400,7 @@ ERROR: Unable to parse base: ParseError in configuration INHERITs: Could not inh
|
||||
<link linkend='var-BBFILES'>BBFILES</link> += "${LAYERDIR}/*.bb"
|
||||
|
||||
<link linkend='var-BBFILE_COLLECTIONS'>BBFILE_COLLECTIONS</link> += "mylayer"
|
||||
<link linkend='var-BBFILE_PATTERN'>BBFILE_PATTERN_mylayer</link> := "^${LAYERDIR_RE}/"
|
||||
<link linkend='var-BBFILE_PATTERN'>BBFILE_PATTERN_mylayer</link> := "^${LAYERDIR}/"
|
||||
</literallayout>
|
||||
For information on these variables, click the links
|
||||
to go to the definitions in the glossary.</para>
|
||||
|
||||
@@ -471,7 +471,7 @@
|
||||
Following is the usage and syntax for BitBake:
|
||||
<literallayout class='monospaced'>
|
||||
$ bitbake -h
|
||||
Usage: bitbake [options] [recipename/target recipe:do_task ...]
|
||||
Usage: bitbake [options] [recipename/target ...]
|
||||
|
||||
Executes the specified task (default is 'build') for a given set of target recipes (.bb files).
|
||||
It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which
|
||||
@@ -529,11 +529,9 @@
|
||||
-l DEBUG_DOMAINS, --log-domains=DEBUG_DOMAINS
|
||||
Show debug logging for the specified logging domains
|
||||
-P, --profile Profile the command and save reports.
|
||||
-u UI, --ui=UI The user interface to use (depexp, goggle, hob, knotty
|
||||
or ncurses - default knotty).
|
||||
-u UI, --ui=UI The user interface to use (e.g. knotty, hob, depexp).
|
||||
-t SERVERTYPE, --servertype=SERVERTYPE
|
||||
Choose which server type to use (process or xmlrpc -
|
||||
default process).
|
||||
Choose which server to use, process or xmlrpc.
|
||||
--token=XMLRPCTOKEN Specify the connection token to be used when
|
||||
connecting to a remote server.
|
||||
--revisions-changed Set the exit code depending on whether upstream
|
||||
@@ -548,10 +546,6 @@
|
||||
-m, --kill-server Terminate the remote server.
|
||||
--observe-only Connect to a server as an observing-only client.
|
||||
--status-only Check the status of the remote bitbake server.
|
||||
-w WRITEEVENTLOG, --write-log=WRITEEVENTLOG
|
||||
Writes the event log of the build to a bitbake event
|
||||
json file. Use '' (empty string) to assign the name
|
||||
automatically.
|
||||
</literallayout>
|
||||
</para>
|
||||
</section>
|
||||
@@ -634,25 +628,6 @@
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='executing-a-list-of-task-and-recipe-combinations'>
|
||||
<title>Executing a List of Task and Recipe Combinations</title>
|
||||
|
||||
<para>
|
||||
The BitBake command line supports specifying different
|
||||
tasks for individual targets when you specify multiple
|
||||
targets.
|
||||
For example, suppose you had two targets (or recipes)
|
||||
<filename>myfirstrecipe</filename> and
|
||||
<filename>mysecondrecipe</filename> and you needed
|
||||
BitBake to run <filename>taskA</filename> for the first
|
||||
recipe and <filename>taskB</filename> for the second
|
||||
recipe:
|
||||
<literallayout class='monospaced'>
|
||||
$ bitbake myfirstrecipe:do_taskA mysecondrecipe:do_taskB
|
||||
</literallayout>
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='generating-dependency-graphs'>
|
||||
<title>Generating Dependency Graphs</title>
|
||||
|
||||
|
||||
File diff suppressed because it is too large
@@ -52,7 +52,7 @@
|
||||
<link linkend='var-MIRRORS'>M</link>
|
||||
<!-- <link linkend='var-glossary-n'>N</link> -->
|
||||
<link linkend='var-OVERRIDES'>O</link>
|
||||
<link linkend='var-P4DIR'>P</link>
|
||||
<link linkend='var-PACKAGES'>P</link>
|
||||
<!-- <link linkend='var-QMAKE_PROFILES'>Q</link> -->
|
||||
<link linkend='var-RDEPENDS'>R</link>
|
||||
<link linkend='var-SECTION'>S</link>
|
||||
@@ -102,56 +102,6 @@
|
||||
</glossdef>
|
||||
</glossentry>
|
||||
|
||||
<glossentry id='var-BB_ALLOWED_NETWORKS'><glossterm>BB_ALLOWED_NETWORKS</glossterm>
|
||||
<glossdef>
|
||||
<para>
|
||||
Specifies a space-delimited list of hosts that the fetcher
|
||||
is allowed to use to obtain the required source code.
|
||||
Following are considerations surrounding this variable:
|
||||
<itemizedlist>
|
||||
<listitem><para>
|
||||
This host list is only used if
|
||||
<link linkend='var-BB_NO_NETWORK'><filename>BB_NO_NETWORK</filename></link>
|
||||
is either not set or set to "0".
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
Limited support for wildcard matching against the
|
||||
beginning of host names exists.
|
||||
For example, the following setting matches
|
||||
<filename>git.gnu.org</filename>,
|
||||
<filename>ftp.gnu.org</filename>, and
|
||||
<filename>foo.git.gnu.org</filename>.
|
||||
<literallayout class='monospaced'>
|
||||
BB_ALLOWED_NETWORKS = "*.gnu.org"
|
||||
</literallayout>
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
Mirrors not in the host list are skipped and
|
||||
logged in debug.
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
Attempts to access networks not in the host list
|
||||
cause a failure.
|
||||
</para></listitem>
|
||||
</itemizedlist>
|
||||
Using <filename>BB_ALLOWED_NETWORKS</filename> in
|
||||
conjunction with
|
||||
<link linkend='var-PREMIRRORS'><filename>PREMIRRORS</filename></link>
|
||||
is very useful.
|
||||
Adding the host you want to use to
|
||||
<filename>PREMIRRORS</filename> results in the source code
|
||||
being fetched from an allowed location and avoids raising
|
||||
an error when a host that is not allowed is in a
|
||||
<link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>
|
||||
statement.
|
||||
This is because the fetcher does not attempt to use the
|
||||
host listed in <filename>SRC_URI</filename> after a
|
||||
successful fetch from the
|
||||
<filename>PREMIRRORS</filename> occurs.
|
||||
</para>
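<para>
A minimal sketch of this combination (the host pattern and mirror URL
are illustrative):
<literallayout class='monospaced'>
BB_ALLOWED_NETWORKS = "*.yoctoproject.org"
PREMIRRORS = "git://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \n"
</literallayout>
With this configuration, sources are fetched from the allowed mirror
even when a <filename>SRC_URI</filename> points at a host that is not
in the allowed list.
</para>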
|
||||
</glossdef>
|
||||
</glossentry>
|
||||
|
||||
<glossentry id='var-BB_CONSOLELOG'><glossterm>BB_CONSOLELOG</glossterm>
|
||||
<glossdef>
|
||||
<para>
|
||||
@@ -856,56 +806,6 @@
|
||||
</glossdef>
|
||||
</glossentry>
|
||||
|
||||
<glossentry id='var-BB_TASK_IONICE_LEVEL'><glossterm>BB_TASK_IONICE_LEVEL</glossterm>
|
||||
<glossdef>
|
||||
<para>
|
||||
Allows adjustment of a task's Input/Output priority.
|
||||
During Autobuilder testing, random failures can occur
|
||||
for tasks due to I/O starvation.
|
||||
These failures occur during various QEMU runtime timeouts.
|
||||
You can use the <filename>BB_TASK_IONICE_LEVEL</filename>
|
||||
variable to adjust the I/O priority of these tasks.
|
||||
<note>
|
||||
This variable works similarly to the
|
||||
<link linkend='var-BB_TASK_NICE_LEVEL'><filename>BB_TASK_NICE_LEVEL</filename></link>
|
||||
variable except with a task's I/O priorities.
|
||||
</note>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Set the variable as follows:
|
||||
<literallayout class='monospaced'>
|
||||
BB_TASK_IONICE_LEVEL = "<replaceable>class</replaceable>.<replaceable>prio</replaceable>"
|
||||
</literallayout>
|
||||
For <replaceable>class</replaceable>, the default value is
|
||||
"2", which is a best effort.
|
||||
You can use "1" for realtime and "3" for idle.
|
||||
If you want to use realtime, you must have superuser
|
||||
privileges.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
For <replaceable>prio</replaceable>, you can use any
|
||||
value from "0", which is the highest priority, to "7",
|
||||
which is the lowest.
|
||||
The default value is "4".
|
||||
You do not need any special privileges to use this range
|
||||
of priority values.
|
||||
<note>
|
||||
In order for your I/O priority settings to take effect,
|
||||
you need the Completely Fair Queuing (CFQ) Scheduler
|
||||
selected for the backing block device.
|
||||
To select the scheduler, use the following command form
|
||||
where <replaceable>device</replaceable> is the device
|
||||
(e.g. sda, sdb, and so forth):
|
||||
<literallayout class='monospaced'>
|
||||
$ sudo sh -c "echo cfq > /sys/block/<replaceable>device</replaceable>/queue/scheduler"
|
||||
</literallayout>
|
||||
</note>
|
||||
</para>
|
||||
</glossdef>
|
||||
</glossentry>
|
||||
|
||||
<glossentry id='var-BB_TASK_NICE_LEVEL'><glossterm>BB_TASK_NICE_LEVEL</glossterm>
|
||||
<glossdef>
|
||||
<para>
|
||||
@@ -972,7 +872,7 @@
|
||||
that run on the target <filename>MACHINE</filename>;
|
||||
"nativesdk", which targets the SDK machine instead of
|
||||
<filename>MACHINE</filename>; and "mulitlibs" in the form
|
||||
"<filename>multilib:</filename><replaceable>multilib_name</replaceable>".
|
||||
"<filename>multilib:<multilib_name></filename>".
|
||||
</para>
|
||||
|
||||
<para>
|
||||
@@ -984,31 +884,8 @@
|
||||
metadata:
|
||||
<literallayout class='monospaced'>
|
||||
BBCLASSEXTEND =+ "native nativesdk"
|
||||
BBCLASSEXTEND =+ "multilib:<replaceable>multilib_name</replaceable>"
|
||||
BBCLASSEXTEND =+ "multilib:<multilib_name>"
|
||||
</literallayout>
|
||||
<note>
|
||||
<para>
|
||||
Internally, the <filename>BBCLASSEXTEND</filename>
|
||||
mechanism generates recipe variants by rewriting
|
||||
variable values and applying overrides such as
|
||||
<filename>_class-native</filename>.
|
||||
For example, to generate a native version of a recipe,
|
||||
a
|
||||
<link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
|
||||
on "foo" is rewritten to a <filename>DEPENDS</filename>
|
||||
on "foo-native".
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Even when using <filename>BBCLASSEXTEND</filename>, the
|
||||
recipe is only parsed once.
|
||||
Parsing once adds some limitations.
|
||||
For example, it is not possible to
|
||||
include a different file depending on the variant,
|
||||
since <filename>include</filename> statements are
|
||||
processed when the recipe is parsed.
|
||||
</para>
|
||||
</note>
|
||||
</para>
|
||||
</glossdef>
|
||||
</glossentry>
|
||||
@@ -1017,7 +894,7 @@
|
||||
<glossdef>
|
||||
<para>
|
||||
Sets the BitBake debug output level to a specific value
|
||||
as incremented by the <filename>-D</filename> command line
|
||||
as incremented by the <filename>-d</filename> command line
|
||||
option.
|
||||
<note>
|
||||
You must set this variable in the external environment
|
||||
@@ -1139,20 +1016,6 @@
|
||||
</glossdef>
|
||||
</glossentry>
|
||||
|
||||
<glossentry id='var-BBLAYERS_FETCH_DIR'><glossterm>BBLAYERS_FETCH_DIR</glossterm>
|
||||
<glossdef>
|
||||
<para>
|
||||
Sets the base location where layers are stored.
|
||||
By default, this location is set to
|
||||
<filename>${COREBASE}</filename>.
|
||||
This setting is used in conjunction with
|
||||
<filename>bitbake-layers layerindex-fetch</filename> and
|
||||
tells <filename>bitbake-layers</filename> where to place
|
||||
the fetched layers.
|
||||
</para>
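<para>
For example (the layer name is only illustrative), the following
fetches a layer into the configured location:
<literallayout class='monospaced'>
$ bitbake-layers layerindex-fetch meta-oe
</literallayout>
</para>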
|
||||
</glossdef>
|
||||
</glossentry>
|
||||
|
||||
<glossentry id='var-BBMASK'><glossterm>BBMASK</glossterm>
|
||||
<glossdef>
|
||||
<para>
|
||||
@@ -1165,14 +1028,14 @@
|
||||
to "hide" these <filename>.bb</filename> and
|
||||
<filename>.bbappend</filename> files.
|
||||
BitBake ignores any recipe or recipe append files that
|
||||
match any of the expressions.
|
||||
match the expression.
|
||||
It is as if BitBake does not see them at all.
|
||||
Consequently, matching files are not parsed or otherwise
|
||||
used by BitBake.</para>
|
||||
<para>
|
||||
The values you provide are passed to Python's regular
|
||||
The value you provide is passed to Python's regular
|
||||
expression compiler.
|
||||
The expressions are compared against the full paths to
|
||||
The expression is compared against the full paths to
|
||||
the files.
|
||||
For complete syntax information, see Python's
|
||||
documentation at
|
||||
@@ -1188,16 +1051,18 @@
|
||||
BBMASK = "meta-ti/recipes-misc/"
|
||||
</literallayout>
|
||||
If you want to mask out multiple directories or recipes,
|
||||
you can specify multiple regular expression fragments.
|
||||
use the vertical bar to separate the regular expression
|
||||
fragments.
|
||||
This next example masks out multiple directories and
|
||||
individual recipes:
|
||||
<literallayout class='monospaced'>
|
||||
BBMASK += "/meta-ti/recipes-misc/ meta-ti/recipes-ti/packagegroup/"
|
||||
BBMASK += "/meta-oe/recipes-support/"
|
||||
BBMASK += "/meta-foo/.*/openldap"
|
||||
BBMASK += "opencv.*\.bbappend"
|
||||
BBMASK += "lzma"
|
||||
BBMASK = "meta-ti/recipes-misc/|meta-ti/recipes-ti/packagegroup/"
|
||||
BBMASK .= "|.*meta-oe/recipes-support/"
|
||||
BBMASK .= "|.*openldap"
|
||||
BBMASK .= "|.*opencv"
|
||||
BBMASK .= "|.*lzma"
|
||||
</literallayout>
|
||||
Notice how the vertical bar is used to append the fragments.
|
||||
<note>
|
||||
When specifying a directory name, use the trailing
|
||||
slash character to ensure you match just that directory
|
||||
@@ -1226,9 +1091,9 @@
|
||||
Set the variable as you would any environment variable
|
||||
and then run BitBake:
|
||||
<literallayout class='monospaced'>
|
||||
$ BBPATH="<replaceable>build_directory</replaceable>"
|
||||
$ BBPATH="<build_directory>"
|
||||
$ export BBPATH
|
||||
$ bitbake <replaceable>target</replaceable>
|
||||
$ bitbake <target>
|
||||
</literallayout>
|
||||
</para>
|
||||
</glossdef>
|
||||
@@ -1244,15 +1109,6 @@
|
||||
</glossdef>
|
||||
</glossentry>
|
||||
|
||||
<glossentry id='var-BBTARGETS'><glossterm>BBTARGETS</glossterm>
|
||||
<glossdef>
|
||||
<para>
|
||||
Allows you to use a configuration file to add to the list
|
||||
of command-line target recipes you want to build.
|
||||
</para>
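<para>
A sketch of such an entry in a configuration file (the recipe names
are placeholders):
<literallayout class='monospaced'>
BBTARGETS = "busybox matchbox-desktop"
</literallayout>
</para>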
|
||||
</glossdef>
|
||||
</glossentry>
|
||||
|
||||
<glossentry id='var-BBVERSIONS'><glossterm>BBVERSIONS</glossterm>
|
||||
<glossdef>
|
||||
<para>
|
||||
@@ -1659,17 +1515,6 @@
|
||||
</glossdef>
|
||||
</glossentry>
|
||||
|
||||
<glossentry id='var-LAYERDIR_RE'><glossterm>LAYERDIR_RE</glossterm>
|
||||
<glossdef>
|
||||
<para>When used inside the <filename>layer.conf</filename> configuration
|
||||
file, this variable provides the path of the current layer,
|
||||
escaped for use in a regular expression
|
||||
(<link linkend='var-BBFILE_PATTERN'><filename>BBFILE_PATTERN</filename></link>).
|
||||
This variable is not available outside of <filename>layer.conf</filename>
|
||||
and references are expanded immediately when parsing of the file completes.</para>
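<para>
For example, the layer configuration shown in the "Hello World"
example earlier uses it to anchor the recipe pattern:
<literallayout class='monospaced'>
BBFILE_PATTERN_mylayer := "^${LAYERDIR_RE}/"
</literallayout>
</para>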
|
||||
</glossdef>
|
||||
</glossentry>
|
||||
|
||||
<glossentry id='var-LAYERVERSION'><glossterm>LAYERVERSION</glossterm>
|
||||
<glossdef>
|
||||
<para>Optionally specifies the version of a layer as a single number.
|
||||
@@ -1771,15 +1616,6 @@
|
||||
|
||||
<glossdiv id='var-glossary-p'><title>P</title>
|
||||
|
||||
<glossentry id='var-P4DIR'><glossterm>P4DIR</glossterm>
|
||||
<glossdef>
|
||||
<para>
|
||||
The directory in which a local copy of a Perforce depot
|
||||
is stored when it is fetched.
|
||||
</para>
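<para>
The Perforce fetcher section notes that this location defaults to
"DL_DIR/p4"; written as an assignment, that default is equivalent to:
<literallayout class='monospaced'>
P4DIR ?= "${DL_DIR}/p4"
</literallayout>
</para>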
|
||||
</glossdef>
|
||||
</glossentry>
|
||||
|
||||
<glossentry id='var-PACKAGES'><glossterm>PACKAGES</glossterm>
|
||||
<glossdef>
|
||||
<para>The list of packages the recipe creates.
|
||||
@@ -1976,27 +1812,6 @@
|
||||
The <filename>PROVIDES</filename> statement results in
|
||||
the "libav" recipe also being known as "libpostproc".
|
||||
</para>
|
||||
|
||||
<para>
|
||||
In addition to providing recipes under alternate names,
|
||||
the <filename>PROVIDES</filename> mechanism is also used
|
||||
to implement virtual targets.
|
||||
A virtual target is a name that corresponds to some
|
||||
particular functionality (e.g. a Linux kernel).
|
||||
Recipes that provide the functionality in question list the
|
||||
virtual target in <filename>PROVIDES</filename>.
|
||||
Recipes that depend on the functionality in question can
|
||||
include the virtual target in
|
||||
<link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
|
||||
to leave the choice of provider open.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Conventionally, virtual targets have names of the form
|
||||
"virtual/function" (e.g. "virtual/kernel").
|
||||
The slash is simply part of the name and has no
|
||||
syntactical significance.
|
||||
</para>
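<para>
A minimal sketch of the convention (the recipes are illustrative):
<literallayout class='monospaced'>
# in a kernel recipe
PROVIDES += "virtual/kernel"

# in a recipe that needs a kernel, whichever one is selected
DEPENDS += "virtual/kernel"
</literallayout>
</para>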
|
||||
</glossdef>
|
||||
</glossentry>
|
||||
|
||||
@@ -2073,7 +1888,7 @@
|
||||
Here is the general syntax to specify versions with
|
||||
the <filename>RDEPENDS</filename> variable:
|
||||
<literallayout class='monospaced'>
|
||||
RDEPENDS_${PN} = "<replaceable>package</replaceable> (<replaceable>operator</replaceable> <replaceable>version</replaceable>)"
|
||||
RDEPENDS_${PN} = "<package> (<operator> <version>)"
|
||||
</literallayout>
|
||||
For <filename>operator</filename>, you can specify the
|
||||
following:
|
||||
@@ -2139,7 +1954,7 @@
|
||||
Here is the general syntax to specify versions with
|
||||
the <filename>RRECOMMENDS</filename> variable:
|
||||
<literallayout class='monospaced'>
|
||||
RRECOMMENDS_${PN} = "<replaceable>package</replaceable> (<replaceable>operator</replaceable> <replaceable>version</replaceable>)"
|
||||
RRECOMMENDS_${PN} = "<package> (<operator> <version>)"
|
||||
</literallayout>
|
||||
For <filename>operator</filename>, you can specify the
|
||||
following:
|
||||
|
||||
@@ -56,7 +56,7 @@
|
||||
-->
|
||||
|
||||
<copyright>
|
||||
<year>2004-2016</year>
|
||||
<year>2004-2015</year>
|
||||
<holder>Richard Purdie</holder>
|
||||
<holder>Chris Larson</holder>
|
||||
<holder>and Phil Blundell</holder>
|
||||
|
||||
@@ -23,17 +23,19 @@
|
||||
# Assign a file to __warn__ to get warnings about slow operations.
|
||||
#
|
||||
|
||||
|
||||
from __future__ import print_function
|
||||
import copy
|
||||
import types
|
||||
ImmutableTypes = (
|
||||
types.NoneType,
|
||||
bool,
|
||||
complex,
|
||||
float,
|
||||
int,
|
||||
long,
|
||||
tuple,
|
||||
frozenset,
|
||||
str
|
||||
basestring
|
||||
)
|
||||
|
||||
MUTABLE = "__mutable__"
|
||||
@@ -59,7 +61,7 @@ class COWDictMeta(COWMeta):
|
||||
__call__ = cow
|
||||
|
||||
def __setitem__(cls, key, value):
|
||||
if value is not None and not isinstance(value, ImmutableTypes):
|
||||
if not isinstance(value, ImmutableTypes):
|
||||
if not isinstance(value, COWMeta):
|
||||
cls.__hasmutable__ = True
|
||||
key += MUTABLE
|
||||
@@ -114,7 +116,7 @@ class COWDictMeta(COWMeta):
|
||||
cls.__setitem__(key, cls.__marker__)
|
||||
|
||||
def __revertitem__(cls, key):
|
||||
if key not in cls.__dict__:
|
||||
if not cls.__dict__.has_key(key):
|
||||
key += MUTABLE
|
||||
delattr(cls, key)
|
||||
|
||||
@@ -181,7 +183,7 @@ class COWSetMeta(COWDictMeta):
|
||||
COWDictMeta.__delitem__(cls, repr(hash(value)))
|
||||
|
||||
def __in__(cls, value):
|
||||
return repr(hash(value)) in COWDictMeta
|
||||
return COWDictMeta.has_key(repr(hash(value)))
|
||||
|
||||
def iterkeys(cls):
|
||||
raise TypeError("sets don't have keys")
|
||||
@@ -190,10 +192,12 @@ class COWSetMeta(COWDictMeta):
|
||||
raise TypeError("sets don't have 'items'")
|
||||
|
||||
# These are the actual classes you use!
|
||||
class COWDictBase(object, metaclass = COWDictMeta):
|
||||
class COWDictBase(object):
|
||||
__metaclass__ = COWDictMeta
|
||||
__count__ = 0
|
||||
|
||||
class COWSetBase(object, metaclass = COWSetMeta):
|
||||
class COWSetBase(object):
|
||||
__metaclass__ = COWSetMeta
|
||||
__count__ = 0
|
||||
|
||||
if __name__ == "__main__":
|
||||
@@ -213,11 +217,11 @@ if __name__ == "__main__":
|
||||
print()
|
||||
|
||||
print("a", a)
|
||||
for x in a.items():
|
||||
for x in a.iteritems():
|
||||
print(x)
|
||||
print("--")
|
||||
print("b", b)
|
||||
for x in b.items():
|
||||
for x in b.iteritems():
|
||||
print(x)
|
||||
print()
|
||||
|
||||
@@ -225,11 +229,11 @@ if __name__ == "__main__":
|
||||
b['a'] = 'c'
|
||||
|
||||
print("a", a)
|
||||
for x in a.items():
|
||||
for x in a.iteritems():
|
||||
print(x)
|
||||
print("--")
|
||||
print("b", b)
|
||||
for x in b.items():
|
||||
for x in b.iteritems():
|
||||
print(x)
|
||||
print()
|
||||
|
||||
@@ -244,22 +248,22 @@ if __name__ == "__main__":
|
||||
a['set'].add("o2")
|
||||
|
||||
print("a", a)
|
||||
for x in a['set'].values():
|
||||
for x in a['set'].itervalues():
|
||||
print(x)
|
||||
print("--")
|
||||
print("b", b)
|
||||
for x in b['set'].values():
|
||||
for x in b['set'].itervalues():
|
||||
print(x)
|
||||
print()
|
||||
|
||||
b['set'].add('o3')
|
||||
|
||||
print("a", a)
|
||||
for x in a['set'].values():
|
||||
for x in a['set'].itervalues():
|
||||
print(x)
|
||||
print("--")
|
||||
print("b", b)
|
||||
for x in b['set'].values():
|
||||
for x in b['set'].itervalues():
|
||||
print(x)
|
||||
print()
|
||||
|
||||
@@ -269,7 +273,7 @@ if __name__ == "__main__":
|
||||
a['set2'].add("o2")
|
||||
|
||||
print("a", a)
|
||||
for x in a.items():
|
||||
for x in a.iteritems():
|
||||
print(x)
|
||||
print("--")
|
||||
print("b", b)
|
||||
@@ -283,13 +287,13 @@ if __name__ == "__main__":
|
||||
except KeyError:
|
||||
print("Yay! deleted key raises error")
|
||||
|
||||
if 'b' in b:
|
||||
if b.has_key('b'):
|
||||
print("Boo!")
|
||||
else:
|
||||
print("Yay - has_key with delete works!")
|
||||
|
||||
print("a", a)
|
||||
for x in a.items():
|
||||
for x in a.iteritems():
|
||||
print(x)
|
||||
print("--")
|
||||
print("b", b)
|
||||
@@ -300,7 +304,7 @@ if __name__ == "__main__":
|
||||
b.__revertitem__('b')
|
||||
|
||||
print("a", a)
|
||||
for x in a.items():
|
||||
for x in a.iteritems():
|
||||
print(x)
|
||||
print("--")
|
||||
print("b", b)
|
||||
@@ -310,7 +314,7 @@ if __name__ == "__main__":
|
||||
|
||||
b.__revertitem__('dict')
|
||||
print("a", a)
|
||||
for x in a.items():
|
||||
for x in a.iteritems():
|
||||
print(x)
|
||||
print("--")
|
||||
print("b", b)
|
||||
|
||||
@@ -21,11 +21,11 @@
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
__version__ = "1.31.1"
|
||||
__version__ = "1.26.0"
|
||||
|
||||
import sys
|
||||
if sys.version_info < (3, 4, 0):
|
||||
raise RuntimeError("Sorry, python 3.4.0 or later is required for this version of bitbake")
|
||||
if sys.version_info < (2, 7, 3):
|
||||
raise RuntimeError("Sorry, python 2.7.3 or later is required for this version of bitbake")
|
||||
|
||||
|
||||
class BBHandledException(Exception):
|
||||
@@ -70,8 +70,6 @@ logger = logging.getLogger("BitBake")
|
||||
logger.addHandler(NullHandler())
|
||||
logger.setLevel(logging.DEBUG - 2)
|
||||
|
||||
mainlogger = logging.getLogger("BitBake.Main")
|
||||
|
||||
# This has to be imported after the setLoggerClass, as the import of bb.msg
|
||||
# can result in construction of the various loggers.
|
||||
import bb.msg
|
||||
@@ -81,26 +79,26 @@ sys.modules['bb.fetch'] = sys.modules['bb.fetch2']
|
||||
|
||||
# Messaging convenience functions
|
||||
def plain(*args):
|
||||
mainlogger.plain(''.join(args))
|
||||
logger.plain(''.join(args))
|
||||
|
||||
def debug(lvl, *args):
|
||||
if isinstance(lvl, str):
|
||||
mainlogger.warning("Passed invalid debug level '%s' to bb.debug", lvl)
|
||||
if isinstance(lvl, basestring):
|
||||
logger.warn("Passed invalid debug level '%s' to bb.debug", lvl)
|
||||
args = (lvl,) + args
|
||||
lvl = 1
|
||||
mainlogger.debug(lvl, ''.join(args))
|
||||
logger.debug(lvl, ''.join(args))
|
||||
|
||||
def note(*args):
|
||||
mainlogger.info(''.join(args))
|
||||
logger.info(''.join(args))
|
||||
|
||||
def warn(*args):
|
||||
mainlogger.warning(''.join(args))
|
||||
logger.warn(''.join(args))
|
||||
|
||||
def error(*args, **kwargs):
|
||||
mainlogger.error(''.join(args), extra=kwargs)
|
||||
def error(*args):
|
||||
logger.error(''.join(args))
|
||||
|
||||
def fatal(*args, **kwargs):
|
||||
mainlogger.critical(''.join(args), extra=kwargs)
|
||||
def fatal(*args):
|
||||
logger.critical(''.join(args))
|
||||
raise BBHandledException()
|
||||
|
||||
def deprecated(func, name=None, advice=""):
|
||||
|
||||
@@ -31,43 +31,23 @@ import logging
|
||||
import shlex
|
||||
import glob
|
||||
import time
|
||||
import stat
|
||||
import bb
|
||||
import bb.msg
|
||||
import bb.process
|
||||
import bb.progress
|
||||
from bb import data, event, utils
|
||||
from contextlib import nested
|
||||
from bb import event, utils
|
||||
|
||||
bblogger = logging.getLogger('BitBake')
|
||||
logger = logging.getLogger('BitBake.Build')
|
||||
|
||||
NULL = open(os.devnull, 'r+')
|
||||
|
||||
__mtime_cache = {}
|
||||
|
||||
def cached_mtime_noerror(f):
|
||||
if f not in __mtime_cache:
|
||||
try:
|
||||
__mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
|
||||
except OSError:
|
||||
return 0
|
||||
return __mtime_cache[f]
|
||||
|
||||
def reset_cache():
|
||||
global __mtime_cache
|
||||
__mtime_cache = {}
|
||||
|
||||
# When we execute a Python function, we'd like certain things
|
||||
# in all namespaces, hence we add them to __builtins__.
|
||||
# If we do not do this and use the exec globals, they will
|
||||
# not be available to subfunctions.
|
||||
if hasattr(__builtins__, '__setitem__'):
|
||||
builtins = __builtins__
|
||||
else:
|
||||
builtins = __builtins__.__dict__
|
||||
|
||||
builtins['bb'] = bb
|
||||
builtins['os'] = os
|
||||
__builtins__['bb'] = bb
|
||||
__builtins__['os'] = os
|
||||
|
||||
class FuncFailed(Exception):
|
||||
def __init__(self, name = None, logfile = None):
|
||||
@@ -138,25 +118,6 @@ class TaskInvalid(TaskBase):
|
||||
super(TaskInvalid, self).__init__(task, None, metadata)
|
||||
self._message = "No such task '%s'" % task
|
||||
|
||||
class TaskProgress(event.Event):
|
||||
"""
|
||||
Task made some progress that could be reported to the user, usually in
|
||||
the form of a progress bar or similar.
|
||||
NOTE: this class does not inherit from TaskBase since it doesn't need
|
||||
to - it's fired within the task context itself, so we don't have any of
|
||||
the context information that you do in the case of the other events.
|
||||
The event PID can be used to determine which task it came from.
|
||||
The progress value is normally 0-100, but can also be negative
|
||||
indicating that progress has been made but we aren't able to determine
|
||||
how much.
|
||||
The rate is optional, this is simply an extra string to display to the
|
||||
user if specified.
|
||||
"""
|
||||
def __init__(self, progress, rate=None):
|
||||
self.progress = progress
|
||||
self.rate = rate
|
||||
event.Event.__init__(self)
|
||||
|
||||
|
||||
class LogTee(object):
    def __init__(self, logger, outfile):
@@ -180,18 +141,14 @@ class LogTee(object):
    def flush(self):
        self.outfile.flush()

#
# pythonexception allows the python exceptions generated to be raised
# as the real exceptions (not FuncFailed) and without a backtrace at the
# origin of the failure.
#
def exec_func(func, d, dirs = None, pythonexception=False):
def exec_func(func, d, dirs = None):
    """Execute a BB 'function'"""

    try:
        oldcwd = os.getcwd()
    except:
        oldcwd = None
    body = d.getVar(func)
    if not body:
        if body is None:
            logger.warn("Function %s doesn't exist", func)
        return

    flags = d.getVarFlags(func)
    cleandirs = flags.get('cleandirs')
@@ -210,13 +167,8 @@ def exec_func(func, d, dirs = None, pythonexception=False):
            bb.utils.mkdirhier(adir)
        adir = dirs[-1]
    else:
        adir = None

    body = d.getVar(func, False)
    if not body:
        if body is None:
            logger.warning("Function %s doesn't exist", func)
        return
        adir = d.getVar('B', True)
        bb.utils.mkdirhier(adir)

    ispython = flags.get('python')

@@ -257,25 +209,22 @@ def exec_func(func, d, dirs = None, pythonexception=False):

    with bb.utils.fileslocked(lockfiles):
        if ispython:
            exec_func_python(func, d, runfile, cwd=adir, pythonexception=pythonexception)
            exec_func_python(func, d, runfile, cwd=adir)
        else:
            exec_func_shell(func, d, runfile, cwd=adir)

    if oldcwd and os.getcwd() != oldcwd:
        try:
            bb.warn("Task %s changed cwd to %s" % (func, os.getcwd()))
            os.chdir(oldcwd)
        except:
            pass

_functionfmt = """
def {function}(d):
{body}

{function}(d)
"""
logformatter = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
def exec_func_python(func, d, runfile, cwd=None, pythonexception=False):
def exec_func_python(func, d, runfile, cwd=None):
    """Execute a python BB 'function'"""

    code = _functionfmt.format(function=func)
    bbfile = d.getVar('FILE', True)
    code = _functionfmt.format(function=func, body=d.getVar(func, True))
    bb.utils.mkdirhier(os.path.dirname(runfile))
    with open(runfile, 'w') as script:
        bb.data.emit_func_python(func, script, d)
@@ -283,26 +232,18 @@ def exec_func_python(func, d, runfile, cwd=None, pythonexception=False):
    if cwd:
        try:
            olddir = os.getcwd()
        except OSError as e:
            bb.warn("%s: Cannot get cwd: %s" % (func, e))
        except OSError:
            olddir = None
        os.chdir(cwd)

    bb.debug(2, "Executing python function %s" % func)

    try:
        text = "def %s(d):\n%s" % (func, d.getVar(func, False))
        fn = d.getVarFlag(func, "filename", False)
        lineno = int(d.getVarFlag(func, "lineno", False))
        bb.methodpool.insert_method(func, text, fn, lineno - 1)

        comp = utils.better_compile(code, func, "exec_python_func() autogenerated")
        utils.better_exec(comp, {"d": d}, code, "exec_python_func() autogenerated", pythonexception=pythonexception)
        comp = utils.better_compile(code, func, bbfile)
        utils.better_exec(comp, {"d": d}, code, bbfile)
    except (bb.parse.SkipRecipe, bb.build.FuncFailed):
        raise
    except:
        if pythonexception:
            raise
        raise FuncFailed(func, None)
    finally:
        bb.debug(2, "Python function %s finished" % func)
@@ -310,8 +251,8 @@ def exec_func_python(func, d, runfile, cwd=None, pythonexception=False):
        if cwd and olddir:
            try:
                os.chdir(olddir)
            except OSError as e:
                bb.warn("%s: Cannot restore cwd %s: %s" % (func, olddir, e))
            except OSError:
                pass

def shell_trap_code():
    return '''#!/bin/sh\n
@@ -321,8 +262,9 @@ bb_exit_handler() {
    case $ret in
    0) ;;
    *) case $BASH_VERSION in
       "") echo "WARNING: exit code $ret from a shell command.";;
       *) echo "WARNING: ${BASH_SOURCE[0]}:${BASH_LINENO[0]} exit $ret from '$BASH_COMMAND'";;
       "") echo "WARNING: exit code $ret from a shell command.";;
       *) echo "WARNING: ${BASH_SOURCE[0]}:${BASH_LINENO[0]} exit $ret from
    \"$BASH_COMMAND\"";;
       esac
       exit $ret
    esac
@@ -356,13 +298,13 @@ def exec_func_shell(func, d, runfile, cwd=None):
# cleanup
ret=$?
trap '' 0
exit $ret
exit $?
''')

    os.chmod(runfile, 0o775)
    os.chmod(runfile, 0775)

    cmd = runfile
    if d.getVarFlag(func, 'fakeroot', False):
    if d.getVarFlag(func, 'fakeroot'):
        fakerootcmd = d.getVar('FAKEROOT', True)
        if fakerootcmd:
            cmd = [fakerootcmd, runfile]
@@ -372,75 +314,14 @@ exit $ret
    else:
        logfile = sys.stdout

    progress = d.getVarFlag(func, 'progress', True)
    if progress:
        if progress == 'percent':
            # Use default regex
            logfile = bb.progress.BasicProgressHandler(d, outfile=logfile)
        elif progress.startswith('percent:'):
            # Use specified regex
            logfile = bb.progress.BasicProgressHandler(d, regex=progress.split(':', 1)[1], outfile=logfile)
        elif progress.startswith('outof:'):
            # Use specified regex
            logfile = bb.progress.OutOfProgressHandler(d, regex=progress.split(':', 1)[1], outfile=logfile)
        else:
            bb.warn('%s: invalid task progress varflag value "%s", ignoring' % (func, progress))
    bb.debug(2, "Executing shell function %s" % func)

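The three accepted forms of the progress varflag, as dispatched above, expressed through the datastore API (a metadata file would write e.g. do_compile[progress] = "percent"; the custom regexes below are invented examples):

# Default percentage scanning of the task output:
d.setVarFlag('do_compile', 'progress', 'percent')
# Explicit regex for percentage output (passed to BasicProgressHandler):
d.setVarFlag('do_configure', 'progress', r'percent:^Progress: (\d+)%')
# "X out of Y" style output (passed to OutOfProgressHandler):
d.setVarFlag('do_install', 'progress', r'outof:(\d+) of (\d+)')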
    fifobuffer = bytearray()
    def readfifo(data):
        nonlocal fifobuffer
        fifobuffer.extend(data)
        while fifobuffer:
            message, token, nextmsg = fifobuffer.partition(b"\00")
            if token:
                splitval = message.split(b' ', 1)
                cmd = splitval[0].decode("utf-8")
                if len(splitval) > 1:
                    value = splitval[1].decode("utf-8")
                else:
                    value = ''
                if cmd == 'bbplain':
                    bb.plain(value)
                elif cmd == 'bbnote':
                    bb.note(value)
                elif cmd == 'bbwarn':
                    bb.warn(value)
                elif cmd == 'bberror':
                    bb.error(value)
                elif cmd == 'bbfatal':
                    # The caller will call exit themselves, so bb.error() is
                    # what we want here rather than bb.fatal()
                    bb.error(value)
                elif cmd == 'bbfatal_log':
                    bb.error(value, forcelog=True)
                elif cmd == 'bbdebug':
                    splitval = value.split(' ', 1)
                    level = int(splitval[0])
                    value = splitval[1]
                    bb.debug(level, value)
                else:
                    bb.warn("Unrecognised command '%s' on FIFO" % cmd)
                fifobuffer = nextmsg
            else:
                break

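For illustration, the producer side of the protocol readfifo() parses above: NUL-terminated "command value" records written into the task's FIFO. This sketch uses an invented path; the real FIFO is ${T}/fifo.<pid> as created below:

import os

fifopath = "/tmp/worktree/fifo.1234"                  # hypothetical path
fd = os.open(fifopath, os.O_WRONLY)
os.write(fd, b"bbnote compiling module 2 of 5\x00")
os.write(fd, b"bbdebug 2 extra verbose detail\x00")   # "bbdebug <level> <msg>"
os.close(fd)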
    tempdir = d.getVar('T', True)
    fifopath = os.path.join(tempdir, 'fifo.%s' % os.getpid())
    if os.path.exists(fifopath):
        os.unlink(fifopath)
    os.mkfifo(fifopath)
    with open(fifopath, 'r+b', buffering=0) as fifo:
        try:
            bb.debug(2, "Executing shell function %s" % func)

            try:
                with open(os.devnull, 'r+') as stdin:
                    bb.process.run(cmd, shell=False, stdin=stdin, log=logfile, extrafiles=[(fifo,readfifo)])
            except bb.process.CmdError:
                logfn = d.getVar('BB_LOGFILE', True)
                raise FuncFailed(func, logfn)
        finally:
            os.unlink(fifopath)
    try:
        with open(os.devnull, 'r+') as stdin:
            bb.process.run(cmd, shell=False, stdin=stdin, log=logfile)
    except bb.process.CmdError:
        logfn = d.getVar('BB_LOGFILE', True)
        raise FuncFailed(func, logfn)

    bb.debug(2, "Shell function %s finished" % func)

@@ -460,7 +341,7 @@ def _exec_task(fn, task, d, quieterr):
    Execution of a task involves a bit more setup than executing a function,
    running it with its own local metadata, and with some useful variables set.
    """
    if not d.getVarFlag(task, 'task', False):
    if not d.getVarFlag(task, 'task'):
        event.fire(TaskInvalid(task, d), d)
        logger.error("No such task: %s" % task)
        return 1
@@ -479,13 +360,6 @@ def _exec_task(fn, task, d, quieterr):
            nice = int(nice) - curnice
            newnice = os.nice(nice)
            logger.debug(1, "Renice to %s " % newnice)
        ionice = localdata.getVar("BB_TASK_IONICE_LEVEL", True)
        if ionice:
            try:
                cls, prio = ionice.split(".", 1)
                bb.utils.ioprio_set(os.getpid(), int(cls), int(prio))
            except:
                bb.warn("Invalid ionice level %s" % ionice)

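BB_TASK_IONICE_LEVEL is parsed as "<class>.<priority>" by the split above; a small sketch of the expected value format (reading class 2 as best-effort follows the Linux ioprio convention and is an assumption here):

ionice = "2.7"                  # class 2 (best-effort), priority 7 (lowest)
cls, prio = ionice.split(".", 1)
assert (int(cls), int(prio)) == (2, 7)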
    bb.utils.mkdirhier(tempdir)

@@ -521,10 +395,7 @@ def _exec_task(fn, task, d, quieterr):
            self.triggered = False
            logging.Handler.__init__(self, logging.ERROR)
        def emit(self, record):
            if getattr(record, 'forcelog', False):
                self.triggered = False
            else:
                self.triggered = True
            self.triggered = True

    # Handle logfiles
    si = open('/dev/null', 'r')
@@ -575,9 +446,6 @@ def _exec_task(fn, task, d, quieterr):
            logger.error(str(exc))
        event.fire(TaskFailed(task, logfn, localdata, errprinted), localdata)
        return 1
    except bb.BBHandledException:
        event.fire(TaskFailed(task, logfn, localdata, True), localdata)
        return 1
    finally:
        sys.stdout.flush()
        sys.stderr.flush()
@@ -602,7 +470,7 @@ def _exec_task(fn, task, d, quieterr):
        bb.utils.remove(loglink)
    event.fire(TaskSucceeded(task, logfn, localdata), localdata)

    if not localdata.getVarFlag(task, 'nostamp', False) and not localdata.getVarFlag(task, 'selfstamp', False):
    if not localdata.getVarFlag(task, 'nostamp') and not localdata.getVarFlag(task, 'selfstamp'):
        make_stamp(task, localdata)

    return 0
@@ -610,7 +478,7 @@ def _exec_task(fn, task, d, quieterr):
def exec_task(fn, task, d, profile = False):
    try:
        quieterr = False
        if d.getVarFlag(task, "quieterrors", False) is not None:
        if d.getVarFlag(task, "quieterrors") is not None:
            quieterr = True

        if profile:
@@ -637,7 +505,7 @@ def exec_task(fn, task, d, profile = False):
        event.fire(failedevent, d)
        return 1

def stamp_internal(taskname, d, file_name, baseonly=False, noextra=False):
def stamp_internal(taskname, d, file_name, baseonly=False):
    """
    Internal stamp helper function
    Makes sure the stamp directory exists
@@ -651,17 +519,15 @@ def stamp_internal(taskname, d, file_name, baseonly=False, noextra=False):
    taskflagname = taskname.replace("_setscene", "")

    if file_name:
        stamp = d.stamp[file_name]
        stamp = d.stamp_base[file_name].get(taskflagname) or d.stamp[file_name]
        extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
    else:
        stamp = d.getVar('STAMP', True)
        stamp = d.getVarFlag(taskflagname, 'stamp-base', True) or d.getVar('STAMP', True)
        file_name = d.getVar('BB_FILENAME', True)
        extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info', True) or ""

    if baseonly:
        return stamp
    if noextra:
        extrainfo = ""

    if not stamp:
        return
@@ -669,7 +535,7 @@ def stamp_internal(taskname, d, file_name, baseonly=False, noextra=False):
    stamp = bb.parse.siggen.stampfile(stamp, file_name, taskname, extrainfo)

    stampdir = os.path.dirname(stamp)
    if cached_mtime_noerror(stampdir) == 0:
    if bb.parse.cached_mtime_noerror(stampdir) == 0:
        bb.utils.mkdirhier(stampdir)

    return stamp
@@ -687,10 +553,10 @@ def stamp_cleanmask_internal(taskname, d, file_name):
    taskflagname = taskname.replace("_setscene", "")

    if file_name:
        stamp = d.stampclean[file_name]
        stamp = d.stamp_base_clean[file_name].get(taskflagname) or d.stampclean[file_name]
        extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
    else:
        stamp = d.getVar('STAMPCLEAN', True)
        stamp = d.getVarFlag(taskflagname, 'stamp-base-clean', True) or d.getVar('STAMPCLEAN', True)
        file_name = d.getVar('BB_FILENAME', True)
        extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info', True) or ""

@@ -757,15 +623,15 @@ def write_taint(task, d, file_name = None):
    with open(taintfn, 'w') as taintf:
        taintf.write(str(uuid.uuid4()))

def stampfile(taskname, d, file_name = None, noextra=False):
def stampfile(taskname, d, file_name = None):
    """
    Return the stamp for a given task
    (d can be a data dict or dataCache)
    """
    return stamp_internal(taskname, d, file_name, noextra=noextra)
    return stamp_internal(taskname, d, file_name)

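A sketch of how a caller might query a task's stamp path through the wrapper above (the exact path depends on STAMP and the active signature generator):

import bb.build

stamp = bb.build.stampfile('do_compile', d)
bb.note("stamp for do_compile would be %s" % stamp)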
def add_tasks(tasklist, d):
    task_deps = d.getVar('_task_deps', False)
def add_tasks(tasklist, deltasklist, d):
    task_deps = d.getVar('_task_deps')
    if not task_deps:
        task_deps = {}
    if not 'tasks' in task_deps:
@@ -776,6 +642,9 @@ def add_tasks(tasklist, d):
    for task in tasklist:
        task = d.expand(task)

        if task in deltasklist:
            continue

        d.setVarFlag(task, 'task', 1)

        if not task in task_deps['tasks']:
@@ -812,12 +681,12 @@ def addtask(task, before, after, d):
        task = "do_" + task

    d.setVarFlag(task, "task", 1)
    bbtasks = d.getVar('__BBTASKS', False) or []
    if task not in bbtasks:
    bbtasks = d.getVar('__BBTASKS') or []
    if not task in bbtasks:
        bbtasks.append(task)
        d.setVar('__BBTASKS', bbtasks)

    existing = d.getVarFlag(task, "deps", False) or []
    existing = d.getVarFlag(task, "deps") or []
    if after is not None:
        # set up deps for function
        for entry in after.split():
@@ -827,7 +696,7 @@ def addtask(task, before, after, d):
    if before is not None:
        # set up things that depend on this func
        for entry in before.split():
            existing = d.getVarFlag(entry, "deps", False) or []
            existing = d.getVarFlag(entry, "deps") or []
            if task not in existing:
                d.setVarFlag(entry, "deps", [task] + existing)

@@ -835,15 +704,8 @@ def deltask(task, d):
    if task[:3] != "do_":
        task = "do_" + task

    bbtasks = d.getVar('__BBTASKS', False) or []
    if task in bbtasks:
        bbtasks.remove(task)
        d.delVarFlag(task, 'task')
        d.setVar('__BBTASKS', bbtasks)
    bbtasks = d.getVar('__BBDELTASKS') or []
    if not task in bbtasks:
        bbtasks.append(task)
        d.setVar('__BBDELTASKS', bbtasks)

        d.delVarFlag(task, 'deps')
    for bbtask in d.getVar('__BBTASKS', False) or []:
        deps = d.getVarFlag(bbtask, 'deps', False) or []
        if task in deps:
            deps.remove(task)
            d.setVarFlag(bbtask, 'deps', deps)

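What the addtask/deltask recipe directives reduce to through the API above; for instance `addtask deploy after do_install before do_build` corresponds roughly to this sketch (the do_ prefix is added automatically by both functions):

import bb.build

bb.build.addtask('deploy', 'do_build', 'do_install', d)   # (task, before, after, d)
bb.build.deltask('deploy', d)                             # and its later removal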
@@ -28,16 +28,22 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.


import os
import sys
import logging
import pickle
from collections import defaultdict
import bb.utils

logger = logging.getLogger("BitBake.Cache")

__cache_version__ = "150"
try:
    import cPickle as pickle
except ImportError:
    import pickle
    logger.info("Importing cPickle failed. "
                "Falling back to a very slow implementation.")

__cache_version__ = "148"

def getCacheFile(path, filename, data_hash):
    return os.path.join(path, filename + "." + data_hash)
@@ -74,13 +80,13 @@ class RecipeInfoCommon(object):
        out_dict = dict((var, metadata.getVarFlag(var, flag, True))
                    for var in varlist)
        if squash:
            return dict((k,v) for (k,v) in out_dict.items() if v)
            return dict((k,v) for (k,v) in out_dict.iteritems() if v)
        else:
            return out_dict

    @classmethod
    def getvar(cls, var, metadata, expand = True):
        return metadata.getVar(var, expand) or ''
    def getvar(cls, var, metadata):
        return metadata.getVar(var, True) or ''


class CoreRecipeInfo(RecipeInfoCommon):
@@ -93,7 +99,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
        self.timestamp = bb.parse.cached_mtime(filename)
        self.variants = self.listvar('__VARIANTS', metadata) + ['']
        self.appends = self.listvar('__BBAPPEND', metadata)
        self.nocache = self.getvar('BB_DONT_CACHE', metadata)
        self.nocache = self.getvar('__BB_DONT_CACHE', metadata)

        self.skipreason = self.getvar('__SKIPPED', metadata)
        if self.skipreason:
@@ -123,6 +129,8 @@ class CoreRecipeInfo(RecipeInfoCommon):
        self.not_world = self.getvar('EXCLUDE_FROM_WORLD', metadata)
        self.stamp = self.getvar('STAMP', metadata)
        self.stampclean = self.getvar('STAMPCLEAN', metadata)
        self.stamp_base = self.flaglist('stamp-base', self.tasks, metadata)
        self.stamp_base_clean = self.flaglist('stamp-base-clean', self.tasks, metadata)
        self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
        self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
        self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
@@ -134,11 +142,10 @@ class CoreRecipeInfo(RecipeInfoCommon):
        self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)
        self.rdepends_pkg = self.pkgvar('RDEPENDS', self.packages, metadata)
        self.rrecommends_pkg = self.pkgvar('RRECOMMENDS', self.packages, metadata)
        self.inherits = self.getvar('__inherit_cache', metadata, expand=False)
        self.inherits = self.getvar('__inherit_cache', metadata)
        self.fakerootenv = self.getvar('FAKEROOTENV', metadata)
        self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata)
        self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata)
        self.extradepsfunc = self.getvar('calculate_extra_depends', metadata)

    @classmethod
    def init_cacheData(cls, cachedata):
@@ -151,6 +158,8 @@ class CoreRecipeInfo(RecipeInfoCommon):

        cachedata.stamp = {}
        cachedata.stampclean = {}
        cachedata.stamp_base = {}
        cachedata.stamp_base_clean = {}
        cachedata.stamp_extrainfo = {}
        cachedata.file_checksums = {}
        cachedata.fn_provides = {}
@@ -174,7 +183,6 @@ class CoreRecipeInfo(RecipeInfoCommon):
        cachedata.fakerootenv = {}
        cachedata.fakerootnoenv = {}
        cachedata.fakerootdirs = {}
        cachedata.extradepsfunc = {}

    def add_cacheData(self, cachedata, fn):
        cachedata.task_deps[fn] = self.task_deps
@@ -184,6 +192,8 @@ class CoreRecipeInfo(RecipeInfoCommon):
        cachedata.pkg_dp[fn] = self.defaultpref
        cachedata.stamp[fn] = self.stamp
        cachedata.stampclean[fn] = self.stampclean
        cachedata.stamp_base[fn] = self.stamp_base
        cachedata.stamp_base_clean[fn] = self.stamp_base_clean
        cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
        cachedata.file_checksums[fn] = self.file_checksums

@@ -210,8 +220,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
            rprovides += self.rprovides_pkg[package]

        for rprovide in rprovides:
            if fn not in cachedata.rproviders[rprovide]:
                cachedata.rproviders[rprovide].append(fn)
            cachedata.rproviders[rprovide].append(fn)

        for package in self.packages_dynamic:
            cachedata.packages_dynamic[package].append(fn)
@@ -234,7 +243,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
            cachedata.universe_target.append(self.pn)

        cachedata.hashfn[fn] = self.hashfilename
        for task, taskhash in self.basetaskhashes.items():
        for task, taskhash in self.basetaskhashes.iteritems():
            identifier = '%s.%s' % (fn, task)
            cachedata.basetaskhash[identifier] = taskhash

@@ -242,138 +251,15 @@ class CoreRecipeInfo(RecipeInfoCommon):
        cachedata.fakerootenv[fn] = self.fakerootenv
        cachedata.fakerootnoenv[fn] = self.fakerootnoenv
        cachedata.fakerootdirs[fn] = self.fakerootdirs
        cachedata.extradepsfunc[fn] = self.extradepsfunc

def virtualfn2realfn(virtualfn):
    """
    Convert a virtual file name to a real one + the associated subclass keyword
    """
    mc = ""
    if virtualfn.startswith('multiconfig:'):
        elems = virtualfn.split(':')
        mc = elems[1]
        virtualfn = ":".join(elems[2:])

    fn = virtualfn
    cls = ""
    if virtualfn.startswith('virtual:'):
        elems = virtualfn.split(':')
        cls = ":".join(elems[1:-1])
        fn = elems[-1]

    return (fn, cls, mc)

def realfn2virtual(realfn, cls, mc):
    """
    Convert a real filename + the associated subclass keyword to a virtual filename
    """
    if cls:
        realfn = "virtual:" + cls + ":" + realfn
    if mc:
        realfn = "multiconfig:" + mc + ":" + realfn
    return realfn

def variant2virtual(realfn, variant):
    """
    Convert a real filename + the associated subclass keyword to a virtual filename
    """
    if variant == "":
        return realfn
    if variant.startswith("multiconfig:"):
        elems = variant.split(":")
        if elems[2]:
            return "multiconfig:" + elems[1] + ":virtual:" + ":".join(elems[2:]) + ":" + realfn
        return "multiconfig:" + elems[1] + ":" + realfn
    return "virtual:" + variant + ":" + realfn

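A worked round trip through the two helpers above (the recipe path is invented):

fn, cls, mc = virtualfn2realfn("multiconfig:musl:virtual:native:/recipes/foo_1.0.bb")
# fn == "/recipes/foo_1.0.bb", cls == "native", mc == "musl"
assert realfn2virtual(fn, cls, mc) == "multiconfig:musl:virtual:native:/recipes/foo_1.0.bb"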
def parse_recipe(bb_data, bbfile, appends, mc=''):
    """
    Parse a recipe
    """

    chdir_back = False

    bb_data.setVar("__BBMULTICONFIG", mc)

    # expand tmpdir to include this topdir
    bb_data.setVar('TMPDIR', bb_data.getVar('TMPDIR', True) or "")
    bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
    oldpath = os.path.abspath(os.getcwd())
    bb.parse.cached_mtime_noerror(bbfile_loc)

    # The ConfHandler first looks if there is a TOPDIR and if not
    # then it would call getcwd().
    # Previously, we chdir()ed to bbfile_loc, called the handler
    # and finally chdir()ed back, a couple of thousand times. We now
    # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
    if not bb_data.getVar('TOPDIR', False):
        chdir_back = True
        bb_data.setVar('TOPDIR', bbfile_loc)
    try:
        if appends:
            bb_data.setVar('__BBAPPEND', " ".join(appends))
        bb_data = bb.parse.handle(bbfile, bb_data)
        if chdir_back:
            os.chdir(oldpath)
        return bb_data
    except:
        if chdir_back:
            os.chdir(oldpath)
        raise


class NoCache(object):

    def __init__(self, databuilder):
        self.databuilder = databuilder
        self.data = databuilder.data

    def loadDataFull(self, virtualfn, appends):
        """
        Return a complete set of data for fn.
        To do this, we need to parse the file.
        """
        logger.debug(1, "Parsing %s (full)" % virtualfn)
        (fn, virtual, mc) = virtualfn2realfn(virtualfn)
        bb_data = self.load_bbfile(virtualfn, appends, virtonly=True)
        return bb_data[virtual]

    def load_bbfile(self, bbfile, appends, virtonly = False):
        """
        Load and parse one .bb build file
        Return the data and whether parsing resulted in the file being skipped
        """

        if virtonly:
            (bbfile, virtual, mc) = virtualfn2realfn(bbfile)
            bb_data = self.databuilder.mcdata[mc].createCopy()
            bb_data.setVar("__ONLYFINALISE", virtual or "default")
            datastores = parse_recipe(bb_data, bbfile, appends, mc)
            return datastores

        bb_data = self.data.createCopy()
        datastores = parse_recipe(bb_data, bbfile, appends)

        for mc in self.databuilder.mcdata:
            if not mc:
                continue
            bb_data = self.databuilder.mcdata[mc].createCopy()
            newstores = parse_recipe(bb_data, bbfile, appends, mc)
            for ns in newstores:
                datastores["multiconfig:%s:%s" % (mc, ns)] = newstores[ns]

        return datastores

class Cache(NoCache):
class Cache(object):
    """
    BitBake Cache implementation
    """

    def __init__(self, databuilder, data_hash, caches_array):
        super().__init__(databuilder)
        data = databuilder.data

    def __init__(self, data, data_hash, caches_array):
        # Pass caches_array information into Cache Constructor
        # It will be used later for deciding whether we
        # need extra cache file dump/load support
@@ -382,6 +268,7 @@ class Cache(NoCache):
        self.clean = set()
        self.checked = set()
        self.depends_cache = {}
        self.data = None
        self.data_fn = None
        self.cacheclean = True
        self.data_hash = data_hash
@@ -401,74 +288,72 @@ class Cache(NoCache):
        cache_ok = True
        if self.caches_array:
            for cache_class in self.caches_array:
                cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                cache_ok = cache_ok and os.path.exists(cachefile)
                cache_class.init_cacheData(self)
                if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                    cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                    cache_ok = cache_ok and os.path.exists(cachefile)
                    cache_class.init_cacheData(self)
        if cache_ok:
            self.load_cachefile()
        elif os.path.isfile(self.cachefile):
            logger.info("Out of date cache found, rebuilding...")

    def load_cachefile(self):
        # Firstly, using core cache file information for
        # valid checking
        with open(self.cachefile, "rb") as cachefile:
            pickled = pickle.Unpickler(cachefile)
            try:
                cache_ver = pickled.load()
                bitbake_ver = pickled.load()
            except Exception:
                logger.info('Invalid cache, rebuilding...')
                return

            if cache_ver != __cache_version__:
                logger.info('Cache version mismatch, rebuilding...')
                return
            elif bitbake_ver != bb.__version__:
                logger.info('Bitbake version mismatch, rebuilding...')
                return

        cachesize = 0
        previous_progress = 0
        previous_percent = 0

        # Calculate the correct cachesize of all those cache files
        for cache_class in self.caches_array:
            cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
            with open(cachefile, "rb") as cachefile:
                cachesize += os.fstat(cachefile.fileno()).st_size
            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                with open(cachefile, "rb") as cachefile:
                    cachesize += os.fstat(cachefile.fileno()).st_size

        bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data)

        for cache_class in self.caches_array:
            cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
            with open(cachefile, "rb") as cachefile:
                pickled = pickle.Unpickler(cachefile)
                # Check cache version information
                try:
                    cache_ver = pickled.load()
                    bitbake_ver = pickled.load()
                except Exception:
                    logger.info('Invalid cache, rebuilding...')
                    return
            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                with open(cachefile, "rb") as cachefile:
                    pickled = pickle.Unpickler(cachefile)
                    while cachefile:
                        try:
                            key = pickled.load()
                            value = pickled.load()
                        except Exception:
                            break
                        if self.depends_cache.has_key(key):
                            self.depends_cache[key].append(value)
                        else:
                            self.depends_cache[key] = [value]
                        # only fire events on even percentage boundaries
                        current_progress = cachefile.tell() + previous_progress
                        current_percent = 100 * current_progress / cachesize
                        if current_percent > previous_percent:
                            previous_percent = current_percent
                            bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
                                          self.data)

                if cache_ver != __cache_version__:
                    logger.info('Cache version mismatch, rebuilding...')
                    return
                elif bitbake_ver != bb.__version__:
                    logger.info('Bitbake version mismatch, rebuilding...')
                    return

                # Load the rest of the cache file
                current_progress = 0
                while cachefile:
                    try:
                        key = pickled.load()
                        value = pickled.load()
                    except Exception:
                        break
                    if not isinstance(key, str):
                        bb.warn("%s from extras cache is not a string?" % key)
                        break
                    if not isinstance(value, RecipeInfoCommon):
                        bb.warn("%s from extras cache is not a RecipeInfoCommon class?" % value)
                        break

                    if key in self.depends_cache:
                        self.depends_cache[key].append(value)
                    else:
                        self.depends_cache[key] = [value]
                    # only fire events on even percentage boundaries
                    current_progress = cachefile.tell() + previous_progress
                    current_percent = 100 * current_progress / cachesize
                    if current_percent > previous_percent:
                        previous_percent = current_percent
                        bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
                                      self.data)

                previous_progress += current_progress
            previous_progress += current_progress

        # Note: depends cache number is corresponding to the parsing file numbers.
        # The same file has several caches, still regarded as one item in the cache
@@ -476,33 +361,69 @@ class Cache(NoCache):
                                  len(self.depends_cache)),
                      self.data)

    def parse(self, filename, appends):

    @staticmethod
    def virtualfn2realfn(virtualfn):
        """
        Convert a virtual file name to a real one + the associated subclass keyword
        """

        fn = virtualfn
        cls = ""
        if virtualfn.startswith('virtual:'):
            elems = virtualfn.split(':')
            cls = ":".join(elems[1:-1])
            fn = elems[-1]
        return (fn, cls)

    @staticmethod
    def realfn2virtual(realfn, cls):
        """
        Convert a real filename + the associated subclass keyword to a virtual filename
        """
        if cls == "":
            return realfn
        return "virtual:" + cls + ":" + realfn

    @classmethod
    def loadDataFull(cls, virtualfn, appends, cfgData):
        """
        Return a complete set of data for fn.
        To do this, we need to parse the file.
        """

        (fn, virtual) = cls.virtualfn2realfn(virtualfn)

        logger.debug(1, "Parsing %s (full)", fn)

        cfgData.setVar("__ONLYFINALISE", virtual or "default")
        bb_data = cls.load_bbfile(fn, appends, cfgData)
        return bb_data[virtual]

    @classmethod
    def parse(cls, filename, appends, configdata, caches_array):
        """Parse the specified filename, returning the recipe information"""
        logger.debug(1, "Parsing %s", filename)
        infos = []
        datastores = self.load_bbfile(filename, appends)
        datastores = cls.load_bbfile(filename, appends, configdata)
        depends = []
        variants = []
        # Process the "real" fn last so we can store variants list
        for variant, data in sorted(datastores.items(),
        for variant, data in sorted(datastores.iteritems(),
                                    key=lambda i: i[0],
                                    reverse=True):
            virtualfn = variant2virtual(filename, variant)
            variants.append(variant)
            virtualfn = cls.realfn2virtual(filename, variant)
            depends = depends + (data.getVar("__depends", False) or [])
            if depends and not variant:
                data.setVar("__depends", depends)
            if virtualfn == filename:
                data.setVar("__VARIANTS", " ".join(variants))

            info_array = []
            for cache_class in self.caches_array:
                info = cache_class(filename, data)
                info_array.append(info)
            for cache_class in caches_array:
                if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                    info = cache_class(filename, data)
                    info_array.append(info)
            infos.append((virtualfn, info_array))

        return infos

    def load(self, filename, appends):
    def load(self, filename, appends, configdata):
        """Obtain the recipe information for the specified filename,
        using cached values if available, otherwise parsing.

@@ -516,20 +437,21 @@ class Cache(NoCache):
            # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
            info_array = self.depends_cache[filename]
            for variant in info_array[0].variants:
                virtualfn = variant2virtual(filename, variant)
                virtualfn = self.realfn2virtual(filename, variant)
                infos.append((virtualfn, self.depends_cache[virtualfn]))
        else:
            logger.debug(1, "Parsing %s", filename)
            return self.parse(filename, appends, configdata, self.caches_array)

        return cached, infos

    def loadData(self, fn, appends, cacheData):
    def loadData(self, fn, appends, cfgData, cacheData):
        """Load the recipe info for the specified filename,
        parsing and adding to the cache if necessary, and adding
        the recipe information to the supplied CacheData instance."""
        skipped, virtuals = 0, 0

        cached, infos = self.load(fn, appends)
        cached, infos = self.load(fn, appends, cfgData)
        for virtualfn, info_array in infos:
            if info_array[0].skipped:
                logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason)
@@ -606,20 +528,7 @@ class Cache(NoCache):

        if hasattr(info_array[0], 'file_checksums'):
            for _, fl in info_array[0].file_checksums.items():
                fl = fl.strip()
                while fl:
                    # A .split() would be simpler but means spaces or colons in filenames would break
                    a = fl.find(":True")
                    b = fl.find(":False")
                    if ((a < 0) and b) or ((b > 0) and (b < a)):
                        f = fl[:b+6]
                        fl = fl[b+7:]
                    elif ((b < 0) and a) or ((a > 0) and (a < b)):
                        f = fl[:a+5]
                        fl = fl[a+6:]
                    else:
                        break
                    fl = fl.strip()
                for f in fl.split():
                    if "*" in f:
                        continue
                    f, exist = f.split(":")
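A standalone rendition of the scan above, showing why the find()-based walk replaces a plain split(): each entry is "<path>:True" or "<path>:False" and paths may contain spaces (sample data invented):

fl = "/src/a file.c:True /src/b.c:False"
entries = []
fl = fl.strip()
while fl:
    a, b = fl.find(":True"), fl.find(":False")
    if ((a < 0) and b) or ((b > 0) and (b < a)):
        entries.append(fl[:b + 6])
        fl = fl[b + 7:]
    elif ((b < 0) and a) or ((a > 0) and (a < b)):
        entries.append(fl[:a + 5])
        fl = fl[a + 6:]
    else:
        break
    fl = fl.strip()
print(entries)    # ['/src/a file.c:True', '/src/b.c:False']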
@@ -637,19 +546,16 @@ class Cache(NoCache):

        invalid = False
        for cls in info_array[0].variants:
            virtualfn = variant2virtual(fn, cls)
            virtualfn = self.realfn2virtual(fn, cls)
            self.clean.add(virtualfn)
            if virtualfn not in self.depends_cache:
                logger.debug(2, "Cache: %s is not cached", virtualfn)
                invalid = True
            elif len(self.depends_cache[virtualfn]) != len(self.caches_array):
                logger.debug(2, "Cache: Extra caches missing for %s?" % virtualfn)
                invalid = True

        # If any one of the variants is not present, mark as invalid for all
        if invalid:
            for cls in info_array[0].variants:
                virtualfn = variant2virtual(fn, cls)
                virtualfn = self.realfn2virtual(fn, cls)
                if virtualfn in self.clean:
                    logger.debug(2, "Cache: Removing %s from cache", virtualfn)
                    self.clean.remove(virtualfn)
@@ -686,19 +592,30 @@ class Cache(NoCache):
            logger.debug(2, "Cache is clean, not saving.")
            return

        file_dict = {}
        pickler_dict = {}
        for cache_class in self.caches_array:
            cache_class_name = cache_class.__name__
            cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
            with open(cachefile, "wb") as f:
                p = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
                p.dump(__cache_version__)
                p.dump(bb.__version__)
            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                cache_class_name = cache_class.__name__
                cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                file_dict[cache_class_name] = open(cachefile, "wb")
                pickler_dict[cache_class_name] = pickle.Pickler(file_dict[cache_class_name], pickle.HIGHEST_PROTOCOL)

        pickler_dict['CoreRecipeInfo'].dump(__cache_version__)
        pickler_dict['CoreRecipeInfo'].dump(bb.__version__)

                for key, info_array in self.depends_cache.items():
                    for info in info_array:
                        if isinstance(info, RecipeInfoCommon) and info.__class__.__name__ == cache_class_name:
                            p.dump(key)
                            p.dump(info)
        try:
            for key, info_array in self.depends_cache.iteritems():
                for info in info_array:
                    if isinstance(info, RecipeInfoCommon):
                        cache_class_name = info.__class__.__name__
                        pickler_dict[cache_class_name].dump(key)
                        pickler_dict[cache_class_name].dump(info)
        finally:
            for cache_class in self.caches_array:
                if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                    cache_class_name = cache_class.__name__
                    file_dict[cache_class_name].close()

        del self.depends_cache

@@ -726,13 +643,50 @@ class Cache(NoCache):
        Save data we need into the cache
        """

        realfn = virtualfn2realfn(file_name)[0]
        realfn = self.virtualfn2realfn(file_name)[0]

        info_array = []
        for cache_class in self.caches_array:
            info_array.append(cache_class(realfn, data))
            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                info_array.append(cache_class(realfn, data))
        self.add_info(file_name, info_array, cacheData, parsed)

    @staticmethod
    def load_bbfile(bbfile, appends, config):
        """
        Load and parse one .bb build file
        Return the data and whether parsing resulted in the file being skipped
        """
        chdir_back = False

        from bb import data, parse

        # expand tmpdir to include this topdir
        data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config)
        bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
        oldpath = os.path.abspath(os.getcwd())
        parse.cached_mtime_noerror(bbfile_loc)
        bb_data = data.init_db(config)
        # The ConfHandler first looks if there is a TOPDIR and if not
        # then it would call getcwd().
        # Previously, we chdir()ed to bbfile_loc, called the handler
        # and finally chdir()ed back, a couple of thousand times. We now
        # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
        if not data.getVar('TOPDIR', bb_data):
            chdir_back = True
            data.setVar('TOPDIR', bbfile_loc, bb_data)
        try:
            if appends:
                data.setVar('__BBAPPEND', " ".join(appends), bb_data)
            bb_data = parse.handle(bbfile, bb_data)
            if chdir_back:
                os.chdir(oldpath)
            return bb_data
        except:
            if chdir_back:
                os.chdir(oldpath)
            raise

def init(cooker):
    """
@@ -762,9 +716,8 @@ class CacheData(object):
    def __init__(self, caches_array):
        self.caches_array = caches_array
        for cache_class in self.caches_array:
            if not issubclass(cache_class, RecipeInfoCommon):
                bb.error("Extra cache data class %s should subclass RecipeInfoCommon class" % cache_class)
            cache_class.init_cacheData(self)
            if type(cache_class) is type and issubclass(cache_class, RecipeInfoCommon):
                cache_class.init_cacheData(self)

        # Direct cache variables
        self.task_queues = {}
@@ -791,14 +744,13 @@ class MultiProcessCache(object):
        self.cachedata = self.create_cachedata()
        self.cachedata_extras = self.create_cachedata()

    def init_cache(self, d, cache_file_name=None):
    def init_cache(self, d):
        cachedir = (d.getVar("PERSISTENT_DIR", True) or
                    d.getVar("CACHE", True))
        if cachedir in [None, '']:
            return
        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir,
                                      cache_file_name or self.__class__.cache_file_name)
        self.cachefile = os.path.join(cachedir, self.__class__.cache_file_name)
        logger.debug(1, "Using cache in '%s'", self.cachefile)

        glf = bb.utils.lockfile(self.cachefile + ".lock")
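Typical wiring for a MultiProcessCache subclass against the interface above (class name and file name are illustrative; note that this diff shows two signature variants of save_extras/save_merge, with and without d):

class ExampleCache(MultiProcessCache):
    cache_file_name = "bb_example.dat"   # placed under PERSISTENT_DIR or CACHE

cache = ExampleCache()
cache.init_cache(d)      # d must provide PERSISTENT_DIR or CACHE
cache.save_extras()      # per-process deltas, merged later by save_merge()
cache.save_merge()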
@@ -822,7 +774,7 @@ class MultiProcessCache(object):
            data = [{}]
        return data

    def save_extras(self):
    def save_extras(self, d):
        if not self.cachefile:
            return

@@ -852,7 +804,7 @@ class MultiProcessCache(object):
                    if h not in dest[j]:
                        dest[j][h] = source[j][h]

    def save_merge(self):
    def save_merge(self, d):
        if not self.cachefile:
            return

@@ -15,17 +15,22 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import glob
import operator
import os
import stat
import pickle
import bb.utils
import logging
from bb.cache import MultiProcessCache

logger = logging.getLogger("BitBake.Cache")

try:
    import cPickle as pickle
except ImportError:
    import pickle
    logger.info("Importing cPickle failed. "
                "Falling back to a very slow implementation.")


# mtime cache (non-persistent)
# based upon the assumption that files do not change during bitbake run
class FileMtimeCache(object):
@@ -83,52 +88,3 @@ class FileChecksumCache(MultiProcessCache):
                        dest[0][h] = source[0][h]
                else:
                    dest[0][h] = source[0][h]

    def get_checksums(self, filelist, pn):
        """Get checksums for a list of files"""

        def checksum_file(f):
            try:
                checksum = self.get_checksum(f)
            except OSError as e:
                bb.warn("Unable to get checksum for %s SRC_URI entry %s: %s" % (pn, os.path.basename(f), e))
                return None
            return checksum

        def checksum_dir(pth):
            # Handle directories recursively
            dirchecksums = []
            for root, dirs, files in os.walk(pth):
                for name in files:
                    fullpth = os.path.join(root, name)
                    checksum = checksum_file(fullpth)
                    if checksum:
                        dirchecksums.append((fullpth, checksum))
            return dirchecksums

        checksums = []
        for pth in filelist.split():
            exist = pth.split(":")[1]
            if exist == "False":
                continue
            pth = pth.split(":")[0]
            if '*' in pth:
                # Handle globs
                for f in glob.glob(pth):
                    if os.path.isdir(f):
                        if not os.path.islink(f):
                            checksums.extend(checksum_dir(f))
                    else:
                        checksum = checksum_file(f)
                        if checksum:
                            checksums.append((f, checksum))
            elif os.path.isdir(pth):
                if not os.path.islink(pth):
                    checksums.extend(checksum_dir(pth))
            else:
                checksum = checksum_file(pth)
                if checksum:
                    checksums.append((pth, checksum))

        checksums.sort(key=operator.itemgetter(1))
        return checksums

@@ -1,20 +1,21 @@
import ast
import sys
import codegen
import logging
import pickle
import bb.pysh as pysh
import os.path
import bb.utils, bb.data
import hashlib
from itertools import chain
from bb.pysh import pyshyacc, pyshlex, sherrors
from pysh import pyshyacc, pyshlex, sherrors
from bb.cache import MultiProcessCache


logger = logging.getLogger('BitBake.CodeParser')

def bbhash(s):
    return hashlib.md5(s.encode("utf-8")).hexdigest()
try:
    import cPickle as pickle
except ImportError:
    import pickle
    logger.info('Importing cPickle failed. Falling back to a very slow implementation.')


def check_indent(codestr):
    """If the code is indented, add a top level piece of code to 'remove' the indentation"""
@@ -27,10 +28,6 @@ def check_indent(codestr):
        return codestr

    if codestr[i-1] == "\t" or codestr[i-1] == " ":
        if codestr[0] == "\n":
            # Since we're adding a line, we need to remove one line of any empty padding
            # to ensure line numbers are correct
            codestr = codestr[1:]
        return "if 1:\n" + codestr

    return codestr
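The effect of check_indent() in isolation: uniformly indented metadata snippets do not compile until wrapped in a dummy block (sketch):

snippet = "    x = 1\n    y = x + 1\n"
try:
    compile(snippet, "<string>", "exec")
except IndentationError:
    pass                                    # bare indented code is rejected
compile("if 1:\n" + snippet, "<string>", "exec")    # accepted once wrapped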
@@ -67,12 +64,11 @@ class SetCache(object):

        new = []
        for i in items:
            new.append(sys.intern(i))
            new.append(intern(i))
        s = frozenset(new)
        h = hash(s)
        if h in self.setcache:
            return self.setcache[h]
        self.setcache[h] = s
        if hash(s) in self.setcache:
            return self.setcache[hash(s)]
        self.setcache[hash(s)] = s
        return s

codecache = SetCache()
@@ -96,9 +92,6 @@ class pythonCacheLine(object):
        for c in sorted(self.contains.keys()):
            l = l + (c, hash(self.contains[c]))
        return hash(l)
    def __repr__(self):
        return " ".join([str(self.refs), str(self.execs), str(self.contains)])


class shellCacheLine(object):
    def __init__(self, execs):
@@ -112,12 +105,10 @@ class shellCacheLine(object):
        self.__init__(execs)
    def __hash__(self):
        return hash(self.execs)
    def __repr__(self):
        return str(self.execs)

class CodeParserCache(MultiProcessCache):
    cache_file_name = "bb_codeparser.dat"
    CACHE_VERSION = 8
    CACHE_VERSION = 7

    def __init__(self):
        MultiProcessCache.__init__(self)
@@ -148,10 +139,6 @@ class CodeParserCache(MultiProcessCache):
        return cacheline

    def init_cache(self, d):
        # Check if we already have the caches
        if self.pythoncache:
            return

        MultiProcessCache.init_cache(self, d)

        # cachedata gets re-assigned in the parent
@@ -167,11 +154,11 @@ codeparsercache = CodeParserCache()
def parser_cache_init(d):
    codeparsercache.init_cache(d)

def parser_cache_save():
    codeparsercache.save_extras()
def parser_cache_save(d):
    codeparsercache.save_extras(d)

def parser_cache_savemerge():
    codeparsercache.save_merge()
def parser_cache_savemerge(d):
    codeparsercache.save_merge(d)

Logger = logging.getLoggerClass()
class BufferedLogger(Logger):
@@ -191,7 +178,6 @@ class BufferedLogger(Logger):

class PythonParser():
    getvars = (".getVar", ".appendVar", ".prependVar")
    getvarflags = (".getVarFlag", ".appendVarFlag", ".prependVarFlag")
    containsfuncs = ("bb.utils.contains", "base_contains", "bb.utils.contains_any")
    execfuncs = ("bb.build.exec_func", "bb.build.exec_task")

@@ -211,33 +197,17 @@ class PythonParser():

    def visit_Call(self, node):
        name = self.called_node_name(node.func)
        if name and (name.endswith(self.getvars) or name.endswith(self.getvarflags) or name in self.containsfuncs):
        if name and name.endswith(self.getvars) or name in self.containsfuncs:
            if isinstance(node.args[0], ast.Str):
                varname = node.args[0].s
                if name in self.containsfuncs and isinstance(node.args[1], ast.Str):
                    if varname not in self.contains:
                        self.contains[varname] = set()
                    self.contains[varname].add(node.args[1].s)
                elif name.endswith(self.getvarflags):
                    if isinstance(node.args[1], ast.Str):
                        self.references.add('%s[%s]' % (varname, node.args[1].s))
                    else:
                        self.warn(node.func, node.args[1])
                else:
                    self.references.add(varname)
            else:
                self.references.add(node.args[0].s)
            else:
                self.warn(node.func, node.args[0])
        elif name and name.endswith(".expand"):
            if isinstance(node.args[0], ast.Str):
                value = node.args[0].s
                d = bb.data.init()
                parser = d.expandWithRefs(value, self.name)
                self.references |= parser.references
                self.execs |= parser.execs
                for varname in parser.contains:
                    if varname not in self.contains:
                        self.contains[varname] = set()
                    self.contains[varname] |= parser.contains[varname]
        elif name in self.execfuncs:
            if isinstance(node.args[0], ast.Str):
                self.var_execs.add(node.args[0].s)
@@ -260,7 +230,6 @@ class PythonParser():
                break

    def __init__(self, name, log):
        self.name = name
        self.var_execs = set()
        self.contains = {}
        self.execs = set()
@@ -270,11 +239,8 @@ class PythonParser():
        self.unhandled_message = "in call of %s, argument '%s' is not a string literal"
        self.unhandled_message = "while parsing %s, %s" % (name, self.unhandled_message)

    def parse_python(self, node, lineno=0, filename="<string>"):
        if not node or not node.strip():
            return

        h = bbhash(str(node))
    def parse_python(self, node):
        h = hash(str(node))

        if h in codeparsercache.pythoncache:
            self.references = set(codeparsercache.pythoncache[h].refs)
@@ -292,9 +258,7 @@ class PythonParser():
            self.contains[i] = set(codeparsercache.pythoncacheextras[h].contains[i])
            return

        # We can't add to the linenumbers for compile, we can pad to the correct number of blank lines though
        node = "\n" * int(lineno) + node
        code = compile(check_indent(str(node)), filename, "exec",
        code = compile(check_indent(str(node)), "<string>", "exec",
                       ast.PyCF_ONLY_AST)

        for n in ast.walk(code):
@@ -319,7 +283,7 @@ class ShellParser():
        commands it executes.
        """

        h = bbhash(str(value))
        h = hash(str(value))

        if h in codeparsercache.shellcache:
            self.execs = set(codeparsercache.shellcache[h].execs)

@@ -68,12 +68,10 @@ class Command:
        if not hasattr(command_method, 'readonly') or False == getattr(command_method, 'readonly'):
            return None, "Not able to execute not readonly commands in readonly mode"
        try:
            if getattr(command_method, 'needconfig', False):
                self.cooker.updateCacheSync()
            result = command_method(self, commandline)
        except CommandError as exc:
            return None, exc.args[0]
        except (Exception, SystemExit):
        except Exception:
            import traceback
            return None, traceback.format_exc()
        else:
@@ -110,7 +108,7 @@ class Command:
            return False
        except SystemExit as exc:
            arg = exc.args[0]
            if isinstance(arg, str):
            if isinstance(arg, basestring):
                self.finishAsyncCommand(arg)
            else:
                self.finishAsyncCommand("Exited with %s" % arg)
@@ -181,16 +179,6 @@ class CommandsSync:
        value = str(params[1])
        command.cooker.data.setVar(varname, value)

    def getSetVariable(self, command, params):
        """
        Read the value of a variable from data and set it into the datastore
        which effectively expands and locks the value.
        """
        varname = params[0]
        result = self.getVariable(command, params)
        command.cooker.data.setVar(varname, result)
        return result

    def setConfig(self, command, params):
        """
        Set the value of variable in configuration
@@ -216,7 +204,6 @@ class CommandsSync:
        postfiles = params[1].split()
        command.cooker.configuration.prefile = prefiles
        command.cooker.configuration.postfile = postfiles
    setPrePostConfFiles.needconfig = False

    def getCpuCount(self, command, params):
        """
@@ -224,12 +211,10 @@ class CommandsSync:
        """
        return bb.utils.cpu_count()
    getCpuCount.readonly = True
    getCpuCount.needconfig = False

    def matchFile(self, command, params):
        fMatch = params[0]
        return command.cooker.matchFile(fMatch)
    matchFile.needconfig = False

    def generateNewImage(self, command, params):
        image = params[0]
@@ -243,7 +228,6 @@ class CommandsSync:
    def ensureDir(self, command, params):
        directory = params[0]
        bb.utils.mkdirhier(directory)
    ensureDir.needconfig = False

    def setVarFile(self, command, params):
        """
@@ -254,7 +238,6 @@ class CommandsSync:
        default_file = params[2]
        op = params[3]
        command.cooker.modifyConfigurationVar(var, val, default_file, op)
    setVarFile.needconfig = False

    def removeVarFile(self, command, params):
        """
@@ -262,7 +245,6 @@ class CommandsSync:
        var = params[0]
        command.cooker.removeConfigurationVar(var)
    removeVarFile.needconfig = False

    def createConfigFile(self, command, params):
        """
@@ -270,7 +252,6 @@ class CommandsSync:
        name = params[0]
        command.cooker.createConfigFile(name)
    createConfigFile.needconfig = False

    def setEventMask(self, command, params):
        handlerNum = params[0]
@@ -278,8 +259,6 @@ class CommandsSync:
        debug_domains = params[2]
        mask = params[3]
        return bb.event.set_UIHmask(handlerNum, llevel, debug_domains, mask)
    setEventMask.needconfig = False
    setEventMask.readonly = True

    def setFeatures(self, command, params):
        """
@@ -287,7 +266,7 @@ class CommandsSync:
        """
        features = params[0]
        command.cooker.setFeatures(features)
    setFeatures.needconfig = False

    # although we change the internal state of the cooker, this is transparent since
    # we always take and leave the cooker in state.initial
    setFeatures.readonly = True
@@ -296,7 +275,6 @@ class CommandsSync:
        options = params[0]
        environment = params[1]
        command.cooker.updateConfigOpts(options, environment)
    updateConfig.needconfig = False

class CommandsAsync:
    """
File diff suppressed because it is too large
@@ -22,11 +22,9 @@
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import os, sys
|
||||
from functools import wraps
|
||||
import logging
|
||||
import bb
|
||||
from bb import data
|
||||
import bb.parse
|
||||
@@ -65,9 +63,9 @@ class ConfigParameters(object):
|
||||
raise Exception("Unable to set configuration option 'cmd' on the server: %s" % error)
|
||||
|
||||
if not self.options.pkgs_to_build:
|
||||
bbpkgs, error = server.runCommand(["getVariable", "BBTARGETS"])
|
||||
bbpkgs, error = server.runCommand(["getVariable", "BBPKGS"])
|
||||
if error:
|
||||
raise Exception("Unable to get the value of BBTARGETS from the server: %s" % error)
|
||||
raise Exception("Unable to get the value of BBPKGS from the server: %s" % error)
|
||||
if bbpkgs:
|
||||
self.options.pkgs_to_build.extend(bbpkgs.split())
|
||||
|
||||
@@ -75,8 +73,7 @@ class ConfigParameters(object):
|
||||
options = {}
|
||||
for o in ["abort", "tryaltconfigs", "force", "invalidate_stamp",
|
||||
"verbose", "debug", "dry_run", "dump_signatures",
|
||||
"debug_domains", "extra_assume_provided", "profile",
|
||||
"prefile", "postfile"]:
|
||||
"debug_domains", "extra_assume_provided", "profile"]:
|
||||
options[o] = getattr(self.options, o)
|
||||
|
||||
ret, error = server.runCommand(["updateConfig", options, environment])
|
||||
@@ -131,15 +128,12 @@ class CookerConfiguration(object):
|
||||
self.extra_assume_provided = []
|
||||
self.prefile = []
|
||||
self.postfile = []
|
||||
self.prefile_server = []
|
||||
self.postfile_server = []
|
||||
self.debug = 0
|
||||
self.cmd = None
|
||||
self.abort = True
|
||||
self.force = False
|
||||
self.profile = False
|
||||
self.nosetscene = False
|
||||
self.setsceneonly = False
|
||||
self.invalidate_stamp = False
|
||||
self.dump_signatures = []
|
||||
self.dry_run = False
|
||||
@@ -179,26 +173,11 @@ def catch_parse_error(func):
|
||||
def wrapped(fn, *args):
|
||||
try:
|
||||
return func(fn, *args)
|
||||
except IOError as exc:
|
||||
except (IOError, bb.parse.ParseError, bb.data_smart.ExpansionError) as exc:
|
||||
import traceback
|
||||
parselog.critical(traceback.format_exc())
|
||||
parselog.critical( traceback.format_exc())
|
||||
parselog.critical("Unable to parse %s: %s" % (fn, exc))
|
||||
sys.exit(1)
|
||||
except bb.data_smart.ExpansionError as exc:
|
||||
import traceback
|
||||
|
||||
bbdir = os.path.dirname(__file__) + os.sep
|
||||
exc_class, exc, tb = sys.exc_info()
|
||||
for tb in iter(lambda: tb.tb_next, None):
|
||||
# Skip frames in bitbake itself, we only want the metadata
|
||||
fn, _, _, _ = traceback.extract_tb(tb, 1)[0]
|
||||
if not fn.startswith(bbdir):
|
||||
break
|
||||
parselog.critical("Unable to parse %s" % fn, exc_info=(exc_class, exc, tb))
|
||||
sys.exit(1)
|
||||
except bb.parse.ParseError as exc:
|
||||
parselog.critical(str(exc))
|
||||
sys.exit(1)
|
||||
return wrapped
|
||||
|
||||
@catch_parse_error
|
||||
@@ -237,9 +216,9 @@ class CookerDataBuilder(object):

bb.utils.set_context(bb.utils.clean_context())
bb.event.set_class_handlers(bb.event.clean_class_handlers())
self.basedata = bb.data.init()
self.data = bb.data.init()
if self.tracking:
self.basedata.enableTracking()
self.data.enableTracking()

# Keep a datastore of the initial environment variables and their
# values from when BitBake was launched to enable child processes
@@ -250,49 +229,16 @@ class CookerDataBuilder(object):
self.savedenv.setVar(k, cookercfg.env[k])

filtered_keys = bb.utils.approved_variables()
bb.data.inheritFromOS(self.basedata, self.savedenv, filtered_keys)
self.basedata.setVar("BB_ORIGENV", self.savedenv)
bb.data.inheritFromOS(self.data, self.savedenv, filtered_keys)
self.data.setVar("BB_ORIGENV", self.savedenv)

if worker:
self.basedata.setVar("BB_WORKERCONTEXT", "1")

self.data = self.basedata
self.mcdata = {}
self.data.setVar("BB_WORKERCONTEXT", "1")

def parseBaseConfiguration(self):
try:
bb.parse.init_parser(self.basedata)
self.data = self.parseConfigurationFiles(self.prefiles, self.postfiles)

if self.data.getVar("BB_WORKERCONTEXT", False) is None:
bb.fetch.fetcher_init(self.data)
bb.codeparser.parser_cache_init(self.data)

bb.event.fire(bb.event.ConfigParsed(), self.data)

reparse_cnt = 0
while self.data.getVar("BB_INVALIDCONF", False) is True:
if reparse_cnt > 20:
logger.error("Configuration has been re-parsed over 20 times, "
"breaking out of the loop...")
raise Exception("Too deep config re-parse loop. Check locations where "
"BB_INVALIDCONF is being set (ConfigParsed event handlers)")
self.data.setVar("BB_INVALIDCONF", False)
self.data = self.parseConfigurationFiles(self.prefiles, self.postfiles)
reparse_cnt += 1
bb.event.fire(bb.event.ConfigParsed(), self.data)

bb.parse.init_parser(self.data)
self.data_hash = self.data.get_hash()
self.mcdata[''] = self.data

multiconfig = (self.data.getVar("BBMULTICONFIG", True) or "").split()
for config in multiconfig:
mcdata = self.parseConfigurationFiles(['conf/multiconfig/%s.conf' % config] + self.prefiles, self.postfiles)
bb.event.fire(bb.event.ConfigParsed(), mcdata)
self.mcdata[config] = mcdata

except (SyntaxError, bb.BBHandledException):
self.parseConfigurationFiles(self.prefiles, self.postfiles)
except SyntaxError:
raise bb.BBHandledException
except bb.data_smart.ExpansionError as e:
logger.error(str(e))
@@ -305,7 +251,8 @@ class CookerDataBuilder(object):
return findConfigFile("bblayers.conf", data)

def parseConfigurationFiles(self, prefiles, postfiles):
data = bb.data.createCopy(self.basedata)
data = self.data
bb.parse.init_parser(data)

# Parse files for loading *before* bitbake.conf and any includes
for f in prefiles:
@@ -322,24 +269,12 @@ class CookerDataBuilder(object):
layers = (data.getVar('BBLAYERS', True) or "").split()

data = bb.data.createCopy(data)
approved = bb.utils.approved_variables()
for layer in layers:
if not os.path.isdir(layer):
parselog.critical("Layer directory '%s' does not exist! "
"Please check BBLAYERS in %s" % (layer, layerconf))
sys.exit(1)
parselog.debug(2, "Adding layer %s", layer)
if 'HOME' in approved and '~' in layer:
layer = os.path.expanduser(layer)
if layer.endswith('/'):
layer = layer.rstrip('/')
data.setVar('LAYERDIR', layer)
data.setVar('LAYERDIR_RE', re.escape(layer))
data = parse_config_file(os.path.join(layer, "conf", "layer.conf"), data)
data.expandVarref('LAYERDIR')
data.expandVarref('LAYERDIR_RE')

data.delVar('LAYERDIR_RE')
data.delVar('LAYERDIR')

if not data.getVar("BBPATH", True):
@@ -363,15 +298,23 @@ class CookerDataBuilder(object):

# Normally we only register event handlers at the end of parsing .bb files
# We register any handlers we've found so far here...
for var in data.getVar('__BBHANDLERS', False) or []:
handlerfn = data.getVarFlag(var, "filename", False)
if not handlerfn:
parselog.critical("Undefined event handler function '%s'" % var)
sys.exit(1)
handlerln = int(data.getVarFlag(var, "lineno", False))
bb.event.register(var, data.getVar(var, False), (data.getVarFlag(var, "eventmask", True) or "").split(), handlerfn, handlerln)
for var in data.getVar('__BBHANDLERS') or []:
bb.event.register(var, data.getVar(var), (data.getVarFlag(var, "eventmask", True) or "").split())

if data.getVar("BB_WORKERCONTEXT", False) is None:
bb.fetch.fetcher_init(data)
bb.codeparser.parser_cache_init(data)
bb.event.fire(bb.event.ConfigParsed(), data)

if data.getVar("BB_INVALIDCONF") is True:
data.setVar("BB_INVALIDCONF", False)
self.parseConfigurationFiles(self.prefiles, self.postfiles)
return

bb.parse.init_parser(data)
data.setVar('BBINCLUDED',bb.parse.get_file_depends(data))
self.data = data
self.data_hash = data.get_hash()

return data
@@ -178,8 +178,8 @@ def createDaemon(function, logfile):
# os.dup2(0, 2) # standard error (2)

si = open('/dev/null', 'r')
so = open(logfile, 'w')
si = file('/dev/null', 'r')
so = file(logfile, 'w')
se = so
@@ -84,7 +84,7 @@ def setVar(var, value, d):
d.setVar(var, value)

def getVar(var, d, exp = False):
def getVar(var, d, exp = 0):
"""Gets the value of a variable"""
return d.getVar(var, exp)

@@ -107,7 +107,7 @@ def setVarFlag(var, flag, flagvalue, d):

def getVarFlag(var, flag, d):
"""Gets given flag from given var"""
return d.getVarFlag(var, flag, False)
return d.getVarFlag(var, flag)

def delVarFlag(var, flag, d):
"""Removes a given flag from the variable's flags"""
@@ -159,12 +159,13 @@ def expandKeys(alterdata, readdata = None):

# These two for loops are split for performance to maximise the
# usefulness of the expand cache
for key in sorted(todolist):

for key in todolist:
ekey = todolist[key]
newval = alterdata.getVar(ekey, False)
if newval is not None:
val = alterdata.getVar(key, False)
if val is not None:
newval = alterdata.getVar(ekey, 0)
if newval:
val = alterdata.getVar(key, 0)
if val is not None and newval is not None:
bb.warn("Variable key %s (%s) replaces original key %s (%s)." % (key, val, ekey, newval))
alterdata.renameVar(key, ekey)

@@ -174,7 +175,7 @@ def inheritFromOS(d, savedenv, permitted):
for s in savedenv.keys():
if s in permitted:
try:
d.setVar(s, savedenv.getVar(s, True), op = 'from env')
d.setVar(s, getVar(s, savedenv, True), op = 'from env')
if s in exportlist:
d.setVarFlag(s, "export", True, op = 'auto env export')
except TypeError:
@@ -182,39 +183,39 @@ def inheritFromOS(d, savedenv, permitted):

def emit_var(var, o=sys.__stdout__, d = init(), all=False):
"""Emit a variable to be sourced by a shell."""
func = d.getVarFlag(var, "func", False)
if d.getVarFlag(var, 'python', False) and func:
return False
if getVarFlag(var, "python", d):
return 0

export = d.getVarFlag(var, "export", False)
unexport = d.getVarFlag(var, "unexport", False)
export = getVarFlag(var, "export", d)
unexport = getVarFlag(var, "unexport", d)
func = getVarFlag(var, "func", d)
if not all and not export and not unexport and not func:
return False
return 0

try:
if all:
oval = d.getVar(var, False)
val = d.getVar(var, True)
oval = getVar(var, d, 0)
val = getVar(var, d, 1)
except (KeyboardInterrupt, bb.build.FuncFailed):
raise
except Exception as exc:
o.write('# expansion of %s threw %s: %s\n' % (var, exc.__class__.__name__, str(exc)))
return False
return 0

if all:
d.varhistory.emit(var, oval, val, o, d)
d.varhistory.emit(var, oval, val, o)

if (var.find("-") != -1 or var.find(".") != -1 or var.find('{') != -1 or var.find('}') != -1 or var.find('+') != -1) and not all:
return False
return 0

varExpanded = d.expand(var)
varExpanded = expand(var, d)

if unexport:
o.write('unset %s\n' % varExpanded)
return False
return 0

if val is None:
return False
return 0

val = str(val)

@@ -223,11 +224,10 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):
val = val[3:] # Strip off "() "
o.write("%s() %s\n" % (varExpanded, val))
o.write("export -f %s\n" % (varExpanded))
return True
return 1

if func:
# NOTE: should probably check for unbalanced {} within the var
val = val.rstrip('\n')
o.write("%s() {\n%s\n}\n" % (varExpanded, val))
return 1

@@ -240,12 +240,12 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):
alter = re.sub('\n', ' \\\n', alter)
alter = re.sub('\\$', '\\\\$', alter)
o.write('%s="%s"\n' % (varExpanded, alter))
return False
return 0

def emit_env(o=sys.__stdout__, d = init(), all=False):
"""Emits all items in the data store in a format such that it can be sourced by a shell."""

isfunc = lambda key: bool(d.getVarFlag(key, "func", False))
isfunc = lambda key: bool(d.getVarFlag(key, "func"))
keys = sorted((key for key in d.keys() if not key.startswith("__")), key=isfunc)
grouped = groupby(keys, isfunc)
for isfunc, keys in grouped:
@@ -254,8 +254,8 @@ def emit_env(o=sys.__stdout__, d = init(), all=False):

def exported_keys(d):
return (key for key in d.keys() if not key.startswith('__') and
d.getVarFlag(key, 'export', False) and
not d.getVarFlag(key, 'unexport', False))
d.getVarFlag(key, 'export') and
not d.getVarFlag(key, 'unexport'))

def exported_vars(d):
for key in exported_keys(d):
@@ -270,11 +270,10 @@ def exported_vars(d):
def emit_func(func, o=sys.__stdout__, d = init()):
"""Emits all items in the data store in a format such that it can be sourced by a shell."""

keys = (key for key in d.keys() if not key.startswith("__") and not d.getVarFlag(key, "func", False))
keys = (key for key in d.keys() if not key.startswith("__") and not d.getVarFlag(key, "func"))
for key in keys:
emit_var(key, o, d, False)
emit_var(key, o, d, False) and o.write('\n')

o.write('\n')
emit_var(func, o, d, False) and o.write('\n')
newdeps = bb.codeparser.ShellParser(func, logger).parse_shell(d.getVar(func, True))
newdeps |= set((d.getVarFlag(func, "vardeps", True) or "").split())
@@ -284,7 +283,7 @@ def emit_func(func, o=sys.__stdout__, d = init()):
seen |= deps
newdeps = set()
for dep in deps:
if d.getVarFlag(dep, "func", False) and not d.getVarFlag(dep, "python", False):
if d.getVarFlag(dep, "func") and not d.getVarFlag(dep, "python"):
emit_var(dep, o, d, False) and o.write('\n')
newdeps |= bb.codeparser.ShellParser(dep, logger).parse_shell(d.getVar(dep, True))
newdeps |= set((d.getVarFlag(dep, "vardeps", True) or "").split())
@@ -298,7 +297,7 @@ def emit_func_python(func, o=sys.__stdout__, d = init()):
"""Emits all items in the data store in a format such that it can be sourced by a shell."""

def write_func(func, o, call = False):
body = d.getVar(func, False)
body = d.getVar(func, True)
if not body.startswith("def"):
body = _functionfmt.format(function=func, body=body)

@@ -308,7 +307,7 @@ def emit_func_python(func, o=sys.__stdout__, d = init()):

write_func(func, o, True)
pp = bb.codeparser.PythonParser(func, logger)
pp.parse_python(d.getVar(func, False))
pp.parse_python(d.getVar(func, True))
newdeps = pp.execs
newdeps |= set((d.getVarFlag(func, "vardeps", True) or "").split())
seen = set()
@@ -317,10 +316,10 @@ def emit_func_python(func, o=sys.__stdout__, d = init()):
seen |= deps
newdeps = set()
for dep in deps:
if d.getVarFlag(dep, "func", False) and d.getVarFlag(dep, "python", False):
if d.getVarFlag(dep, "func") and d.getVarFlag(dep, "python"):
write_func(dep, o)
pp = bb.codeparser.PythonParser(dep, logger)
pp.parse_python(d.getVar(dep, False))
pp.parse_python(d.getVar(dep, True))
newdeps |= pp.execs
newdeps |= set((d.getVarFlag(dep, "vardeps", True) or "").split())
newdeps -= seen
@@ -339,7 +338,7 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
deps |= parser.references
deps = deps | (keys & parser.execs)
return deps, value
varflags = d.getVarFlags(key, ["vardeps", "vardepvalue", "vardepsexclude", "vardepvalueexclude", "exports", "postfuncs", "prefuncs", "lineno", "filename"]) or {}
varflags = d.getVarFlags(key, ["vardeps", "vardepvalue", "vardepsexclude", "vardepvalueexclude", "postfuncs", "prefuncs"]) or {}
vardeps = varflags.get("vardeps")
value = d.getVar(key, False)

@@ -362,29 +361,27 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
value = varflags.get("vardepvalue")
elif varflags.get("func"):
if varflags.get("python"):
parsedvar = d.expandWithRefs(value, key)
parser = bb.codeparser.PythonParser(key, logger)
if value and "\t" in value:
logger.warning("Variable %s contains tabs, please remove these (%s)" % (key, d.getVar("FILE", True)))
parser.parse_python(value, filename=varflags.get("filename"), lineno=varflags.get("lineno"))
if parsedvar.value and "\t" in parsedvar.value:
logger.warn("Variable %s contains tabs, please remove these (%s)" % (key, d.getVar("FILE", True)))
parser.parse_python(parsedvar.value)
deps = deps | parser.references
deps = deps | (keys & parser.execs)
value = handle_contains(value, parser.contains, d)
else:
parsedvar = d.expandWithRefs(value, key)
parser = bb.codeparser.ShellParser(key, logger)
parser.parse_shell(parsedvar.value)
deps = deps | shelldeps
deps = deps | parsedvar.references
deps = deps | (keys & parser.execs) | (keys & parsedvar.execs)
value = handle_contains(value, parsedvar.contains, d)
if vardeps is None:
parser.log.flush()
if "prefuncs" in varflags:
deps = deps | set(varflags["prefuncs"].split())
if "postfuncs" in varflags:
deps = deps | set(varflags["postfuncs"].split())
if "exports" in varflags:
deps = deps | set(varflags["exports"].split())
deps = deps | parsedvar.references
deps = deps | (keys & parser.execs) | (keys & parsedvar.execs)
value = handle_contains(value, parsedvar.contains, d)
else:
parser = d.expandWithRefs(value, key)
deps |= parser.references
@@ -409,8 +406,7 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
deps |= set((vardeps or "").split())
deps -= set(varflags.get("vardepsexclude", "").split())
except Exception as e:
bb.warn("Exception during build_dependencies for %s" % key)
raise
raise bb.data_smart.ExpansionError(key, None, e)
return deps, value
#bb.note("Variable %s references %s and calls %s" % (key, str(deps), str(execs)))
#d.setVarFlag(key, "vardeps", deps)
@@ -418,13 +414,13 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
def generate_dependencies(d):

keys = set(key for key in d if not key.startswith("__"))
shelldeps = set(key for key in d.getVar("__exportlist", False) if d.getVarFlag(key, "export", False) and not d.getVarFlag(key, "unexport", False))
shelldeps = set(key for key in d.getVar("__exportlist", False) if d.getVarFlag(key, "export") and not d.getVarFlag(key, "unexport"))
varflagsexcl = d.getVar('BB_SIGNATURE_EXCLUDE_FLAGS', True)

deps = {}
values = {}

tasklist = d.getVar('__BBTASKS', False) or []
tasklist = d.getVar('__BBTASKS') or []
for task in tasklist:
deps[task], values[task] = build_dependencies(task, keys, shelldeps, varflagsexcl, d)
newdeps = deps[task]
@@ -442,7 +438,7 @@ def generate_dependencies(d):
return tasklist, deps, values

def inherits_class(klass, d):
val = d.getVar('__inherit_cache', False) or []
val = getVar('__inherit_cache', d) or []
needle = os.path.join('classes', '%s.bbclass' % klass)
for v in val:
if v.endswith(needle):
@@ -40,7 +40,7 @@ logger = logging.getLogger("BitBake.Data")

__setvar_keyword__ = ["_append", "_prepend", "_remove"]
__setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend|_remove)(_(?P<add>.*))?$')
__expand_var_regexp__ = re.compile(r"\${[^{}@\n\t :]+}")
__expand_var_regexp__ = re.compile(r"\${[^{}@\n\t ]+}")
__expand_python_regexp__ = re.compile(r"\${@.+?}")

def infer_caller_details(loginfo, parent = False, varval = True):
@@ -54,36 +54,27 @@ def infer_caller_details(loginfo, parent = False, varval = True):
return
# Infer caller's likely values for variable (var) and value (value),
# to reduce clutter in the rest of the code.
above = None
def set_above():
if varval and ('variable' not in loginfo or 'detail' not in loginfo):
try:
raise Exception
except Exception:
tb = sys.exc_info()[2]
if parent:
return tb.tb_frame.f_back.f_back.f_back
above = tb.tb_frame.f_back.f_back
else:
return tb.tb_frame.f_back.f_back

if varval and ('variable' not in loginfo or 'detail' not in loginfo):
if not above:
above = set_above()
lcls = above.f_locals.items()
above = tb.tb_frame.f_back
lcls = above.f_locals.items()
for k, v in lcls:
if k == 'value' and 'detail' not in loginfo:
loginfo['detail'] = v
if k == 'var' and 'variable' not in loginfo:
loginfo['variable'] = v
# Infer file/line/function from traceback
# Don't use traceback.extract_stack() since it fills the line contents which
# we don't need and that hits stat syscalls
if 'file' not in loginfo:
if not above:
above = set_above()
f = above.f_back
line = f.f_lineno
file = f.f_code.co_filename
func = f.f_code.co_name
depth = 3
if parent:
depth = 4
file, line, func, text = traceback.extract_stack(limit = depth)[0]
loginfo['file'] = file
loginfo['line'] = line
if func not in loginfo:
@@ -135,7 +126,7 @@ class VariableParse:
self.contains[k] = parser.contains[k].copy()
else:
self.contains[k].update(parser.contains[k])
value = utils.better_eval(codeobj, DataContext(self.d), {'d' : self.d})
value = utils.better_eval(codeobj, DataContext(self.d))
return str(value)

@@ -147,7 +138,7 @@ class DataContext(dict):

def __missing__(self, key):
value = self.metadata.getVar(key, True)
if value is None or self.metadata.getVarFlag(key, 'func', False):
if value is None or self.metadata.getVarFlag(key, 'func'):
raise KeyError(key)
else:
return value
@@ -240,10 +231,6 @@ class VariableHistory(object):

if var not in self.variables:
self.variables[var] = []
if not isinstance(self.variables[var], list):
return
if 'nodups' in loginfo and loginfo in self.variables[var]:
return
self.variables[var].append(loginfo.copy())

def variable(self, var):
@@ -252,20 +239,8 @@ class VariableHistory(object):
else:
return []

def emit(self, var, oval, val, o, d):
def emit(self, var, oval, val, o):
history = self.variable(var)

# Append override history
if var in d.overridedata:
for (r, override) in d.overridedata[var]:
for event in self.variable(r):
loginfo = event.copy()
if 'flag' in loginfo and not loginfo['flag'].startswith("_"):
continue
loginfo['variable'] = var
loginfo['op'] = 'override[%s]:%s' % (override, loginfo['op'])
history.append(loginfo)

commentVal = re.sub('\n', '\n#', str(oval))
if history:
if len(history) == 1:
@@ -312,31 +287,6 @@ class VariableHistory(object):
lines.append(line)
return lines

def get_variable_items_files(self, var, d):
"""
Use variable history to map items added to a list variable and
the files in which they were added.
"""
history = self.variable(var)
finalitems = (d.getVar(var, True) or '').split()
filemap = {}
isset = False
for event in history:
if 'flag' in event:
continue
if event['op'] == '_remove':
continue
if isset and event['op'] == 'set?':
continue
isset = True
items = d.expand(event['detail']).split()
for item in items:
# This is a little crude but is belt-and-braces to avoid us
# having to handle every possible operation type specifically
if item in finalitems and not item in filemap:
filemap[item] = event['file']
return filemap

def del_var_history(self, var, f=None, line=None):
"""If file f and line are not given, the entire history of var is deleted"""
if var in self.variables:
@@ -346,23 +296,23 @@ class VariableHistory(object):
self.variables[var] = []

class DataSmart(MutableMapping):
def __init__(self):
def __init__(self, special = None, seen = None ):
self.dict = {}

if special is None:
special = COWDictBase.copy()
if seen is None:
seen = COWDictBase.copy()

self.inchistory = IncludeHistory()
self.varhistory = VariableHistory(self)
self._tracking = False

self.expand_cache = {}

# cookie monster tribute
# Need to be careful about writes to overridedata as
# its only a shallow copy, could influence other data store
# copies!
self.overridedata = {}
self.overrides = None
self.overridevars = set(["OVERRIDES", "FILE"])
self.inoverride = False
self._special_values = special
self._seen_overrides = seen

self.expand_cache = {}

def enableTracking(self):
self._tracking = True
@@ -372,7 +322,7 @@ class DataSmart(MutableMapping):

def expandWithRefs(self, s, varname):

if not isinstance(s, str): # sanity check
if not isinstance(s, basestring): # sanity check
return VariableParse(varname, self, s)

if varname and varname in self.expand_cache:
@@ -384,12 +334,7 @@ class DataSmart(MutableMapping):
olds = s
try:
s = __expand_var_regexp__.sub(varparse.var_sub, s)
try:
s = __expand_python_regexp__.sub(varparse.python_sub, s)
except SyntaxError as e:
# Likely unmatched brackets, just don't expand the expression
if e.msg != "EOL while scanning string literal":
raise
s = __expand_python_regexp__.sub(varparse.python_sub, s)
if s == olds:
break
except ExpansionError:
@@ -397,7 +342,7 @@ class DataSmart(MutableMapping):
except bb.parse.SkipRecipe:
raise
except Exception as exc:
raise ExpansionError(varname, s, exc) from exc
raise ExpansionError(varname, s, exc)

varparse.value = s
@@ -409,34 +354,97 @@ class DataSmart(MutableMapping):
def expand(self, s, varname = None):
return self.expandWithRefs(s, varname).value

def finalize(self, parent = False):
return

def internal_finalize(self, parent = False):
"""Performs final steps upon the datastore, including application of overrides"""
self.overrides = None

def need_overrides(self):
if self.overrides is not None:
return
if self.inoverride:
return
for count in range(5):
self.inoverride = True
# Can end up here recursively so setup dummy values
self.overrides = []
self.overridesset = set()
self.overrides = (self.getVar("OVERRIDES", True) or "").split(":") or []
self.overridesset = set(self.overrides)
self.inoverride = False
self.expand_cache = {}
newoverrides = (self.getVar("OVERRIDES", True) or "").split(":") or []
if newoverrides == self.overrides:
break
self.overrides = newoverrides
self.overridesset = set(self.overrides)
else:
bb.fatal("Overrides could not be expanded into a stable state after 5 iterations, overrides must be being referenced by other overridden variables in some recursive fashion. Please provide your configuration to bitbake-devel so we can laugh, er, I mean try and understand how to make it work.")
overrides = (self.getVar("OVERRIDES", True) or "").split(":") or []
finalize_caller = {
'op': 'finalize',
}
infer_caller_details(finalize_caller, parent = parent, varval = False)

#
# Well let us see what breaks here. We used to iterate
# over each variable and apply the override and then
# do the line expanding.
# If we have bad luck - which we will have - the keys
# where in some order that is so important for this
# method which we don't have anymore.
# Anyway we will fix that and write test cases this
# time.

#
# First we apply all overrides
# Then we will handle _append and _prepend and store the _remove
# information for later.
#

# We only want to report finalization once per variable overridden.
finalizes_reported = {}

for o in overrides:
# calculate '_'+override
l = len(o) + 1

# see if one should even try
if o not in self._seen_overrides:
continue

vars = self._seen_overrides[o].copy()
for var in vars:
name = var[:-l]
try:
# Report only once, even if multiple changes.
if name not in finalizes_reported:
finalizes_reported[name] = True
finalize_caller['variable'] = name
finalize_caller['detail'] = 'was: ' + str(self.getVar(name, False))
self.varhistory.record(**finalize_caller)
# Copy history of the override over.
for event in self.varhistory.variable(var):
loginfo = event.copy()
loginfo['variable'] = name
loginfo['op'] = 'override[%s]:%s' % (o, loginfo['op'])
self.varhistory.record(**loginfo)
self.setVar(name, self.getVar(var, False), op = 'finalize', file = 'override[%s]' % o, line = '')
self.delVar(var)
except Exception:
logger.info("Untracked delVar")

# now on to the appends and prepends, and stashing the removes
for op in __setvar_keyword__:
if op in self._special_values:
appends = self._special_values[op] or []
for append in appends:
keep = []
for (a, o) in self.getVarFlag(append, op) or []:
match = True
if o:
for o2 in o.split("_"):
if not o2 in overrides:
match = False
if not match:
keep.append((a ,o))
continue

if op == "_append":
sval = self.getVar(append, False) or ""
sval += a
self.setVar(append, sval)
elif op == "_prepend":
sval = a + (self.getVar(append, False) or "")
self.setVar(append, sval)
elif op == "_remove":
removes = self.getVarFlag(append, "_removeactive", False) or []
removes.extend(a.split())
self.setVarFlag(append, "_removeactive", removes, ignore=True)

# We save overrides that may be applied at some later stage
if keep:
self.setVarFlag(append, op, keep, ignore=True)
else:
self.delVarFlag(append, op, ignore=True)

def initVar(self, var):
self.expand_cache = {}
@@ -467,10 +475,6 @@ class DataSmart(MutableMapping):

def setVar(self, var, value, **loginfo):
#print("var=" + str(var) + " val=" + str(value))
parsing=False
if 'parsing' in loginfo:
parsing=True

if 'op' not in loginfo:
loginfo['op'] = "set"
self.expand_cache = {}
@@ -479,7 +483,7 @@ class DataSmart(MutableMapping):
base = match.group('base')
keyword = match.group("keyword")
override = match.group('add')
l = self.getVarFlag(base, keyword, False) or []
l = self.getVarFlag(base, keyword) or []
l.append([value, override])
self.setVarFlag(base, keyword, l, ignore=True)
# And cause that to be recorded:
@@ -492,111 +496,65 @@ class DataSmart(MutableMapping):
self.varhistory.record(**loginfo)
# todo make sure keyword is not __doc__ or __module__
# pay the cookie monster
try:
self._special_values[keyword].add(base)
except KeyError:
self._special_values[keyword] = set()
self._special_values[keyword].add(base)

# more cookies for the cookie monster
if '_' in var:
self._setvar_update_overrides(base, **loginfo)

if base in self.overridevars:
self._setvar_update_overridevars(var, value)
return

if not var in self.dict:
self._makeShadowCopy(var)

if not parsing:
if "_append" in self.dict[var]:
del self.dict[var]["_append"]
if "_prepend" in self.dict[var]:
del self.dict[var]["_prepend"]
if var in self.overridedata:
active = []
self.need_overrides()
for (r, o) in self.overridedata[var]:
if o in self.overridesset:
active.append(r)
elif "_" in o:
if set(o.split("_")).issubset(self.overridesset):
active.append(r)
for a in active:
self.delVar(a)
del self.overridedata[var]

# more cookies for the cookie monster
if '_' in var:
self._setvar_update_overrides(var, **loginfo)
self._setvar_update_overrides(var)

# setting var
self.dict[var]["_content"] = value
self.varhistory.record(**loginfo)

if var in self.overridevars:
self._setvar_update_overridevars(var, value)

def _setvar_update_overridevars(self, var, value):
vardata = self.expandWithRefs(value, var)
new = vardata.references
new.update(vardata.contains.keys())
while not new.issubset(self.overridevars):
nextnew = set()
self.overridevars.update(new)
for i in new:
vardata = self.expandWithRefs(self.getVar(i, True), i)
nextnew.update(vardata.references)
nextnew.update(vardata.contains.keys())
new = nextnew
self.internal_finalize(True)

def _setvar_update_overrides(self, var, **loginfo):
def _setvar_update_overrides(self, var):
# aka pay the cookie monster
override = var[var.rfind('_')+1:]
shortvar = var[:var.rfind('_')]
while override and override.islower():
if shortvar not in self.overridedata:
self.overridedata[shortvar] = []
if [var, override] not in self.overridedata[shortvar]:
# Force CoW by recreating the list first
self.overridedata[shortvar] = list(self.overridedata[shortvar])
self.overridedata[shortvar].append([var, override])
while override:
if override not in self._seen_overrides:
self._seen_overrides[override] = set()
self._seen_overrides[override].add( var )
override = None
if "_" in shortvar:
override = var[shortvar.rfind('_')+1:]
shortvar = var[:shortvar.rfind('_')]
if len(shortvar) == 0:
override = None

def getVar(self, var, expand, noweakdefault=False, parsing=False):
return self.getVarFlag(var, "_content", expand, noweakdefault, parsing)
def getVar(self, var, expand=False, noweakdefault=False):
return self.getVarFlag(var, "_content", expand, noweakdefault)

def renameVar(self, key, newkey, **loginfo):
"""
Rename the variable key to newkey
"""
val = self.getVar(key, 0, parsing=True)
val = self.getVar(key, 0)
if val is not None:
loginfo['variable'] = newkey
loginfo['op'] = 'rename from %s' % key
loginfo['detail'] = val
self.varhistory.record(**loginfo)
self.setVar(newkey, val, ignore=True, parsing=True)
self.setVar(newkey, val, ignore=True)

for i in (__setvar_keyword__):
src = self.getVarFlag(key, i, False)
src = self.getVarFlag(key, i)
if src is None:
continue

dest = self.getVarFlag(newkey, i, False) or []
dest = self.getVarFlag(newkey, i) or []
dest.extend(src)
self.setVarFlag(newkey, i, dest, ignore=True)

if key in self.overridedata:
self.overridedata[newkey] = []
for (v, o) in self.overridedata[key]:
self.overridedata[newkey].append([v.replace(key, newkey), o])
self.renameVar(v, v.replace(key, newkey))

if '_' in newkey and val is None:
self._setvar_update_overrides(newkey, **loginfo)
if i in self._special_values and key in self._special_values[i]:
self._special_values[i].remove(key)
self._special_values[i].add(newkey)

loginfo['variable'] = key
loginfo['op'] = 'rename (to)'
@@ -607,12 +565,14 @@ class DataSmart(MutableMapping):
def appendVar(self, var, value, **loginfo):
loginfo['op'] = 'append'
self.varhistory.record(**loginfo)
self.setVar(var + "_append", value, ignore=True, parsing=True)
newvalue = (self.getVar(var, False) or "") + value
self.setVar(var, newvalue, ignore=True)

def prependVar(self, var, value, **loginfo):
loginfo['op'] = 'prepend'
self.varhistory.record(**loginfo)
self.setVar(var + "_prepend", value, ignore=True, parsing=True)
newvalue = value + (self.getVar(var, False) or "")
self.setVar(var, newvalue, ignore=True)

def delVar(self, var, **loginfo):
loginfo['detail'] = ""
@@ -620,28 +580,12 @@ class DataSmart(MutableMapping):
self.varhistory.record(**loginfo)
self.expand_cache = {}
self.dict[var] = {}
if var in self.overridedata:
del self.overridedata[var]
if '_' in var:
override = var[var.rfind('_')+1:]
shortvar = var[:var.rfind('_')]
while override and override.islower():
try:
if shortvar in self.overridedata:
# Force CoW by recreating the list first
self.overridedata[shortvar] = list(self.overridedata[shortvar])
self.overridedata[shortvar].remove([var, override])
except ValueError as e:
pass
override = None
if "_" in shortvar:
override = var[shortvar.rfind('_')+1:]
shortvar = var[:shortvar.rfind('_')]
if len(shortvar) == 0:
override = None
if override and override in self._seen_overrides and var in self._seen_overrides[override]:
self._seen_overrides[override].remove(var)

def setVarFlag(self, var, flag, value, **loginfo):
self.expand_cache = {}
if 'op' not in loginfo:
loginfo['op'] = "set"
loginfo['flag'] = flag
@@ -651,9 +595,7 @@ class DataSmart(MutableMapping):
self.dict[var][flag] = value

if flag == "_defaultval" and '_' in var:
self._setvar_update_overrides(var, **loginfo)
if flag == "_defaultval" and var in self.overridevars:
self._setvar_update_overridevars(var, value)
self._setvar_update_overrides(var)

if flag == "unexport" or flag == "export":
if not "__exportlist" in self.dict:
@@ -662,71 +604,14 @@ class DataSmart(MutableMapping):
self.dict["__exportlist"]["_content"] = set()
self.dict["__exportlist"]["_content"].add(var)
def getVarFlag(self, var, flag, expand, noweakdefault=False, parsing=False):
def getVarFlag(self, var, flag, expand=False, noweakdefault=False):
local_var = self._findVar(var)
value = None
if flag == "_content" and var in self.overridedata and not parsing:
match = False
active = {}
self.need_overrides()
for (r, o) in self.overridedata[var]:
# What about double overrides both with "_" in the name?
if o in self.overridesset:
active[o] = r
elif "_" in o:
if set(o.split("_")).issubset(self.overridesset):
active[o] = r

mod = True
while mod:
mod = False
for o in self.overrides:
for a in active.copy():
if a.endswith("_" + o):
t = active[a]
del active[a]
active[a.replace("_" + o, "")] = t
mod = True
elif a == o:
match = active[a]
del active[a]
if match:
value = self.getVar(match, False)

if local_var is not None and value is None:
if local_var is not None:
if flag in local_var:
value = copy.copy(local_var[flag])
elif flag == "_content" and "_defaultval" in local_var and not noweakdefault:
value = copy.copy(local_var["_defaultval"])

if flag == "_content" and local_var is not None and "_append" in local_var and not parsing:
if not value:
value = ""
self.need_overrides()
for (r, o) in local_var["_append"]:
match = True
if o:
for o2 in o.split("_"):
if not o2 in self.overrides:
match = False
if match:
value = value + r

if flag == "_content" and local_var is not None and "_prepend" in local_var and not parsing:
if not value:
value = ""
self.need_overrides()
for (r, o) in local_var["_prepend"]:

match = True
if o:
for o2 in o.split("_"):
if not o2 in self.overrides:
match = False
if match:
value = r + value

if expand and value:
# Only getvar (flag == _content) hits the expand cache
cachename = None
@@ -735,30 +620,19 @@ class DataSmart(MutableMapping):
else:
cachename = var + "[" + flag + "]"
value = self.expand(value, cachename)

if value and flag == "_content" and local_var is not None and "_remove" in local_var:
removes = []
self.need_overrides()
for (r, o) in local_var["_remove"]:
match = True
if o:
for o2 in o.split("_"):
if not o2 in self.overrides:
match = False
if match:
removes.extend(self.expand(r).split())

if value and flag == "_content" and local_var is not None and "_removeactive" in local_var:
removes = [self.expand(r).split() for r in local_var["_removeactive"]]
removes = reduce(lambda a, b: a+b, removes, [])
filtered = filter(lambda v: v not in removes,
value.split())
value = " ".join(filtered)
if expand and var in self.expand_cache:
if expand:
# We need to ensure the expand cache has the correct value
# flag == "_content" here
self.expand_cache[var].value = value
return value

def delVarFlag(self, var, flag, **loginfo):
self.expand_cache = {}
local_var = self._findVar(var)
if not local_var:
return
@@ -788,7 +662,6 @@ class DataSmart(MutableMapping):
self.setVarFlag(var, flag, newvalue, ignore=True)

def setVarFlags(self, var, flags, **loginfo):
self.expand_cache = {}
infer_caller_details(loginfo)
if not var in self.dict:
self._makeShadowCopy(var)
@@ -818,7 +691,6 @@ class DataSmart(MutableMapping):

def delVarFlags(self, var, **loginfo):
self.expand_cache = {}
if not var in self.dict:
self._makeShadowCopy(var)

@@ -836,12 +708,13 @@ class DataSmart(MutableMapping):
else:
del self.dict[var]

def createCopy(self):
"""
Create a copy of self by setting _data to self
"""
# we really want this to be a DataSmart...
data = DataSmart()
data = DataSmart(seen=self._seen_overrides.copy(), special=self._special_values.copy())
data.dict["_data"] = self.dict
data.varhistory = self.varhistory.copy()
data.varhistory.datasmart = data
@@ -849,12 +722,6 @@ class DataSmart(MutableMapping):

data._tracking = self._tracking

data.overrides = None
data.overridevars = copy.copy(self.overridevars)
# Should really be a deepcopy but has heavy overhead.
# Instead, we're careful with writes.
data.overridedata = copy.copy(self.overridedata)

return data

def expandVarref(self, variable, parents=False):
@@ -880,7 +747,6 @@ class DataSmart(MutableMapping):

def __iter__(self):
deleted = set()
overrides = set()
def keylist(d):
klist = set()
for key in d:
@@ -888,8 +754,6 @@ class DataSmart(MutableMapping):
continue
if key in deleted:
continue
if key in overrides:
continue
if not d[key]:
deleted.add(key)
continue
@@ -900,23 +764,11 @@ class DataSmart(MutableMapping):

return klist

self.need_overrides()
for var in self.overridedata:
for (r, o) in self.overridedata[var]:
if o in self.overridesset:
overrides.add(var)
elif "_" in o:
if set(o.split("_")).issubset(self.overridesset):
overrides.add(var)

for k in keylist(self.dict):
yield k

for k in overrides:
yield k

def __len__(self):
return len(frozenset(iter(self)))
return len(frozenset(self))

def __getitem__(self, item):
value = self.getVar(item, False)
@@ -961,8 +813,8 @@ class DataSmart(MutableMapping):

if key == "__BBANONFUNCS":
for i in bb_list:
value = d.getVar(i, False) or ""
value = d.getVar(i, True) or ""
data.update({i:value})

data_str = str([(k, data[k]) for k in sorted(data.keys())])
return hashlib.md5(data_str.encode("utf-8")).hexdigest()
return hashlib.md5(data_str).hexdigest()
@@ -24,11 +24,13 @@ BitBake build tools.

import os, sys
import warnings
import pickle
try:
import cPickle as pickle
except ImportError:
import pickle
import logging
import atexit
import traceback
import ast
import bb.utils
import bb.compat
import bb.exceptions
@@ -67,19 +69,9 @@ _ui_handler_seq = 0
_event_handler_map = {}
_catchall_handlers = {}
_eventfilter = None
_uiready = False

if hasattr(__builtins__, '__setitem__'):
builtins = __builtins__
else:
builtins = __builtins__.__dict__

def execute_handler(name, handler, event, d):
event.data = d
addedd = False
if 'd' not in builtins:
builtins['d'] = d
addedd = True
try:
ret = handler(event)
except (bb.parse.SkipRecipe, bb.BBHandledException):
@@ -95,8 +87,6 @@ def execute_handler(name, handler, event, d):
raise
finally:
del event.data
if addedd:
del builtins['d']

def fire_class_handlers(event, d):
if isinstance(event, logging.LogRecord):
@@ -104,7 +94,7 @@ def fire_class_handlers(event, d):

eid = str(event.__class__)[8:-2]
evt_hmap = _event_handler_map.get(eid, {})
for name, handler in list(_handlers.items()):
for name, handler in _handlers.iteritems():
if name in _catchall_handlers or name in evt_hmap:
if _eventfilter:
if not _eventfilter(name, handler, event, d):
@@ -117,36 +107,29 @@ def print_ui_queue():
"""If we're exiting before a UI has been spawned, display any queued
LogRecords to the console."""
logger = logging.getLogger("BitBake")
if not _uiready:
if not _ui_handlers:
from bb.msg import BBLogFormatter
stdout = logging.StreamHandler(sys.stdout)
stderr = logging.StreamHandler(sys.stderr)
formatter = BBLogFormatter("%(levelname)s: %(message)s")
stdout.setFormatter(formatter)
stderr.setFormatter(formatter)
console = logging.StreamHandler(sys.stdout)
console.setFormatter(BBLogFormatter("%(levelname)s: %(message)s"))
logger.handlers = [console]

# First check to see if we have any proper messages
msgprint = False
for event in ui_queue:
if isinstance(event, logging.LogRecord):
if event.levelno > logging.DEBUG:
if event.levelno >= logging.WARNING:
logger.handlers = [stderr]
else:
logger.handlers = [stdout]
logger.handle(event)
msgprint = True
if msgprint:
return

# Nope, so just print all of the messages we have (including debug messages)
logger.handlers = [stdout]
for event in ui_queue:
if isinstance(event, logging.LogRecord):
logger.handle(event)

def fire_ui_handlers(event, d):
if not _uiready:
if not _ui_handlers:
# No UI handlers registered yet, queue up the messages
ui_queue.append(event)
return
@@ -187,7 +170,7 @@ def fire_from_worker(event, d):
fire_ui_handlers(event, d)

noop = lambda _: None
def register(name, handler, mask=None, filename=None, lineno=None):
def register(name, handler, mask=[]):
"""Register an Event handler"""

# already registered
@@ -196,18 +179,10 @@ def register(name, handler, mask=None, filename=None, lineno=None):

if handler is not None:
# handle string containing python code
if isinstance(handler, str):
if isinstance(handler, basestring):
tmp = "def %s(e):\n%s" % (name, handler)
try:
code = bb.methodpool.compile_cache(tmp)
if not code:
if filename is None:
filename = "%s(e)" % name
code = compile(tmp, filename, "exec", ast.PyCF_ONLY_AST)
if lineno is not None:
ast.increment_lineno(code, lineno-1)
code = compile(code, filename, "exec")
bb.methodpool.compile_cache_add(tmp, code)
code = compile(tmp, "%s(e)" % name, "exec")
except SyntaxError:
logger.error("Unable to register event handler '%s':\n%s", name,
''.join(traceback.format_exc(limit=0)))
@@ -234,21 +209,11 @@ def remove(name, handler):
"""Remove an Event handler"""
_handlers.pop(name)

def get_handlers():
return _handlers

def set_handlers(handlers):
global _handlers
_handlers = handlers

def set_eventfilter(func):
global _eventfilter
_eventfilter = func

def register_UIHhandler(handler, mainui=False):
if mainui:
global _uiready
_uiready = True
def register_UIHhandler(handler):
bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1
_ui_handlers[_ui_handler_seq] = handler
level, debug_domains = bb.msg.constructLogOptions()
@@ -389,11 +354,7 @@ class BuildBase(Event):

class BuildInit(BuildBase):
"""buildFile or buildTargets was invoked"""
def __init__(self, p=[]):
name = None
BuildBase.__init__(self, name, p)

class BuildStarted(BuildBase, OperationStarted):
"""bbmake build run started"""
@@ -403,12 +364,11 @@ class BuildStarted(BuildBase, OperationStarted):

class BuildCompleted(BuildBase, OperationCompleted):
"""bbmake build run completed"""
def __init__(self, total, n, p, failures=0, interrupted=0):
def __init__(self, total, n, p, failures = 0):
if not failures:
OperationCompleted.__init__(self, total, "Building Succeeded")
else:
OperationCompleted.__init__(self, total, "Building Failed")
self._interrupted = interrupted
BuildBase.__init__(self, n, p, failures)

class DiskFull(Event):
@@ -423,7 +383,7 @@ class DiskFull(Event):
class NoProvider(Event):
"""No Provider for an Event"""

def __init__(self, item, runtime=False, dependees=None, reasons=None, close_matches=None):
def __init__(self, item, runtime=False, dependees=None, reasons=[], close_matches=[]):
Event.__init__(self)
self._item = item
self._runtime = runtime
@@ -537,16 +497,6 @@ class TargetsTreeGenerated(Event):
Event.__init__(self)
self._model = model

class ReachableStamps(Event):
"""
An event listing all stamps reachable after parsing
which the metadata may use to clean up stale data
"""

def __init__(self, stamps):
Event.__init__(self)
self.stamps = stamps

class FilesMatchingFound(Event):
"""
Event when a list of files matching the supplied pattern has
@@ -624,10 +574,7 @@ class LogHandler(logging.Handler):
etype, value, tb = record.exc_info
if hasattr(tb, 'tb_next'):
tb = list(bb.exceptions.extract_traceback(tb, context=3))
# Need to turn the value into something the logging system can pickle
record.bb_exc_info = (etype, value, tb)
record.bb_exc_formatted = bb.exceptions.format_exception(etype, value, tb, limit=5)
value = str(value)
record.exc_info = None
fire(record, None)

@@ -658,33 +605,6 @@ class MetadataEvent(Event):
self.type = eventtype
self._localdata = eventdata

class ProcessStarted(Event):
"""
Generic process started event (usually part of the initial startup)
where further progress events will be delivered
"""
def __init__(self, processname, total):
Event.__init__(self)
self.processname = processname
self.total = total

class ProcessProgress(Event):
"""
Generic process progress event (usually part of the initial startup)
"""
def __init__(self, processname, progress):
Event.__init__(self)
self.processname = processname
self.progress = progress

class ProcessFinished(Event):
"""
Generic process finished event (usually part of the initial startup)
"""
def __init__(self, processname):
Event.__init__(self)
self.processname = processname

class SanityCheck(Event):
"""
Event to run sanity checks, either raise errors or generate events as return status.
@@ -1,4 +1,4 @@

from __future__ import absolute_import
import inspect
import traceback
import bb.namedtuple_with_abc
@@ -86,6 +86,6 @@ def format_exception(etype, value, tb, context=1, limit=None, formatter=None):

def to_string(exc):
if isinstance(exc, SystemExit):
if not isinstance(exc.code, str):
if not isinstance(exc.code, basestring):
return 'Exited with "%d"' % exc.code
return str(exc)
File diff suppressed because it is too large
@@ -88,25 +88,28 @@ class Bzr(FetchMethod):
bzrcmd = self._buildbzrcommand(ud, d, "update")
logger.debug(1, "BZR Update %s", ud.url)
bb.fetch2.check_network_access(d, bzrcmd, ud.url)
runfetchcmd(bzrcmd, d, workdir=os.path.join(ud.pkgdir, os.path.basename(ud.path)))
os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path)))
runfetchcmd(bzrcmd, d)
else:
bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True)
bzrcmd = self._buildbzrcommand(ud, d, "fetch")
bb.fetch2.check_network_access(d, bzrcmd, ud.url)
logger.debug(1, "BZR Checkout %s", ud.url)
bb.utils.mkdirhier(ud.pkgdir)
os.chdir(ud.pkgdir)
logger.debug(1, "Running %s", bzrcmd)
runfetchcmd(bzrcmd, d, workdir=ud.pkgdir)
runfetchcmd(bzrcmd, d)

os.chdir(ud.pkgdir)

scmdata = ud.parm.get("scmdata", "")
if scmdata == "keep":
tar_flags = ""
else:
tar_flags = "--exclude='.bzr' --exclude='.bzrtags'"
tar_flags = "--exclude '.bzr' --exclude '.bzrtags'"

# tar them up to a defined filename
runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)),
d, cleanup=[ud.localpath], workdir=ud.pkgdir)
runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d, cleanup = [ud.localpath])

def supports_srcrev(self):
return True
@@ -9,7 +9,7 @@ Usage in the recipe:

SRC_URI = "ccrc://cc.example.org/ccrc;vob=/example_vob;module=/example_module"
SRCREV = "EXAMPLE_CLEARCASE_TAG"
PV = "${@d.getVar("SRCREV", False).replace("/", "+")}"
PV = "${@d.getVar("SRCREV").replace("/", "+")}"

The fetcher uses the rcleartool or cleartool remote client, depending on which one is available.

@@ -113,7 +113,7 @@ class ClearCase(FetchMethod):
if data.getVar("SRCREV", d, True) == "INVALID":
raise FetchError("Set a valid SRCREV for the clearcase fetcher in your recipe, e.g. SRCREV = \"/main/LATEST\" or any other label of your choice.")

ud.label = d.getVar("SRCREV", False)
ud.label = d.getVar("SRCREV")
ud.customspec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC", True)

ud.server = "%s://%s%s" % (ud.proto, ud.host, ud.path)
@@ -202,10 +202,11 @@ class ClearCase(FetchMethod):

def _remove_view(self, ud, d):
if os.path.exists(ud.viewdir):
os.chdir(ud.ccasedir)
cmd = self._build_ccase_command(ud, 'rmview');
logger.info("cleaning up [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
bb.fetch2.check_network_access(d, cmd, ud.url)
output = runfetchcmd(cmd, d, workdir=ud.ccasedir)
output = runfetchcmd(cmd, d)
logger.info("rmview output: %s", output)

def need_update(self, ud, d):
@@ -240,10 +241,11 @@ class ClearCase(FetchMethod):
raise e

# Set configspec: Setting the configspec effectively fetches the files as defined in the configspec
os.chdir(ud.viewdir)
cmd = self._build_ccase_command(ud, 'setcs');
logger.info("fetching data [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
bb.fetch2.check_network_access(d, cmd, ud.url)
output = runfetchcmd(cmd, d, workdir=ud.viewdir)
output = runfetchcmd(cmd, d)
logger.info("%s", output)

# Copy the configspec to the viewdir so we have it in our source tarball later
@@ -123,23 +123,22 @@ class Cvs(FetchMethod):
pkg = d.getVar('PN', True)
pkgdir = os.path.join(d.getVar('CVSDIR', True), pkg)
moddir = os.path.join(pkgdir, localdir)
workdir = None
if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
logger.info("Update " + ud.url)
bb.fetch2.check_network_access(d, cvsupdatecmd, ud.url)
# update sources there
workdir = moddir
os.chdir(moddir)
cmd = cvsupdatecmd
else:
logger.info("Fetch " + ud.url)
# check out sources there
bb.utils.mkdirhier(pkgdir)
workdir = pkgdir
os.chdir(pkgdir)
logger.debug(1, "Running %s", cvscmd)
bb.fetch2.check_network_access(d, cvscmd, ud.url)
cmd = cvscmd

runfetchcmd(cmd, d, cleanup=[moddir], workdir=workdir)
runfetchcmd(cmd, d, cleanup = [moddir])

if not os.access(moddir, os.R_OK):
raise FetchError("Directory %s was not readable despite successful fetch?!" % moddir, ud.url)
@@ -148,18 +147,18 @@ class Cvs(FetchMethod):
if scmdata == "keep":
tar_flags = ""
else:
tar_flags = "--exclude='CVS'"
tar_flags = "--exclude 'CVS'"

# tar them up to a defined filename
workdir = None
if 'fullpath' in ud.parm:
workdir = pkgdir
os.chdir(pkgdir)
cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir)
else:
workdir = os.path.dirname(os.path.realpath(moddir))
os.chdir(moddir)
os.chdir('..')
cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir))

runfetchcmd(cmd, d, cleanup=[ud.localpath], workdir=workdir)
runfetchcmd(cmd, d, cleanup = [ud.localpath])

def clean(self, ud, d):
""" Clean CVS Files and tarballs """
@@ -49,10 +49,6 @@ Supported SRC_URI options are:
   referring to commit which is valid in tag instead of branch.
   The default is "0", set nobranch=1 if needed.

- usehead
   For local git:// urls to use the current branch HEAD as the revision for use with
   AUTOREV. Implies nobranch.

"""

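# A sketch of how the usehead option might appear in a recipe; as the
# ParameterError below enforces, it is only accepted for local repositories
# fetched with protocol=file:
#
#   SRC_URI = "git:///path/to/local/repo;protocol=file;usehead=1"
#   SRCREV = "${AUTOREV}"
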
#Copyright (C) 2005 Richard Purdie
@@ -70,58 +66,14 @@ Supported SRC_URI options are:
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import errno
import os
import re
import bb
import errno
import bb.progress
from bb import data
from bb.fetch2 import FetchMethod
from bb.fetch2 import runfetchcmd
from bb.fetch2 import logger

class GitProgressHandler(bb.progress.LineFilterProgressHandler):
    """Extract progress information from git output"""
    def __init__(self, d):
        self._buffer = ''
        self._count = 0
        super(GitProgressHandler, self).__init__(d)
        # Send an initial progress event so the bar gets shown
        self._fire_progress(-1)

    def write(self, string):
        self._buffer += string
        stages = ['Counting objects', 'Compressing objects', 'Receiving objects', 'Resolving deltas']
        stage_weights = [0.2, 0.05, 0.5, 0.25]
        stagenum = 0
        for i, stage in reversed(list(enumerate(stages))):
            if stage in self._buffer:
                stagenum = i
                self._buffer = ''
                break
        self._status = stages[stagenum]
        percs = re.findall(r'(\d+)%', string)
        if percs:
            progress = int(round((int(percs[-1]) * stage_weights[stagenum]) + (sum(stage_weights[:stagenum]) * 100)))
            rates = re.findall(r'([\d.]+ [a-zA-Z]*/s+)', string)
            if rates:
                rate = rates[-1]
            else:
                rate = None
            self.update(progress, rate)
        else:
            if stagenum == 0:
                percs = re.findall(r': (\d+)', string)
                if percs:
                    count = int(percs[-1])
                    if count > self._count:
                        self._count = count
                        self._fire_progress(-count)
        super(GitProgressHandler, self).write(string)

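# To make the stage weighting in write() concrete: each git stage owns a fixed
# share of the overall bar (20%, 5%, 50%, 25%), and the per-stage percentage is
# scaled into that share. A small worked sketch with invented input:
#
#   stage_weights = [0.2, 0.05, 0.5, 0.25]
#   stagenum, perc = 2, 40        # "Receiving objects:  40%"
#   progress = int(round(perc * stage_weights[stagenum]
#                        + sum(stage_weights[:stagenum]) * 100))
#   # 40 * 0.5 + (0.2 + 0.05) * 100  ->  45
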
class Git(FetchMethod):
    """Class to fetch a module or modules from git repositories"""
    def init(self, d):
@@ -157,13 +109,6 @@ class Git(FetchMethod):

        ud.nobranch = ud.parm.get("nobranch","0") == "1"

        # usehead implies nobranch
        ud.usehead = ud.parm.get("usehead","0") == "1"
        if ud.usehead:
            if ud.proto != "file":
                raise bb.fetch2.ParameterError("The usehead option is only for use with local ('protocol=file') git repositories", ud.url)
            ud.nobranch = 1

        # bareclone implies nocheckout
        ud.bareclone = ud.parm.get("bareclone","0") == "1"
        if ud.bareclone:
@@ -179,9 +124,6 @@ class Git(FetchMethod):
            ud.branches[name] = branch
            ud.unresolvedrev[name] = branch

        if ud.usehead:
            ud.unresolvedrev['default'] = 'HEAD'

        ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git -c core.fsyncobjectfiles=0"

        ud.write_tarballs = ((data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) or "0") != "0") or ud.rebaseable
@@ -195,10 +137,7 @@ class Git(FetchMethod):
                ud.unresolvedrev[name] = ud.revisions[name]
                ud.revisions[name] = self.latest_revision(ud, d, name)

        gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.'))
        if gitsrcname.startswith('.'):
            gitsrcname = gitsrcname[1:]

        gitsrcname = '%s%s' % (ud.host.replace(':','.'), ud.path.replace('/', '.').replace('*', '.'))
        # for rebaseable git repo, it is necessary to keep mirror tar ball
        # per revision, so that even the revision disappears from the
        # upstream repo in the future, the mirror will remain intact and still
@@ -219,8 +158,9 @@
    def need_update(self, ud, d):
        if not os.path.exists(ud.clonedir):
            return True
        os.chdir(ud.clonedir)
        for name in ud.names:
            if not self._contains_ref(ud, d, name, ud.clonedir):
            if not self._contains_ref(ud, d, name):
                return True
        if ud.write_tarballs and not os.path.exists(ud.fullmirror):
            return True
@@ -238,62 +178,67 @@
    def download(self, ud, d):
        """Fetch url"""

        if ud.user:
            username = ud.user + '@'
        else:
            username = ""

        ud.repochanged = not os.path.exists(ud.fullmirror)

        # If the checkout doesn't exist and the mirror tarball does, extract it
        if not os.path.exists(ud.clonedir) and os.path.exists(ud.fullmirror):
            bb.utils.mkdirhier(ud.clonedir)
            runfetchcmd("tar -xzf %s" % (ud.fullmirror), d, workdir=ud.clonedir)
            os.chdir(ud.clonedir)
            runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)

        repourl = self._get_repo_url(ud)
        repourl = "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path)

        # If the repo still doesn't exist, fallback to cloning it
        if not os.path.exists(ud.clonedir):
            # We do this since git will use a "-l" option automatically for local urls where possible
            if repourl.startswith("file://"):
                repourl = repourl[7:]
            clone_cmd = "LANG=C %s clone --bare --mirror %s %s --progress" % (ud.basecmd, repourl, ud.clonedir)
            clone_cmd = "%s clone --bare --mirror %s %s" % (ud.basecmd, repourl, ud.clonedir)
            if ud.proto.lower() != 'file':
                bb.fetch2.check_network_access(d, clone_cmd)
            progresshandler = GitProgressHandler(d)
            runfetchcmd(clone_cmd, d, log=progresshandler)
            runfetchcmd(clone_cmd, d)

        os.chdir(ud.clonedir)
        # Update the checkout if needed
        needupdate = False
        for name in ud.names:
            if not self._contains_ref(ud, d, name, ud.clonedir):
            if not self._contains_ref(ud, d, name):
                needupdate = True
        if needupdate:
            try:
                runfetchcmd("%s remote rm origin" % ud.basecmd, d, workdir=ud.clonedir)
                runfetchcmd("%s remote rm origin" % ud.basecmd, d)
            except bb.fetch2.FetchError:
                logger.debug(1, "No Origin")

            runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, repourl), d, workdir=ud.clonedir)
            fetch_cmd = "LANG=C %s fetch -f --prune --progress %s refs/*:refs/*" % (ud.basecmd, repourl)
            runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, repourl), d)
            fetch_cmd = "%s fetch -f --prune %s refs/*:refs/*" % (ud.basecmd, repourl)
            if ud.proto.lower() != 'file':
                bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
            progresshandler = GitProgressHandler(d)
            runfetchcmd(fetch_cmd, d, log=progresshandler, workdir=ud.clonedir)
            runfetchcmd("%s prune-packed" % ud.basecmd, d, workdir=ud.clonedir)
            runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d, workdir=ud.clonedir)
            try:
                os.unlink(ud.fullmirror)
            except OSError as exc:
                if exc.errno != errno.ENOENT:
                    raise
            runfetchcmd(fetch_cmd, d)
            runfetchcmd("%s prune-packed" % ud.basecmd, d)
            runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
            ud.repochanged = True
        os.chdir(ud.clonedir)
        for name in ud.names:
            if not self._contains_ref(ud, d, name, ud.clonedir):
            if not self._contains_ref(ud, d, name):
                raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revisions[name], ud.branches[name]))

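# For orientation, the command strings built in download() expand to roughly
# the following for a hypothetical SRC_URI of git://git.example.com/foo with
# the default basecmd and no user:
#
#   clone_cmd: git -c core.fsyncobjectfiles=0 clone --bare --mirror git://git.example.com/foo <clonedir>
#   fetch_cmd: git -c core.fsyncobjectfiles=0 fetch -f --prune git://git.example.com/foo refs/*:refs/*
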
    def build_mirror_data(self, ud, d):
        # Generate a mirror tarball if needed
        if ud.write_tarballs and not os.path.exists(ud.fullmirror):
        if ud.write_tarballs and (ud.repochanged or not os.path.exists(ud.fullmirror)):
            # it's possible that this symlink points to read-only filesystem with PREMIRROR
            if os.path.islink(ud.fullmirror):
                os.unlink(ud.fullmirror)

            os.chdir(ud.clonedir)
            logger.info("Creating tarball of git repository")
            runfetchcmd("tar -czf %s %s" % (ud.fullmirror, os.path.join(".") ), d, workdir=ud.clonedir)
            runfetchcmd("touch %s.done" % (ud.fullmirror), d, workdir=ud.clonedir)
            runfetchcmd("tar -czf %s %s" % (ud.fullmirror, os.path.join(".") ), d)
            runfetchcmd("touch %s.done" % (ud.fullmirror), d)

    def unpack(self, ud, destdir, d):
        """ unpack the downloaded src to destdir"""
@@ -315,23 +260,30 @@ class Git(FetchMethod):
        if ud.bareclone:
            cloneflags += " --mirror"

        runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, cloneflags, ud.clonedir, destdir), d)
        repourl = self._get_repo_url(ud)
        runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, repourl), d, workdir=destdir)
        if not ud.nocheckout:
            if subdir != "":
                runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d,
                            workdir=destdir)
                runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d, workdir=destdir)
            elif not ud.nobranch:
                branchname = ud.branches[ud.names[0]]
                runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \
                            ud.revisions[ud.names[0]]), d, workdir=destdir)
                runfetchcmd("%s branch --set-upstream %s origin/%s" % (ud.basecmd, branchname, \
                            branchname), d, workdir=destdir)
            else:
                runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=destdir)
        # Versions of git prior to 1.7.9.2 have issues where foo.git and foo get confused
        # and you end up with some horrible union of the two when you attempt to clone it
        # The least invasive workaround seems to be a symlink to the real directory to
        # fool git into ignoring any .git version that may also be present.
        #
        # The issue is fixed in more recent versions of git so we can drop this hack in future
        # when that version becomes common enough.
        clonedir = ud.clonedir
        if not ud.path.endswith(".git"):
            indirectiondir = destdir[:-1] + ".indirectionsymlink"
            if os.path.exists(indirectiondir):
                os.remove(indirectiondir)
            bb.utils.mkdirhier(os.path.dirname(indirectiondir))
            os.symlink(ud.clonedir, indirectiondir)
            clonedir = indirectiondir

        runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, cloneflags, clonedir, destdir), d)
        if not ud.nocheckout:
            os.chdir(destdir)
            if subdir != "":
                runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d)
                runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d)
            else:
                runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d)
        return True

    def clean(self, ud, d):
@@ -344,7 +296,7 @@ class Git(FetchMethod):
    def supports_srcrev(self):
        return True

    def _contains_ref(self, ud, d, name, wd):
    def _contains_ref(self, ud, d, name):
        cmd = ""
        if ud.nobranch:
            cmd = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (
@@ -353,23 +305,13 @@ class Git(FetchMethod):
            cmd = "%s branch --contains %s --list %s 2> /dev/null | wc -l" % (
                ud.basecmd, ud.revisions[name], ud.branches[name])
        try:
            output = runfetchcmd(cmd, d, quiet=True, workdir=wd)
            output = runfetchcmd(cmd, d, quiet=True)
        except bb.fetch2.FetchError:
            return False
        if len(output.split()) > 1:
            raise bb.fetch2.FetchError("The command '%s' unexpectedly gave output with more than one line, output: '%s'" % (cmd, output))
        return output.split()[0] != "0"

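# Concretely, for a revision pinned to a branch, _contains_ref runs a pipeline
# along these lines (hypothetical SHA and branch) and treats a non-zero count
# as "ref is present":
#
#   git -c core.fsyncobjectfiles=0 branch --contains deadbeef --list master 2> /dev/null | wc -l
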
    def _get_repo_url(self, ud):
        """
        Return the repository URL
        """
        if ud.user:
            username = ud.user + '@'
        else:
            username = ""
        return "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path)

    def _revision_key(self, ud, d, name):
        """
        Return a unique key for the url
@@ -380,9 +322,13 @@
        """
        Run git ls-remote with the specified search string
        """
        repourl = self._get_repo_url(ud)
        cmd = "%s ls-remote %s %s" % \
              (ud.basecmd, repourl, search)
        if ud.user:
            username = ud.user + '@'
        else:
            username = ""

        cmd = "%s ls-remote %s://%s%s%s %s" % \
              (ud.basecmd, ud.proto, username, ud.host, ud.path, search)
        if ud.proto.lower() != 'file':
            bb.fetch2.check_network_access(d, cmd)
        output = runfetchcmd(cmd, d, True)
@@ -396,19 +342,17 @@
        """
        output = self._lsremote(ud, d, "")
        # Tags of the form ^{} may not work, need to fallback to other form
        if ud.unresolvedrev[name][:5] == "refs/" or ud.usehead:
        if ud.unresolvedrev[name][:5] == "refs/":
            head = ud.unresolvedrev[name]
            tag = ud.unresolvedrev[name]
        else:
            head = "refs/heads/%s" % ud.unresolvedrev[name]
            tag = "refs/tags/%s" % ud.unresolvedrev[name]
        for s in [head, tag + "^{}", tag]:
            for l in output.strip().split('\n'):
                sha1, ref = l.split()
                if s == ref:
                    return sha1
        raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output for %s" % \
            (ud.unresolvedrev[name], ud.host+ud.path))
            for l in output.split('\n'):
                if s in l:
                    return l.split()[0]
        raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output" % ud.unresolvedrev[name])

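# To illustrate the resolution order above with invented ls-remote output: for
# an unresolved revision of "master", refs/heads/master is tried first, then
# the dereferenced tag (refs/tags/...^{}) and finally the plain tag:
#
#   aaaaaaa... refs/heads/master      <- matched first for a branch name
#   ccccccc... refs/tags/1.0^{}       <- dereferenced commit, preferred over...
#   bbbbbbb... refs/tags/1.0          <- ...the plain tag object, tried last
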
    def latest_versionstring(self, ud, d):
        """
@@ -416,28 +360,25 @@
        by searching through the tags output of ls-remote, comparing
        versions and returning the highest match.
        """
        pupver = ('', '')

        tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX', True) or "(?P<pver>([0-9][\.|_]?)+)")
        try:
            output = self._lsremote(ud, d, "refs/tags/*")
        except bb.fetch2.FetchError or bb.fetch2.NetworkAccess:
            return pupver

        verstring = ""
        revision = ""
        tagregex = re.compile(d.getVar('GITTAGREGEX', True) or "(?P<pver>([0-9][\.|_]?)+)")
        try:
            output = self._lsremote(ud, d, "refs/tags/*^{}")
        except bb.fetch2.FetchError or bb.fetch2.NetworkAccess:
            return ""

        for line in output.split("\n"):
            if not line:
                break

            tag_head = line.split("/")[-1]
            line = line.split("/")[-1]
            # Ignore non-released branches
            m = re.search("(alpha|beta|rc|final)+", tag_head)
            m = re.search("(alpha|beta|rc|final)+", line)
            if m:
                continue

            # search for version in the line
            tag = tagregex.search(tag_head)
            tag = tagregex.search(line)
            if tag == None:
                continue

@@ -446,42 +387,14 @@

            if verstring and bb.utils.vercmp(("0", tag, ""), ("0", verstring, "")) < 0:
                continue

            verstring = tag
            revision = line.split()[0]
            pupver = (verstring, revision)

        return pupver
        return verstring

    def _build_revision(self, ud, d, name):
        return ud.revisions[name]

    def gitpkgv_revision(self, ud, d, name):
        """
        Return a sortable revision number by counting commits in the history
        Based on gitpkgv.bbclass in meta-openembedded
        """
        rev = self._build_revision(ud, d, name)
        localpath = ud.localpath
        rev_file = os.path.join(localpath, "oe-gitpkgv_" + rev)
        if not os.path.exists(localpath):
            commits = None
        else:
            if not os.path.exists(rev_file) or not os.path.getsize(rev_file):
                from pipes import quote
                commits = bb.fetch2.runfetchcmd(
                        "git rev-list %s -- | wc -l" % (quote(rev)),
                        d, quiet=True).strip().lstrip('0')
                if commits:
                    open(rev_file, "w").write("%d\n" % int(commits))
            else:
                commits = open(rev_file, "r").readline(128).strip()
        if commits:
            return False, "%s+%s" % (commits, rev[:7])
        else:
            return True, str(rev)

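# The commit-count format above yields version components that sort numerically
# across upstream history. A sketch with invented values:
#
#   commits, rev = "1432", "deadbeef0123456789abcdef0123456789abcdef"
#   "%s+%s" % (commits, rev[:7])  ->  '1432+deadbee'
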
    def checkstatus(self, fetch, ud, d):
    def checkstatus(self, ud, d):
        try:
            self._lsremote(ud, d, "")
            return True

@@ -34,42 +34,43 @@ class GitANNEX(Git):
        """
        return ud.type in ['gitannex']

    def uses_annex(self, ud, d, wd):
    def uses_annex(self, ud, d):
        for name in ud.names:
            try:
                runfetchcmd("%s rev-list git-annex" % (ud.basecmd), d, quiet=True, workdir=wd)
                runfetchcmd("%s rev-list git-annex" % (ud.basecmd), d, quiet=True)
                return True
            except bb.fetch.FetchError:
                pass

        return False

    def update_annex(self, ud, d, wd):
    def update_annex(self, ud, d):
        try:
            runfetchcmd("%s annex get --all" % (ud.basecmd), d, quiet=True, workdir=wd)
            runfetchcmd("%s annex get --all" % (ud.basecmd), d, quiet=True)
        except bb.fetch.FetchError:
            return False
        runfetchcmd("chmod u+w -R %s/annex" % (ud.clonedir), d, quiet=True, workdir=wd)
        runfetchcmd("chmod u+w -R %s/annex" % (ud.clonedir), d, quiet=True)

        return True

    def download(self, ud, d):
        Git.download(self, ud, d)

        annex = self.uses_annex(ud, d, ud.clonedir)
        os.chdir(ud.clonedir)
        annex = self.uses_annex(ud, d)
        if annex:
            self.update_annex(ud, d, ud.clonedir)
            self.update_annex(ud, d)

    def unpack(self, ud, destdir, d):
        Git.unpack(self, ud, destdir, d)

        os.chdir(ud.destdir)
        try:
            runfetchcmd("%s annex init" % (ud.basecmd), d, workdir=ud.destdir)
            runfetchcmd("%s annex sync" % (ud.basecmd), d)
        except bb.fetch.FetchError:
            pass

        annex = self.uses_annex(ud, d, ud.destdir)
        annex = self.uses_annex(ud, d)
        if annex:
            runfetchcmd("%s annex get" % (ud.basecmd), d, workdir=ud.destdir)
            runfetchcmd("chmod u+w -R %s/.git/annex" % (ud.destdir), d, quiet=True, workdir=ud.destdir)

            runfetchcmd("%s annex get" % (ud.basecmd), d)
            runfetchcmd("chmod u+w -R %s/.git/annex" % (ud.destdir), d, quiet=True)

@@ -43,10 +43,10 @@ class GitSM(Git):
        """
        return ud.type in ['gitsm']

    def uses_submodules(self, ud, d, wd):
    def uses_submodules(self, ud, d):
        for name in ud.names:
            try:
                runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True, workdir=wd)
                runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True)
                return True
            except bb.fetch.FetchError:
                pass
@@ -107,25 +107,30 @@ class GitSM(Git):
        os.mkdir(tmpclonedir)
        os.rename(ud.clonedir, gitdir)
        runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*true/bare = false/'", d)
        runfetchcmd(ud.basecmd + " reset --hard", d, workdir=tmpclonedir)
        runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d, workdir=tmpclonedir)
        runfetchcmd(ud.basecmd + " submodule update --init --recursive", d, workdir=tmpclonedir)
        os.chdir(tmpclonedir)
        runfetchcmd(ud.basecmd + " reset --hard", d)
        runfetchcmd(ud.basecmd + " submodule init", d)
        runfetchcmd(ud.basecmd + " submodule update", d)
        self._set_relative_paths(tmpclonedir)
        runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*false/bare = true/'", d, workdir=tmpclonedir)
        runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*false/bare = true/'", d)
        os.rename(gitdir, ud.clonedir)
        bb.utils.remove(tmpclonedir, True)

    def download(self, ud, d):
        Git.download(self, ud, d)

        submodules = self.uses_submodules(ud, d, ud.clonedir)
        os.chdir(ud.clonedir)
        submodules = self.uses_submodules(ud, d)
        if submodules:
            self.update_submodules(ud, d)

    def unpack(self, ud, destdir, d):
        Git.unpack(self, ud, destdir, d)

        submodules = self.uses_submodules(ud, d, ud.destdir)
        os.chdir(ud.destdir)
        submodules = self.uses_submodules(ud, d)
        if submodules:
            runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d, workdir=ud.destdir)
            runfetchcmd(ud.basecmd + " submodule update --init --recursive", d, workdir=ud.destdir)
            runfetchcmd("cp -r " + ud.clonedir + "/modules " + ud.destdir + "/.git/", d)
            runfetchcmd(ud.basecmd + " submodule init", d)
            runfetchcmd(ud.basecmd + " submodule update", d)

@@ -28,7 +28,6 @@ import os
import sys
import logging
import bb
import errno
from bb import data
from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
@@ -44,13 +43,6 @@ class Hg(FetchMethod):
        """
        return ud.type in ['hg']

    def supports_checksum(self, urldata):
        """
        Don't require checksums for local archives created from
        repository checkouts.
        """
        return False

    def urldata_init(self, ud, d):
        """
        init hg specific variable within url data
@@ -60,12 +52,10 @@ class Hg(FetchMethod):

        ud.module = ud.parm["module"]

        if 'protocol' in ud.parm:
            ud.proto = ud.parm['protocol']
        elif not ud.host:
            ud.proto = 'file'
        else:
            ud.proto = "hg"
        # Create paths to mercurial checkouts
        relpath = self._strip_leading_slashes(ud.path)
        ud.pkgdir = os.path.join(data.expand('${HGDIR}', d), ud.host, relpath)
        ud.moddir = os.path.join(ud.pkgdir, ud.module)

        ud.setup_revisons(d)

@@ -74,19 +64,7 @@ class Hg(FetchMethod):
        elif not ud.revision:
            ud.revision = self.latest_revision(ud, d)

        # Create paths to mercurial checkouts
        hgsrcname = '%s_%s_%s' % (ud.module.replace('/', '.'), \
                        ud.host, ud.path.replace('/', '.'))
        ud.mirrortarball = 'hg_%s.tar.gz' % hgsrcname
        ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)

        hgdir = d.getVar("HGDIR", True) or (d.getVar("DL_DIR", True) + "/hg/")
        ud.pkgdir = os.path.join(hgdir, hgsrcname)
        ud.moddir = os.path.join(ud.pkgdir, ud.module)
        ud.localfile = ud.moddir
        ud.basecmd = data.getVar("FETCHCMD_hg", d, True) or "/usr/bin/env hg"

        ud.write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS", True)
        ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)

    def need_update(self, ud, d):
        revTag = ud.parm.get('rev', 'tip')
@@ -96,21 +74,14 @@ class Hg(FetchMethod):
            return True
        return False

    def try_premirror(self, ud, d):
        # If we don't do this, updating an existing checkout with only premirrors
        # is not possible
        if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None:
            return True
        if os.path.exists(ud.moddir):
            return False
        return True

    def _buildhgcommand(self, ud, d, command):
        """
        Build up an hg commandline based on ud
        command is "fetch", "update", "info"
        """

        basecmd = data.expand('${FETCHCMD_hg}', d)

        proto = ud.parm.get('protocol', 'http')

        host = ud.host
@@ -127,7 +98,7 @@ class Hg(FetchMethod):
            hgroot = ud.user + "@" + host + ud.path

        if command == "info":
            return "%s identify -i %s://%s/%s" % (ud.basecmd, proto, hgroot, ud.module)
            return "%s identify -i %s://%s/%s" % (basecmd, proto, hgroot, ud.module)

        options = []

@@ -140,22 +111,22 @@ class Hg(FetchMethod):

        if command == "fetch":
            if ud.user and ud.pswd:
                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" clone %s %s://%s/%s %s" % (ud.basecmd, ud.user, ud.pswd, proto, " ".join(options), proto, hgroot, ud.module, ud.module)
                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" clone %s %s://%s/%s %s" % (basecmd, ud.user, ud.pswd, proto, " ".join(options), proto, hgroot, ud.module, ud.module)
            else:
                cmd = "%s clone %s %s://%s/%s %s" % (ud.basecmd, " ".join(options), proto, hgroot, ud.module, ud.module)
                cmd = "%s clone %s %s://%s/%s %s" % (basecmd, " ".join(options), proto, hgroot, ud.module, ud.module)
        elif command == "pull":
            # do not pass options list; limiting pull to rev causes the local
            # repo not to contain it and immediately following "update" command
            # will crash
            if ud.user and ud.pswd:
                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" pull" % (ud.basecmd, ud.user, ud.pswd, proto)
                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" pull" % (basecmd, ud.user, ud.pswd, proto)
            else:
                cmd = "%s pull" % (ud.basecmd)
                cmd = "%s pull" % (basecmd)
        elif command == "update":
            if ud.user and ud.pswd:
                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" update -C %s" % (ud.basecmd, ud.user, ud.pswd, proto, " ".join(options))
                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" update -C %s" % (basecmd, ud.user, ud.pswd, proto, " ".join(options))
            else:
                cmd = "%s update -C %s" % (ud.basecmd, " ".join(options))
                cmd = "%s update -C %s" % (basecmd, " ".join(options))
        else:
            raise FetchError("Invalid hg command %s" % command, ud.url)

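# For a concrete picture of what _buildhgcommand produces, assume a
# hypothetical URL with host hg.example.org, path /repos, module foo and no
# credentials; the format strings above then expand to roughly:
#
#   info:   hg identify -i http://hg.example.org/repos/foo
#   fetch:  hg clone <options> http://hg.example.org/repos/foo foo
#   update: hg update -C <options>
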
@@ -166,53 +137,40 @@ class Hg(FetchMethod):

        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

        # If the checkout doesn't exist and the mirror tarball does, extract it
        if not os.path.exists(ud.pkgdir) and os.path.exists(ud.fullmirror):
            bb.utils.mkdirhier(ud.pkgdir)
            runfetchcmd("tar -xzf %s" % (ud.fullmirror), d, workdir=ud.pkgdir)

        if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
            # Found the source, check whether need pull
            updatecmd = self._buildhgcommand(ud, d, "update")
            updatecmd = self._buildhgcommand(ud, d, "pull")
            logger.info("Update " + ud.url)
            # update sources there
            os.chdir(ud.moddir)
            logger.debug(1, "Running %s", updatecmd)
            try:
                runfetchcmd(updatecmd, d, workdir=ud.moddir)
            except bb.fetch2.FetchError:
                # Running pull in the repo
                pullcmd = self._buildhgcommand(ud, d, "pull")
                logger.info("Pulling " + ud.url)
                # update sources there
                logger.debug(1, "Running %s", pullcmd)
                bb.fetch2.check_network_access(d, pullcmd, ud.url)
                runfetchcmd(pullcmd, d, workdir=ud.moddir)
                try:
                    os.unlink(ud.fullmirror)
                except OSError as exc:
                    if exc.errno != errno.ENOENT:
                        raise
            bb.fetch2.check_network_access(d, updatecmd, ud.url)
            runfetchcmd(updatecmd, d)

        # No source found, clone it.
        if not os.path.exists(ud.moddir):
        else:
            fetchcmd = self._buildhgcommand(ud, d, "fetch")
            logger.info("Fetch " + ud.url)
            # check out sources there
            bb.utils.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            logger.debug(1, "Running %s", fetchcmd)
            bb.fetch2.check_network_access(d, fetchcmd, ud.url)
            runfetchcmd(fetchcmd, d, workdir=ud.pkgdir)
            runfetchcmd(fetchcmd, d)

        # Even when we clone (fetch), we still need to update as hg's clone
        # won't checkout the specified revision if it's on a branch
        updatecmd = self._buildhgcommand(ud, d, "update")
        os.chdir(ud.moddir)
        logger.debug(1, "Running %s", updatecmd)
        runfetchcmd(updatecmd, d, workdir=ud.moddir)
        runfetchcmd(updatecmd, d)

    def clean(self, ud, d):
        """ Clean the hg dir """
        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude '.hg' --exclude '.hgtags'"

        bb.utils.remove(ud.localpath, True)
        bb.utils.remove(ud.fullmirror)
        bb.utils.remove(ud.fullmirror + ".done")
        os.chdir(ud.pkgdir)
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath])

    def supports_srcrev(self):
        return True
@@ -233,38 +191,3 @@ class Hg(FetchMethod):
        Return a unique key for the url
        """
        return "hg:" + ud.moddir

    def build_mirror_data(self, ud, d):
        # Generate a mirror tarball if needed
        if ud.write_tarballs == "1" and not os.path.exists(ud.fullmirror):
            # it's possible that this symlink points to read-only filesystem with PREMIRROR
            if os.path.islink(ud.fullmirror):
                os.unlink(ud.fullmirror)

            logger.info("Creating tarball of hg repository")
            runfetchcmd("tar -czf %s %s" % (ud.fullmirror, ud.module), d, workdir=ud.pkgdir)
            runfetchcmd("touch %s.done" % (ud.fullmirror), d, workdir=ud.pkgdir)

    def localpath(self, ud, d):
        return ud.pkgdir

    def unpack(self, ud, destdir, d):
        """
        Make a local clone or export for the url
        """

        revflag = "-r %s" % ud.revision
        subdir = ud.parm.get("destsuffix", ud.module)
        codir = "%s/%s" % (destdir, subdir)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata != "nokeep":
            if not os.access(os.path.join(codir, '.hg'), os.R_OK):
                logger.debug(2, "Unpack: creating new hg repository in '" + codir + "'")
                runfetchcmd("%s init %s" % (ud.basecmd, codir), d)
            logger.debug(2, "Unpack: updating source in '" + codir + "'")
            runfetchcmd("%s pull %s" % (ud.basecmd, ud.moddir), d, workdir=codir)
            runfetchcmd("%s up -C %s" % (ud.basecmd, revflag), d, workdir=codir)
        else:
            logger.debug(2, "Unpack: extracting source to '" + codir + "'")
            runfetchcmd("%s archive -t files %s %s" % (ud.basecmd, revflag, codir), d, workdir=ud.moddir)

@@ -26,7 +26,7 @@ BitBake build tools.
# Based on functions from the base bb module, Copyright 2003 Holger Schurig

import os
import urllib.request, urllib.parse, urllib.error
import urllib
import bb
import bb.utils
from bb import data
@@ -42,10 +42,9 @@ class Local(FetchMethod):

    def urldata_init(self, ud, d):
        # We don't set localfile as for this fetcher the file is already local!
        ud.decodedurl = urllib.parse.unquote(ud.url.split("://")[1].split(";")[0])
        ud.decodedurl = urllib.unquote(ud.url.split("://")[1].split(";")[0])
        ud.basename = os.path.basename(ud.decodedurl)
        ud.basepath = ud.decodedurl
        ud.needdonestamp = False
        return

    def localpath(self, urldata, d):
@@ -113,7 +112,7 @@ class Local(FetchMethod):

        return True

    def checkstatus(self, fetch, urldata, d):
    def checkstatus(self, urldata, d):
        """
        Check the status of the url
        """

@@ -1,293 +0,0 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' NPM implementation

The NPM fetcher is used to retrieve files from the npmjs repository

Usage in the recipe:

    SRC_URI = "npm://registry.npmjs.org/;name=${PN};version=${PV}"

Supported SRC_URI options are:

- name
- version

    npm://registry.npmjs.org/${PN}/-/${PN}-${PV}.tgz would become npm://registry.npmjs.org;name=${PN};version=${PV}

The fetcher triggers off the existence of ud.localpath. If that exists and has the ".done" stamp, it's assumed the fetch is good/done.

"""

import os
import sys
import urllib.request, urllib.parse, urllib.error
import json
import subprocess
import signal
import bb
from bb import data
from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import ChecksumError
from bb.fetch2 import runfetchcmd
from bb.fetch2 import logger
from bb.fetch2 import UnpackError
from bb.fetch2 import ParameterError
from distutils import spawn

def subprocess_setup():
    # Python installs a SIGPIPE handler by default. This is usually not what
    # non-Python subprocesses expect.
    # SIGPIPE errors are known issues with gzip/bash
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)

class Npm(FetchMethod):

    """Class to fetch urls via 'npm'"""
    def init(self, d):
        pass

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with npm
        """
        return ud.type in ['npm']

    def debug(self, msg):
        logger.debug(1, "NpmFetch: %s", msg)

    def clean(self, ud, d):
        logger.debug(2, "Calling cleanup %s" % ud.pkgname)
        bb.utils.remove(ud.localpath, False)
        bb.utils.remove(ud.pkgdatadir, True)
        bb.utils.remove(ud.fullmirror, False)

    def urldata_init(self, ud, d):
        """
        init NPM specific variable within url data
        """
        if 'downloadfilename' in ud.parm:
            ud.basename = ud.parm['downloadfilename']
        else:
            ud.basename = os.path.basename(ud.path)

        # can't call it ud.name otherwise fetcher base class will start doing sha1stuff
        # TODO: find a way to get an sha1/sha256 manifest of pkg & all deps
        ud.pkgname = ud.parm.get("name", None)
        if not ud.pkgname:
            raise ParameterError("NPM fetcher requires a name parameter", ud.url)
        ud.version = ud.parm.get("version", None)
        if not ud.version:
            raise ParameterError("NPM fetcher requires a version parameter", ud.url)
        ud.bbnpmmanifest = "%s-%s.deps.json" % (ud.pkgname, ud.version)
        ud.registry = "http://%s" % (ud.url.replace('npm://', '', 1).split(';'))[0]
        prefixdir = "npm/%s" % ud.pkgname
        ud.pkgdatadir = d.expand("${DL_DIR}/%s" % prefixdir)
        if not os.path.exists(ud.pkgdatadir):
            bb.utils.mkdirhier(ud.pkgdatadir)
        ud.localpath = d.expand("${DL_DIR}/npm/%s" % ud.bbnpmmanifest)

        self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -O -t 2 -T 30 -nv --passive-ftp --no-check-certificate "
        ud.prefixdir = prefixdir

        ud.write_tarballs = ((data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) or "0") != "0")
        ud.mirrortarball = 'npm_%s-%s.tar.xz' % (ud.pkgname, ud.version)
        ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)

    def need_update(self, ud, d):
        if os.path.exists(ud.localpath):
            return False
        return True

    def _runwget(self, ud, d, command, quiet):
        logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
        bb.fetch2.check_network_access(d, command)
        dldir = d.getVar("DL_DIR", True)
        runfetchcmd(command, d, quiet, workdir=dldir)

    def _unpackdep(self, ud, pkg, data, destdir, dldir, d):
        file = data[pkg]['tgz']
        logger.debug(2, "file to extract is %s" % file)
        if file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
            cmd = 'tar xz --strip 1 --no-same-owner --warning=no-unknown-keyword -f %s/%s' % (dldir, file)
        else:
            bb.fatal("NPM package %s downloaded not a tarball!" % file)

        # Change to subdir before executing command
        if not os.path.exists(destdir):
            os.makedirs(destdir)
        path = d.getVar('PATH', True)
        if path:
            cmd = "PATH=\"%s\" %s" % (path, cmd)
        bb.note("Unpacking %s to %s/" % (file, destdir))
        ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True, cwd=destdir)

        if ret != 0:
            raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), ud.url)

        if 'deps' not in data[pkg]:
            return
        for dep in data[pkg]['deps']:
            self._unpackdep(ud, dep, data[pkg]['deps'], "%s/node_modules/%s" % (destdir, dep), dldir, d)


    def unpack(self, ud, destdir, d):
        dldir = d.getVar("DL_DIR", True)
        depdumpfile = "%s-%s.deps.json" % (ud.pkgname, ud.version)
        with open("%s/npm/%s" % (dldir, depdumpfile)) as datafile:
            workobj = json.load(datafile)
        dldir = "%s/%s" % (os.path.dirname(ud.localpath), ud.pkgname)

        if 'subdir' in ud.parm:
            unpackdir = '%s/%s' % (destdir, ud.parm.get('subdir'))
        else:
            unpackdir = '%s/npmpkg' % destdir

        self._unpackdep(ud, ud.pkgname, workobj, unpackdir, dldir, d)

    def _parse_view(self, output):
        '''
        Parse the output of npm view --json; the last JSON result
        is assumed to be the one that we're interested in.
        '''
        pdata = None
        outdeps = {}
        datalines = []
        bracelevel = 0
        for line in output.splitlines():
            if bracelevel:
                datalines.append(line)
            elif '{' in line:
                datalines = []
                datalines.append(line)
            bracelevel = bracelevel + line.count('{') - line.count('}')
        if datalines:
            pdata = json.loads('\n'.join(datalines))
        return pdata

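# A usage sketch of the brace-counting parser above: npm view can emit several
# JSON documents in one run, and only the last complete one is decoded.
# With hypothetical output:
#
#   output = '{\n  "version": "1.0.0"\n}\n{\n  "version": "1.0.1"\n}\n'
#   self._parse_view(output)  ->  {'version': '1.0.1'}
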
    def _getdependencies(self, pkg, data, version, d, ud, optional=False, fetchedlist=None):
        if fetchedlist is None:
            fetchedlist = []
        pkgfullname = pkg
        if version != '*' and not '/' in version:
            pkgfullname += "@'%s'" % version
        logger.debug(2, "Calling getdeps on %s" % pkg)
        fetchcmd = "npm view %s --json --registry %s" % (pkgfullname, ud.registry)
        output = runfetchcmd(fetchcmd, d, True)
        pdata = self._parse_view(output)
        if not pdata:
            raise FetchError("The command '%s' returned no output" % fetchcmd)
        if optional:
            pkg_os = pdata.get('os', None)
            if pkg_os:
                if not isinstance(pkg_os, list):
                    pkg_os = [pkg_os]
                if 'linux' not in pkg_os or '!linux' in pkg_os:
                    logger.debug(2, "Skipping %s since it's incompatible with Linux" % pkg)
                    return
        #logger.debug(2, "Output URL is %s - %s - %s" % (ud.basepath, ud.basename, ud.localfile))
        outputurl = pdata['dist']['tarball']
        data[pkg] = {}
        data[pkg]['tgz'] = os.path.basename(outputurl)
        if not outputurl in fetchedlist:
            self._runwget(ud, d, "%s --directory-prefix=%s %s" % (self.basecmd, ud.prefixdir, outputurl), False)
            fetchedlist.append(outputurl)

        dependencies = pdata.get('dependencies', {})
        optionalDependencies = pdata.get('optionalDependencies', {})
        depsfound = {}
        optdepsfound = {}
        data[pkg]['deps'] = {}
        for dep in dependencies:
            if dep in optionalDependencies:
                optdepsfound[dep] = dependencies[dep]
            else:
                depsfound[dep] = dependencies[dep]
        for dep, version in optdepsfound.items():
            self._getdependencies(dep, data[pkg]['deps'], version, d, ud, optional=True, fetchedlist=fetchedlist)
        for dep, version in depsfound.items():
            self._getdependencies(dep, data[pkg]['deps'], version, d, ud, fetchedlist=fetchedlist)

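# The recursion above builds the nested manifest that _unpackdep later walks.
# As a sketch, a hypothetical package "foo" depending on "bar" produces:
#
#   {
#       "foo": {
#           "tgz": "foo-1.0.0.tgz",
#           "deps": {
#               "bar": {"tgz": "bar-2.1.0.tgz", "deps": {}}
#           }
#       }
#   }
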
    def _getshrinkeddependencies(self, pkg, data, version, d, ud, lockdown, manifest, toplevel=True):
        logger.debug(2, "NPM shrinkwrap file is %s" % data)
        if toplevel:
            name = data.get('name', None)
            if name and name != pkg:
                for obj in data.get('dependencies', []):
                    if obj == pkg:
                        self._getshrinkeddependencies(obj, data['dependencies'][obj], data['dependencies'][obj]['version'], d, ud, lockdown, manifest, False)
                        return
        outputurl = "invalid"
        if ('resolved' not in data) or (not data['resolved'].startswith('http')):
            # will be the case for ${PN}
            fetchcmd = "npm view %s@%s dist.tarball --registry %s" % (pkg, version, ud.registry)
            logger.debug(2, "Found this matching URL: %s" % str(fetchcmd))
            outputurl = runfetchcmd(fetchcmd, d, True)
        else:
            outputurl = data['resolved']
        self._runwget(ud, d, "%s --directory-prefix=%s %s" % (self.basecmd, ud.prefixdir, outputurl), False)
        manifest[pkg] = {}
        manifest[pkg]['tgz'] = os.path.basename(outputurl).rstrip()
        manifest[pkg]['deps'] = {}

        if pkg in lockdown:
            sha1_expected = lockdown[pkg][version]
            sha1_data = bb.utils.sha1_file("npm/%s/%s" % (ud.pkgname, manifest[pkg]['tgz']))
            if sha1_expected != sha1_data:
                msg = "\nFile: '%s' has %s checksum %s when %s was expected" % (manifest[pkg]['tgz'], 'sha1', sha1_data, sha1_expected)
                raise ChecksumError('Checksum mismatch!%s' % msg)
        else:
            logger.debug(2, "No lockdown data for %s@%s" % (pkg, version))

        if 'dependencies' in data:
            for obj in data['dependencies']:
                logger.debug(2, "Found dep is %s" % str(obj))
                self._getshrinkeddependencies(obj, data['dependencies'][obj], data['dependencies'][obj]['version'], d, ud, lockdown, manifest[pkg]['deps'], False)

    def download(self, ud, d):
        """Fetch url"""
        jsondepobj = {}
        shrinkobj = {}
        lockdown = {}

        if not os.listdir(ud.pkgdatadir) and os.path.exists(ud.fullmirror):
            dest = d.getVar("DL_DIR", True)
            bb.utils.mkdirhier(dest)
            runfetchcmd("tar -xJf %s" % (ud.fullmirror), d, workdir=dest)
            return

        shwrf = d.getVar('NPM_SHRINKWRAP', True)
        logger.debug(2, "NPM shrinkwrap file is %s" % shwrf)
        try:
            with open(shwrf) as datafile:
                shrinkobj = json.load(datafile)
        except:
            logger.warning('Missing shrinkwrap file in NPM_SHRINKWRAP for %s, this will lead to unreliable builds!' % ud.pkgname)
        lckdf = d.getVar('NPM_LOCKDOWN', True)
        logger.debug(2, "NPM lockdown file is %s" % lckdf)
        try:
            with open(lckdf) as datafile:
                lockdown = json.load(datafile)
        except:
            logger.warning('Missing lockdown file in NPM_LOCKDOWN for %s, this will lead to unreproducible builds!' % ud.pkgname)

        if ('name' not in shrinkobj):
            self._getdependencies(ud.pkgname, jsondepobj, ud.version, d, ud)
        else:
            self._getshrinkeddependencies(ud.pkgname, shrinkobj, ud.version, d, ud, lockdown, jsondepobj)

        with open(ud.localpath, 'w') as outfile:
            json.dump(jsondepobj, outfile)

    def build_mirror_data(self, ud, d):
        # Generate a mirror tarball if needed
        if ud.write_tarballs and not os.path.exists(ud.fullmirror):
            # it's possible that this symlink points to read-only filesystem with PREMIRROR
            if os.path.islink(ud.fullmirror):
                os.unlink(ud.fullmirror)

            dldir = d.getVar("DL_DIR", True)
            logger.info("Creating tarball of npm data")
            runfetchcmd("tar -cJf %s npm/%s npm/%s" % (ud.fullmirror, ud.bbnpmmanifest, ud.pkgname), d,
                        workdir=dldir)
            runfetchcmd("touch %s.done" % (ud.fullmirror), d, workdir=dldir)
@@ -34,13 +34,13 @@ class Osc(FetchMethod):

        # Create paths to osc checkouts
        relpath = self._strip_leading_slashes(ud.path)
        ud.pkgdir = os.path.join(d.getVar('OSCDIR', True), ud.host)
        ud.pkgdir = os.path.join(data.expand('${OSCDIR}', d), ud.host)
        ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)

        if 'rev' in ud.parm:
            ud.revision = ud.parm['rev']
        else:
            pv = d.getVar("PV", False)
            pv = data.getVar("PV", d, 0)
            rev = bb.fetch2.srcrev_internal_helper(ud, d)
            if rev and rev != True:
                ud.revision = rev
@@ -84,25 +84,27 @@ class Osc(FetchMethod):

        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

        if os.access(os.path.join(d.getVar('OSCDIR', True), ud.path, ud.module), os.R_OK):
        if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK):
            oscupdatecmd = self._buildosccommand(ud, d, "update")
            logger.info("Update " + ud.url)
            # update sources there
            os.chdir(ud.moddir)
            logger.debug(1, "Running %s", oscupdatecmd)
            bb.fetch2.check_network_access(d, oscupdatecmd, ud.url)
            runfetchcmd(oscupdatecmd, d, workdir=ud.moddir)
            runfetchcmd(oscupdatecmd, d)
        else:
            oscfetchcmd = self._buildosccommand(ud, d, "fetch")
            logger.info("Fetch " + ud.url)
            # check out sources there
            bb.utils.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            logger.debug(1, "Running %s", oscfetchcmd)
            bb.fetch2.check_network_access(d, oscfetchcmd, ud.url)
            runfetchcmd(oscfetchcmd, d, workdir=ud.pkgdir)
            runfetchcmd(oscfetchcmd, d)

        os.chdir(os.path.join(ud.pkgdir + ud.path))
        # tar them up to a defined filename
        runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d,
                    cleanup=[ud.localpath], workdir=os.path.join(ud.pkgdir + ud.path))
        runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d, cleanup = [ud.localpath])

    def supports_srcrev(self):
        return False
@@ -112,7 +114,7 @@ class Osc(FetchMethod):
        Generate a .oscrc to be used for this run.
        """

        config_path = os.path.join(d.getVar('OSCDIR', True), "oscrc")
        config_path = os.path.join(data.expand('${OSCDIR}', d), "oscrc")
        if (os.path.exists(config_path)):
            os.remove(config_path)

@@ -121,8 +123,8 @@ class Osc(FetchMethod):
            f.write("apisrv = %s\n" % ud.host)
            f.write("scheme = http\n")
            f.write("su-wrapper = su -c\n")
            f.write("build-root = %s\n" % d.getVar('WORKDIR', True))
            f.write("urllist = %s\n" % d.getVar("OSCURLLIST", True))
            f.write("build-root = %s\n" % data.expand('${WORKDIR}', d))
            f.write("urllist = http://moblin-obs.jf.intel.com:8888/build/%(project)s/%(repository)s/%(buildarch)s/:full/%(name)s.rpm\n")
            f.write("extra-pkgs = gzip\n")
            f.write("\n")
            f.write("[%s]\n" % ud.host)

@@ -1,12 +1,14 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' implementation for perforce
BitBake 'Fetch' implementations

Classes for obtaining upstream sources for the
BitBake build tools.

"""

# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2016 Kodak Alaris, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -23,7 +25,9 @@ BitBake 'Fetch' implementation for perforce
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig

from future_builtins import zip
import os
import subprocess
import logging
import bb
from bb import data
@@ -33,178 +37,151 @@ from bb.fetch2 import logger
from bb.fetch2 import runfetchcmd

class Perforce(FetchMethod):
    """ Class to fetch from perforce repositories """
    def supports(self, ud, d):
        """ Check to see if a given url can be fetched with perforce. """
        return ud.type in ['p4']

    def urldata_init(self, ud, d):
        """
        Initialize perforce specific variables within url data. If P4CONFIG is
        provided by the env, use it. If P4PORT is specified by the recipe, use
        its values, which may override the settings in P4CONFIG.
        """
        ud.basecmd = d.getVar('FETCHCMD_p4', True)
        if not ud.basecmd:
            ud.basecmd = "/usr/bin/env p4"

        ud.dldir = d.getVar('P4DIR', True)
        if not ud.dldir:
            ud.dldir = '%s/%s' % (d.getVar('DL_DIR', True), 'p4')

        path = ud.url.split('://')[1]
        path = path.split(';')[0]
        delim = path.find('@')
    def doparse(url, d):
        parm = {}
        path = url.split("://")[1]
        delim = path.find("@")
        if delim != -1:
            (ud.user, ud.pswd) = path.split('@')[0].split(':')
            ud.path = path.split('@')[1]
            (user, pswd, host, port) = path.split('@')[0].split(":")
            path = path.split('@')[1]
        else:
            ud.path = path
            (host, port) = d.getVar('P4PORT').split(':')
            user = ""
            pswd = ""

        ud.usingp4config = False
        p4port = d.getVar('P4PORT', True)
        if path.find(";") != -1:
            keys = []
            values = []
            plist = path.split(';')
            for item in plist:
                if item.count('='):
                    (key, value) = item.split('=')
                    keys.append(key)
                    values.append(value)

        if p4port:
            logger.debug(1, 'Using recipe provided P4PORT: %s' % p4port)
            ud.host = p4port
        else:
            logger.debug(1, 'Trying to use P4CONFIG to automatically set P4PORT...')
            ud.usingp4config = True
            p4cmd = '%s info | grep "Server address"' % ud.basecmd
            bb.fetch2.check_network_access(d, p4cmd)
            ud.host = runfetchcmd(p4cmd, d, True)
            ud.host = ud.host.split(': ')[1].strip()
            logger.debug(1, 'Determined P4PORT to be: %s' % ud.host)
            if not ud.host:
                raise FetchError('Could not determine P4PORT from P4CONFIG')

        if ud.path.find('/...') >= 0:
            ud.pathisdir = True
        else:
            ud.pathisdir = False
            parm = dict(zip(keys, values))
        path = "//" + path.split(';')[0]
        host += ":%s" % (port)
        parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm)

        cleanedpath = ud.path.replace('/...', '').replace('/', '.')
        cleanedhost = ud.host.replace(':', '.')
        ud.pkgdir = os.path.join(ud.dldir, cleanedhost, cleanedpath)
        return host, path, user, pswd, parm
    doparse = staticmethod(doparse)

        ud.setup_revisons(d)

        ud.localfile = data.expand('%s_%s_%s.tar.gz' % (cleanedhost, cleanedpath, ud.revision), d)

    def _buildp4command(self, ud, d, command, depot_filename=None):
        """
        Build a p4 commandline. Valid commands are "changes", "print", and
        "files". depot_filename is the full path to the file in the depot
        including the trailing '#rev' value.
        """
    def getcset(d, depot, host, user, pswd, parm):
        p4opt = ""
        if "cset" in parm:
            return parm["cset"]
        if user:
            p4opt += " -u %s" % (user)
        if pswd:
            p4opt += " -P %s" % (pswd)
        if host:
            p4opt += " -p %s" % (host)

        if ud.user:
            p4opt += ' -u "%s"' % (ud.user)
        p4date = d.getVar("P4DATE", True)
        if "revision" in parm:
            depot += "#%s" % (parm["revision"])
        elif "label" in parm:
            depot += "@%s" % (parm["label"])
        elif p4date:
            depot += "@%s" % (p4date)

        if ud.pswd:
            p4opt += ' -P "%s"' % (ud.pswd)
        p4cmd = d.getVar('FETCHCMD_p4', True) or "p4"
        logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
        p4file, errors = bb.process.run("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
        cset = p4file.strip()
        logger.debug(1, "READ %s", cset)
        if not cset:
            return -1

        if ud.host and not ud.usingp4config:
            p4opt += ' -p %s' % (ud.host)
        return cset.split(' ')[1]
    getcset = staticmethod(getcset)

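# getcset relies on the textual format of "p4 changes"; a sketch with an
# invented changelist shows why the second whitespace-separated token is the
# changeset number:
#
#   $ p4 changes -m 1 //depot/project/...@release_label
#   Change 123456 on 2015/04/01 by user@client 'commit description'
#
#   "Change 123456 on ...".split(' ')[1]  ->  '123456'
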
if hasattr(ud, 'revision') and ud.revision:
|
||||
pathnrev = '%s@%s' % (ud.path, ud.revision)
|
||||
def urldata_init(self, ud, d):
|
||||
(host, path, user, pswd, parm) = Perforce.doparse(ud.url, d)
|
||||
|
||||
base_path = path.replace('/...', '')
|
||||
base_path = self._strip_leading_slashes(base_path)
|
||||
|
||||
if "label" in parm:
|
||||
version = parm["label"]
|
||||
else:
|
||||
pathnrev = '%s' % (ud.path)
|
||||
version = Perforce.getcset(d, path, host, user, pswd, parm)
|
||||
|
||||
if depot_filename:
|
||||
if ud.pathisdir: # Remove leading path to obtain filename
|
||||
filename = depot_filename[len(ud.path)-1:]
|
||||
else:
|
||||
filename = depot_filename[depot_filename.rfind('/'):]
|
||||
filename = filename[:filename.find('#')] # Remove trailing '#rev'
|
||||
|
||||
if command == 'changes':
|
||||
p4cmd = '%s%s changes -m 1 //%s' % (ud.basecmd, p4opt, pathnrev)
|
||||
elif command == 'print':
|
||||
if depot_filename != None:
|
||||
p4cmd = '%s%s print -o "p4/%s" "%s"' % (ud.basecmd, p4opt, filename, depot_filename)
|
||||
else:
|
||||
raise FetchError('No depot file name provided to p4 %s' % command, ud.url)
|
||||
elif command == 'files':
|
||||
p4cmd = '%s%s files //%s' % (ud.basecmd, p4opt, pathnrev)
|
||||
else:
|
||||
raise FetchError('Invalid p4 command %s' % command, ud.url)
|
||||
|
||||
return p4cmd
|
||||
|
||||
def _p4listfiles(self, ud, d):
|
||||
"""
|
||||
Return a list of the file names which are present in the depot using the
|
||||
'p4 files' command, including trailing '#rev' file revision indicator
|
||||
"""
|
||||
p4cmd = self._buildp4command(ud, d, 'files')
|
||||
bb.fetch2.check_network_access(d, p4cmd)
|
||||
p4fileslist = runfetchcmd(p4cmd, d, True)
|
||||
p4fileslist = [f.rstrip() for f in p4fileslist.splitlines()]
|
||||
|
||||
if not p4fileslist:
|
||||
raise FetchError('Unable to fetch listing of p4 files from %s@%s' % (ud.host, ud.path))
|
||||
|
||||
count = 0
|
||||
filelist = []
|
||||
|
||||
for filename in p4fileslist:
|
||||
item = filename.split(' - ')
|
||||
lastaction = item[1].split()
|
||||
logger.debug(1, 'File: %s Last Action: %s' % (item[0], lastaction[0]))
|
||||
if lastaction[0] == 'delete':
|
||||
continue
|
||||
filelist.append(item[0])
|
||||
|
||||
return filelist
|

        ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base_path.replace('/', '.'), version), d)

    def download(self, ud, d):
        """ Get the list of files, fetch each one """
        filelist = self._p4listfiles(ud, d)
        if not filelist:
            raise FetchError('No files found in depot %s@%s' % (ud.host, ud.path))
        """
        Fetch urls
        """

        bb.utils.remove(ud.pkgdir, True)
        bb.utils.mkdirhier(ud.pkgdir)
        (host, depot, user, pswd, parm) = Perforce.doparse(ud.url, d)

        for afile in filelist:
            p4fetchcmd = self._buildp4command(ud, d, 'print', afile)
            bb.fetch2.check_network_access(d, p4fetchcmd)
            runfetchcmd(p4fetchcmd, d, workdir=ud.pkgdir)
        if depot.find('/...') != -1:
            path = depot[:depot.find('/...')]
        else:
            path = depot

        runfetchcmd('tar -czf %s p4' % (ud.localpath), d, cleanup=[ud.localpath], workdir=ud.pkgdir)
        module = parm.get('module', os.path.basename(path))

    def clean(self, ud, d):
        """ Cleanup p4 specific files and dirs"""
        bb.utils.remove(ud.localpath)
        bb.utils.remove(ud.pkgdir, True)
        # Get the p4 command
        p4opt = ""
        if user:
            p4opt += " -u %s" % (user)

    def supports_srcrev(self):
        return True
        if pswd:
            p4opt += " -P %s" % (pswd)

    def _revision_key(self, ud, d, name):
        """ Return a unique key for the url """
        return 'p4:%s' % ud.pkgdir
        if host:
            p4opt += " -p %s" % (host)

    def _latest_revision(self, ud, d, name):
        """ Return the latest upstream scm revision number """
        p4cmd = self._buildp4command(ud, d, "changes")
        bb.fetch2.check_network_access(d, p4cmd)
        tip = runfetchcmd(p4cmd, d, True)
        p4cmd = d.getVar('FETCHCMD_p4', True) or "p4"

        if not tip:
            raise FetchError('Could not determine the latest perforce changelist')
        # create temp directory
        logger.debug(2, "Fetch: creating temporary directory")
        bb.utils.mkdirhier(d.expand('${WORKDIR}'))
        mktemp = d.getVar("FETCHCMD_p4mktemp", True) or d.expand("mktemp -d -q '${WORKDIR}/oep4.XXXXXX'")
        tmpfile, errors = bb.process.run(mktemp)
        tmpfile = tmpfile.strip()
        if not tmpfile:
            raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", ud.url)

        tipcset = tip.split(' ')[1]
        logger.debug(1, 'p4 tip found to be changelist %s' % tipcset)
        return tipcset
        if "label" in parm:
            depot = "%s@%s" % (depot, parm["label"])
        else:
            cset = Perforce.getcset(d, depot, host, user, pswd, parm)
            depot = "%s@%s" % (depot, cset)

    def sortable_revision(self, ud, d, name):
        """ Return a sortable revision number """
        return False, self._build_revision(ud, d)
        os.chdir(tmpfile)
        logger.info("Fetch " + ud.url)
        logger.info("%s%s files %s", p4cmd, p4opt, depot)
        p4file, errors = bb.process.run("%s%s files %s" % (p4cmd, p4opt, depot))
        p4file = [f.rstrip() for f in p4file.splitlines()]

    def _build_revision(self, ud, d):
        return ud.revision
        if not p4file:
            raise FetchError("Fetch: unable to get the P4 files from %s" % depot, ud.url)

        count = 0

        for file in p4file:
            list = file.split()

            if list[2] == "delete":
                continue

            dest = list[0][len(path)+1:]
            where = dest.find("#")

            subprocess.call("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]), shell=True)
            count = count + 1

        if count == 0:
            logger.error("Fetch: No files gathered from the P4 fetch")
            raise FetchError("Fetch: No files gathered from the P4 fetch", ud.url)

        runfetchcmd("tar -czf %s %s" % (ud.localpath, module), d, cleanup = [ud.localpath])
        # cleanup
        bb.utils.prunedir(tmpfile)

@@ -69,23 +69,24 @@ class Repo(FetchMethod):
        else:
            username = ""

        repodir = os.path.join(codir, "repo")
        bb.utils.mkdirhier(repodir)
        if not os.path.exists(os.path.join(repodir, ".repo")):
        bb.utils.mkdirhier(os.path.join(codir, "repo"))
        os.chdir(os.path.join(codir, "repo"))
        if not os.path.exists(os.path.join(codir, "repo", ".repo")):
            bb.fetch2.check_network_access(d, "repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), ud.url)
            runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d, workdir=repodir)
            runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d)

        bb.fetch2.check_network_access(d, "repo sync %s" % ud.url, ud.url)
        runfetchcmd("repo sync", d, workdir=repodir)
        runfetchcmd("repo sync", d)
        os.chdir(codir)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude='.repo' --exclude='.git'"
            tar_flags = "--exclude '.repo' --exclude '.git'"

        # Create a cache
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d, workdir=codir)
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d)

    def supports_srcrev(self):
        return False
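The scmdata handling above is a pattern shared across fetchers: unless the URL opts in, SCM metadata directories are excluded from the cached tarball. A hypothetical recipe URL opting in (the host and manifest are invented for illustration):

    SRC_URI = "repo://example.com/platform/manifest;protocol=https;branch=master;manifest=default.xml;scmdata=keep"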

@@ -61,7 +61,8 @@ SRC_URI = "sftp://user@host.example.com/dir/path.file.txt"

import os
import bb
import urllib.request, urllib.parse, urllib.error
import urllib
import commands
from bb import data
from bb.fetch2 import URI
from bb.fetch2 import FetchMethod
@@ -92,13 +93,13 @@ class SFTP(FetchMethod):
        else:
            ud.basename = os.path.basename(ud.path)

        ud.localfile = data.expand(urllib.parse.unquote(ud.basename), d)
        ud.localfile = data.expand(urllib.unquote(ud.basename), d)

    def download(self, ud, d):
        """Fetch urls"""

        urlo = URI(ud.url)
        basecmd = 'sftp -oBatchMode=yes'
        basecmd = 'sftp -oPasswordAuthentication=no'
        port = ''
        if urlo.port:
            port = '-P %d' % urlo.port
@@ -120,7 +121,8 @@ class SFTP(FetchMethod):

        remote = '%s%s:%s' % (user, urlo.hostname, path)

        cmd = '%s %s %s %s' % (basecmd, port, remote, lpath)
        cmd = '%s %s %s %s' % (basecmd, port, commands.mkarg(remote),
                               commands.mkarg(lpath))

        bb.fetch2.check_network_access(d, cmd, ud.url)
        runfetchcmd(cmd, d)

@@ -114,10 +114,12 @@ class SSH(FetchMethod):
        fr = host
        fr += ':%s' % path


        import commands
        cmd = 'scp -B -r %s %s %s/' % (
            portarg,
            fr,
            dldir
            commands.mkarg(fr),
            commands.mkarg(dldir)
        )

        bb.fetch2.check_network_access(d, cmd, urldata.url)

@@ -54,11 +54,6 @@ class Svn(FetchMethod):

        ud.module = ud.parm["module"]

        if not "path_spec" in ud.parm:
            ud.path_spec = ud.module
        else:
            ud.path_spec = ud.parm["path_spec"]

        # Create paths to svn checkouts
        relpath = self._strip_leading_slashes(ud.path)
        ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath)
@@ -107,7 +102,7 @@ class Svn(FetchMethod):

        if command == "fetch":
            transportuser = ud.parm.get("transportuser", "")
            svncmd = "%s co %s %s://%s%s/%s%s %s" % (ud.basecmd, " ".join(options), proto, transportuser, svnroot, ud.module, suffix, ud.path_spec)
            svncmd = "%s co %s %s://%s%s/%s%s %s" % (ud.basecmd, " ".join(options), proto, transportuser, svnroot, ud.module, suffix, ud.module)
        elif command == "update":
            svncmd = "%s update %s" % (ud.basecmd, " ".join(options))
        else:
@@ -126,32 +121,35 @@ class Svn(FetchMethod):
        if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
            svnupdatecmd = self._buildsvncommand(ud, d, "update")
            logger.info("Update " + ud.url)
            # update sources there
            os.chdir(ud.moddir)
            # We need to attempt to run svn upgrade first in case it's an older working format
            try:
                runfetchcmd(ud.basecmd + " upgrade", d, workdir=ud.moddir)
                runfetchcmd(ud.basecmd + " upgrade", d)
            except FetchError:
                pass
            logger.debug(1, "Running %s", svnupdatecmd)
            bb.fetch2.check_network_access(d, svnupdatecmd, ud.url)
            runfetchcmd(svnupdatecmd, d, workdir=ud.moddir)
            runfetchcmd(svnupdatecmd, d)
        else:
            svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
            logger.info("Fetch " + ud.url)
            # check out sources there
            bb.utils.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            logger.debug(1, "Running %s", svnfetchcmd)
            bb.fetch2.check_network_access(d, svnfetchcmd, ud.url)
            runfetchcmd(svnfetchcmd, d, workdir=ud.pkgdir)
            runfetchcmd(svnfetchcmd, d)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude='.svn'"
            tar_flags = "--exclude '.svn'"

        os.chdir(ud.pkgdir)
        # tar them up to a defined filename
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.path_spec), d,
                    cleanup=[ud.localpath], workdir=ud.pkgdir)
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath])

    def clean(self, ud, d):
        """ Clean SVN specific files and dirs """

@@ -31,37 +31,13 @@ import subprocess
import os
import logging
import bb
import bb.progress
import urllib.request, urllib.parse, urllib.error
import urllib
from bb import data
from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import logger
from bb.fetch2 import runfetchcmd
from bb.utils import export_proxies
from bs4 import BeautifulSoup
from bs4 import SoupStrainer

class WgetProgressHandler(bb.progress.LineFilterProgressHandler):
    """
    Extract progress information from wget output.
    Note: relies on --progress=dot (with -v or without -q/-nv) being
    specified on the wget command line.
    """
    def __init__(self, d):
        super(WgetProgressHandler, self).__init__(d)
        # Send an initial progress event so the bar gets shown
        self._fire_progress(0)

    def writeline(self, line):
        percs = re.findall(r'(\d+)%\s+([\d.]+[A-Z])', line)
        if percs:
            progress = int(percs[-1][0])
            rate = percs[-1][1] + '/s'
            self.update(progress, rate)
            return False
        return True
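To see what the writeline() regex extracts, a small runnable sketch against a typical wget --progress=dot line (the sample output line is illustrative):

    import re
    line = "  5950K .......... .......... ..........  98% 1.2M 0s"   # illustrative wget output
    percs = re.findall(r'(\d+)%\s+([\d.]+[A-Z])', line)
    # percs == [('98', '1.2M')]  ->  progress = 98, rate = '1.2M/s'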


class Wget(FetchMethod):
    """Class to fetch urls via 'wget'"""
@@ -84,19 +60,15 @@ class Wget(FetchMethod):
        else:
            ud.basename = os.path.basename(ud.path)

        ud.localfile = data.expand(urllib.parse.unquote(ud.basename), d)
        if not ud.localfile:
            ud.localfile = data.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", "."), d)
        ud.localfile = data.expand(urllib.unquote(ud.basename), d)

        self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 --passive-ftp --no-check-certificate"
        self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate"

    def _runwget(self, ud, d, command, quiet):

        progresshandler = WgetProgressHandler(d)

        logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
        bb.fetch2.check_network_access(d, command)
        runfetchcmd(command + ' --progress=dot -v', d, quiet, log=progresshandler)
        runfetchcmd(command, d, quiet)

    def download(self, ud, d):
        """Fetch urls"""
@@ -108,10 +80,6 @@ class Wget(FetchMethod):
            bb.utils.mkdirhier(os.path.dirname(dldir + os.sep + ud.localfile))
            fetchcmd += " -O " + dldir + os.sep + ud.localfile

        if ud.user:
            up = ud.user.split(":")
            fetchcmd += " --user=%s --password=%s --auth-no-challenge" % (up[0],up[1])

        uri = ud.url.split(";")[0]
        if os.path.exists(ud.localpath):
            # file exists, but we didn't complete it.. trying again..
@@ -132,194 +100,13 @@ class Wget(FetchMethod):

        return True

    def checkstatus(self, fetch, ud, d, try_again=True):
        import urllib.request, urllib.error, urllib.parse, socket, http.client
        from urllib.response import addinfourl
        from bb.fetch2 import FetchConnectionCache
    def checkstatus(self, ud, d):

        class HTTPConnectionCache(http.client.HTTPConnection):
            if fetch.connection_cache:
                def connect(self):
                    """Connect to the host and port specified in __init__."""
        uri = ud.url.split(";")[0]
        fetchcmd = self.basecmd + " --spider '%s'" % uri

                    sock = fetch.connection_cache.get_connection(self.host, self.port)
                    if sock:
                        self.sock = sock
                    else:
                        self.sock = socket.create_connection((self.host, self.port),
                                    self.timeout, self.source_address)
                        fetch.connection_cache.add_connection(self.host, self.port, self.sock)
        self._runwget(ud, d, fetchcmd, True)

                    if self._tunnel_host:
                        self._tunnel()

        class CacheHTTPHandler(urllib.request.HTTPHandler):
            def http_open(self, req):
                return self.do_open(HTTPConnectionCache, req)

            def do_open(self, http_class, req):
                """Return an addinfourl object for the request, using http_class.

                http_class must implement the HTTPConnection API from httplib.
                The addinfourl return value is a file-like object. It also
                has methods and attributes including:
                - info(): return a mimetools.Message object for the headers
                - geturl(): return the original request URL
                - code: HTTP status code
                """
                host = req.host
                if not host:
                    raise urllib.error.URLError('no host given')

                h = http_class(host, timeout=req.timeout) # will parse host:port
                h.set_debuglevel(self._debuglevel)

                headers = dict(req.unredirected_hdrs)
                headers.update(dict((k, v) for k, v in list(req.headers.items())
                               if k not in headers))

                # We want to make an HTTP/1.1 request, but the addinfourl
                # class isn't prepared to deal with a persistent connection.
                # It will try to read all remaining data from the socket,
                # which will block while the server waits for the next request.
                # So make sure the connection gets closed after the (only)
                # request.

                # Don't close connection when connection_cache is enabled,
                if fetch.connection_cache is None:
                    headers["Connection"] = "close"
                else:
                    headers["Connection"] = "Keep-Alive" # Works for HTTP/1.0

                headers = dict(
                    (name.title(), val) for name, val in list(headers.items()))

                if req._tunnel_host:
                    tunnel_headers = {}
                    proxy_auth_hdr = "Proxy-Authorization"
                    if proxy_auth_hdr in headers:
                        tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
                        # Proxy-Authorization should not be sent to origin
                        # server.
                        del headers[proxy_auth_hdr]
                    h.set_tunnel(req._tunnel_host, headers=tunnel_headers)

                try:
                    h.request(req.get_method(), req.selector, req.data, headers)
                except socket.error as err: # XXX what error?
                    # Don't close connection when cache is enabled.
                    if fetch.connection_cache is None:
                        h.close()
                    raise urllib.error.URLError(err)
                else:
                    try:
                        r = h.getresponse(buffering=True)
                    except TypeError: # buffering kw not supported
                        r = h.getresponse()

                # Pick apart the HTTPResponse object to get the addinfourl
                # object initialized properly.

                # Wrap the HTTPResponse object in socket's file object adapter
                # for Windows. That adapter calls recv(), so delegate recv()
                # to read(). This weird wrapping allows the returned object to
                # have readline() and readlines() methods.

                # XXX It might be better to extract the read buffering code
                # out of socket._fileobject() and into a base class.
                r.recv = r.read

                # no data, just have to read
                r.read()
                class fp_dummy(object):
                    def read(self):
                        return ""
                    def readline(self):
                        return ""
                    def close(self):
                        pass

                resp = addinfourl(fp_dummy(), r.msg, req.get_full_url())
                resp.code = r.status
                resp.msg = r.reason

                # Close connection when the server requests it.
                if fetch.connection_cache is not None:
                    if 'Connection' in r.msg and r.msg['Connection'] == 'close':
                        fetch.connection_cache.remove_connection(h.host, h.port)

                return resp

        class HTTPMethodFallback(urllib.request.BaseHandler):
            """
            Fallback to GET if HEAD is not allowed (405 HTTP error)
            """
            def http_error_405(self, req, fp, code, msg, headers):
                fp.read()
                fp.close()

                newheaders = dict((k,v) for k,v in list(req.headers.items())
                                  if k.lower() not in ("content-length", "content-type"))
                return self.parent.open(urllib.request.Request(req.get_full_url(),
                                                               headers=newheaders,
                                                               origin_req_host=req.origin_req_host,
                                                               unverifiable=True))

            """
            Some servers (e.g. GitHub archives, hosted on Amazon S3) return 403
            Forbidden when they actually mean 405 Method Not Allowed.
            """
            http_error_403 = http_error_405

            """
            Some servers (e.g. FusionForge) return 406 Not Acceptable when they
            actually mean 405 Method Not Allowed.
            """
            http_error_406 = http_error_405

        class FixedHTTPRedirectHandler(urllib.request.HTTPRedirectHandler):
            """
            urllib2.HTTPRedirectHandler resets the method to GET on redirect,
            when we want to follow redirects using the original method.
            """
            def redirect_request(self, req, fp, code, msg, headers, newurl):
                newreq = urllib.request.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl)
                newreq.get_method = lambda: req.get_method()
                return newreq
        exported_proxies = export_proxies(d)

        handlers = [FixedHTTPRedirectHandler, HTTPMethodFallback]
        if exported_proxies:
            handlers.append(urllib.request.ProxyHandler())
        handlers.append(CacheHTTPHandler())
        # XXX: Since Python 2.7.9 ssl cert validation is enabled by default
        # see PEP-0476, this causes verification errors on some https servers
        # so disable by default.
        import ssl
        if hasattr(ssl, '_create_unverified_context'):
            handlers.append(urllib.request.HTTPSHandler(context=ssl._create_unverified_context()))
        opener = urllib.request.build_opener(*handlers)

        try:
            uri = ud.url.split(";")[0]
            r = urllib.request.Request(uri)
            r.get_method = lambda: "HEAD"

            if ud.user:
                import base64
                encodeuser = base64.b64encode(ud.user.encode('utf-8')).decode("utf-8")
                authheader = "Basic %s" % encodeuser
                r.add_header("Authorization", authheader)

            opener.open(r)
        except urllib.error.URLError as e:
            if try_again:
                logger.debug(2, "checkstatus: trying again")
                return self.checkstatus(fetch, ud, d, False)
            else:
                # debug for now to avoid spamming the logs in e.g. remote sstate searches
                logger.debug(2, "checkstatus() urlopen failed: %s" % e)
                return False
        return True

    def _parse_path(self, regex, s):
@@ -420,7 +207,7 @@ class Wget(FetchMethod):
        version = ['', '', '']

        bb.debug(3, "VersionURL: %s" % (url))
        soup = BeautifulSoup(self._fetch_index(url, ud, d), "html.parser", parse_only=SoupStrainer("a"))
        soup = BeautifulSoup(self._fetch_index(url, ud, d))
        if not soup:
            bb.debug(3, "*** %s NO SOUP" % (url))
            return ""
@@ -459,10 +246,10 @@ class Wget(FetchMethod):
        version_dir = ['', '', '']
        version = ['', '', '']

        dirver_regex = re.compile("(?P<pfx>\D*)(?P<ver>(\d+[\.\-_])+(\d+))")
        dirver_regex = re.compile("(\D*)((\d+[\.-_])+(\d+))")
        s = dirver_regex.search(dirver)
        if s:
            version_dir[1] = s.group('ver')
            version_dir[1] = s.group(2)
        else:
            version_dir[1] = dirver
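For intuition, the named-group form of dirver_regex splits a directory name into a non-digit prefix and a version. A quick runnable sketch (the directory name is illustrative):

    import re
    dirver_regex = re.compile(r"(?P<pfx>\D*)(?P<ver>(\d+[\.\-_])+(\d+))")
    s = dirver_regex.search("v2.5.1")      # illustrative directory name
    # s.group('pfx') == 'v', s.group('ver') == '2.5.1'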

@@ -470,26 +257,16 @@ class Wget(FetchMethod):
                          ud.path.split(dirver)[0], ud.user, ud.pswd, {}])
        bb.debug(3, "DirURL: %s, %s" % (dirs_uri, package))

        soup = BeautifulSoup(self._fetch_index(dirs_uri, ud, d), "html.parser", parse_only=SoupStrainer("a"))
        soup = BeautifulSoup(self._fetch_index(dirs_uri, ud, d))
        if not soup:
            return version[1]

        for line in soup.find_all('a', href=True):
            s = dirver_regex.search(line['href'].strip("/"))
            if s:
                sver = s.group('ver')

                # When the prefix is part of the version directory, we need to
                # ensure that only the version directory is used, so remove any
                # previous directories if they exist.
                #
                # Example: pfx = '/dir1/dir2/v' and version = '2.5' the expected
                # result is v2.5.
                spfx = s.group('pfx').split('/')[-1]

                version_dir_new = ['', sver, '']
                version_dir_new = ['', s.group(2), '']
                if self._vercmp(version_dir, version_dir_new) <= 0:
                    dirver_new = spfx + sver
                    dirver_new = s.group(1) + s.group(2)
                    path = ud.path.replace(dirver, dirver_new, True) \
                        .split(package)[0]
                    uri = bb.fetch.encodeurl([ud.type, ud.host, path,
@@ -528,7 +305,7 @@ class Wget(FetchMethod):
        pn_regex = "(%s|%s|%s)" % (pn_prefix1, pn_prefix2, pn_prefix3)

        # match version
        pver_regex = "(([A-Z]*\d+[a-zA-Z]*[\.\-_]*)+)"
        pver_regex = "(([A-Z]*\d+[a-zA-Z]*[\.-_]*)+)"

        # match arch
        parch_regex = "-source|_all_"
@@ -543,7 +320,7 @@ class Wget(FetchMethod):
        self.suffix_regex_comp = re.compile(psuffix_regex)

        # compile regex, can be specific by package or generic regex
        pn_regex = d.getVar('UPSTREAM_CHECK_REGEX', True)
        pn_regex = d.getVar('REGEX', True)
        if pn_regex:
            package_custom_regex_comp = re.compile(pn_regex)
        else:
@@ -570,16 +347,16 @@ class Wget(FetchMethod):
            if not re.search("\d+", package):
                current_version[1] = re.sub('_', '.', current_version[1])
                current_version[1] = re.sub('-', '.', current_version[1])
                return (current_version[1], '')
                return current_version[1]

        package_regex = self._init_regexes(package, ud, d)
        if package_regex is None:
            bb.warn("latest_versionstring: package %s don't match pattern" % (package))
            return ('', '')
            return ""
        bb.debug(3, "latest_versionstring, regex: %s" % (package_regex.pattern))

        uri = ""
        regex_uri = d.getVar("UPSTREAM_CHECK_URI", True)
        regex_uri = d.getVar("REGEX_URI", True)
        if not regex_uri:
            path = ud.path.split(package)[0]

@@ -593,12 +370,12 @@ class Wget(FetchMethod):

            dirver_pn_regex = re.compile("%s\d?" % (re.escape(pn)))
            if not dirver_pn_regex.search(dirver):
                return (self._check_latest_version_by_dir(dirver,
                    package, package_regex, current_version, ud, d), '')
                return self._check_latest_version_by_dir(dirver,
                    package, package_regex, current_version, ud, d)

            uri = bb.fetch.encodeurl([ud.type, ud.host, path, ud.user, ud.pswd, {}])
        else:
            uri = regex_uri

        return (self._check_latest_version(uri, package, package_regex,
                current_version, ud, d), '')
        return self._check_latest_version(uri, package, package_regex,
                current_version, ud, d)

@@ -27,7 +27,6 @@ import sys
import logging
import optparse
import warnings
import fcntl

import bb
from bb import event
@@ -37,76 +36,29 @@ from bb import ui
from bb import server
from bb import cookerdata

__version__ = "1.26.0"
logger = logging.getLogger("BitBake")

class BBMainException(Exception):
class BBMainException(bb.BBHandledException):
    pass

def present_options(optionlist):
    if len(optionlist) > 1:
        return ' or '.join([', '.join(optionlist[:-1]), optionlist[-1]])
    else:
        return optionlist[0]
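A quick usage sketch for present_options(), to show the formatting it produces (the module names are illustrative):

    present_options(['knotty', 'ncurses', 'hob'])   # -> 'knotty, ncurses or hob'
    present_options(['knotty'])                     # -> 'knotty'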
def get_ui(config):
    if not config.ui:
        # modify 'ui' attribute because it is also read by cooker
        config.ui = os.environ.get('BITBAKE_UI', 'knotty')

class BitbakeHelpFormatter(optparse.IndentedHelpFormatter):
    def format_option(self, option):
        # We need to do this here rather than in the text we supply to
        # add_option() because we don't want to call list_extension_modules()
        # on every execution (since it imports all of the modules)
        # Note also that we modify option.help rather than the returned text
        # - this is so that we don't have to re-format the text ourselves
        if option.dest == 'ui':
            valid_uis = list_extension_modules(bb.ui, 'main')
            option.help = option.help.replace('@CHOICES@', present_options(valid_uis))
        elif option.dest == 'servertype':
            valid_server_types = list_extension_modules(bb.server, 'BitBakeServer')
            option.help = option.help.replace('@CHOICES@', present_options(valid_server_types))
    interface = config.ui

        return optparse.IndentedHelpFormatter.format_option(self, option)

def list_extension_modules(pkg, checkattr):
    """
    Lists extension modules in a specific Python package
    (e.g. UIs, servers). NOTE: Calling this function will import all of the
    submodules of the specified module in order to check for the specified
    attribute; this can have unusual side-effects. As a result, this should
    only be called when displaying help text or error messages.
    Parameters:
        pkg: previously imported Python package to list
        checkattr: attribute to look for in module to determine if it's valid
            as the type of extension you are looking for
    """
    import pkgutil
    pkgdir = os.path.dirname(pkg.__file__)

    modules = []
    for _, modulename, _ in pkgutil.iter_modules([pkgdir]):
        if os.path.isdir(os.path.join(pkgdir, modulename)):
            # ignore directories
            continue
        try:
            module = __import__(pkg.__name__, fromlist=[modulename])
        except:
            # If we can't import it, it's not valid
            continue
        module_if = getattr(module, modulename)
        if getattr(module_if, 'hidden_extension', False):
            continue
        if not checkattr or hasattr(module_if, checkattr):
            modules.append(modulename)
    return modules

def import_extension_module(pkg, modulename, checkattr):
    try:
        # Dynamically load the UI based on the ui name. Although we
        # suggest a fixed set this allows you to have flexibility in which
        # ones are available.
        module = __import__(pkg.__name__, fromlist=[modulename])
        return getattr(module, modulename)
        module = __import__("bb.ui", fromlist = [interface])
        return getattr(module, interface)
    except AttributeError:
        modules = present_options(list_extension_modules(pkg, checkattr))
        raise BBMainException('FATAL: Unable to import extension module "%s" from %s. '
                              'Valid extension modules: %s' % (modulename, pkg.__name__, modules))
        raise BBMainException("FATAL: Invalid user interface '%s' specified.\n"
                              "Valid interfaces: depexp, goggle, ncurses, hob, knotty [default]." % interface)


# Display bitbake/OE warnings via the BitBake.Warnings logger, ignoring others
warnlog = logging.getLogger("BitBake.Warnings")
@@ -117,7 +69,7 @@ def _showwarning(message, category, filename, lineno, file=None, line=None):
        _warnings_showwarning(message, category, filename, lineno, file, line)
    else:
        s = warnings.formatwarning(message, category, filename, lineno)
        warnlog.warning(s)
        warnlog.warn(s)

warnings.showwarning = _showwarning
warnings.filterwarnings("ignore")
@@ -131,189 +83,128 @@ class BitBakeConfigParameters(cookerdata.ConfigParameters):

    def parseCommandLine(self, argv=sys.argv):
        parser = optparse.OptionParser(
            formatter=BitbakeHelpFormatter(),
            version="BitBake Build Tool Core version %s" % bb.__version__,
            usage="""%prog [options] [recipename/target recipe:do_task ...]
            version = "BitBake Build Tool Core version %s, %%prog version %s" % (bb.__version__, __version__),
            usage = """%prog [options] [recipename/target ...]

    Executes the specified task (default is 'build') for a given set of target recipes (.bb files).
    It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which
    will provide the layer, BBFILES and other configuration information.""")

        parser.add_option("-b", "--buildfile", action="store", dest="buildfile", default=None,
                          help="Execute tasks from a specific .bb recipe directly. WARNING: Does "
                               "not handle any dependencies from other recipes.")
        parser.add_option("-b", "--buildfile", help = "Execute tasks from a specific .bb recipe directly. WARNING: Does not handle any dependencies from other recipes.",
                          action = "store", dest = "buildfile", default = None)

        parser.add_option("-k", "--continue", action="store_false", dest="abort", default=True,
                          help="Continue as much as possible after an error. While the target that "
                               "failed and anything depending on it cannot be built, as much as "
                               "possible will be built before stopping.")
        parser.add_option("-k", "--continue", help = "Continue as much as possible after an error. While the target that failed and anything depending on it cannot be built, as much as possible will be built before stopping.",
                          action = "store_false", dest = "abort", default = True)

        parser.add_option("-a", "--tryaltconfigs", action="store_true",
                          dest="tryaltconfigs", default=False,
                          help="Continue with builds by trying to use alternative providers "
                               "where possible.")
        parser.add_option("-a", "--tryaltconfigs", help = "Continue with builds by trying to use alternative providers where possible.",
                          action = "store_true", dest = "tryaltconfigs", default = False)

        parser.add_option("-f", "--force", action="store_true", dest="force", default=False,
                          help="Force the specified targets/task to run (invalidating any "
                               "existing stamp file).")
        parser.add_option("-f", "--force", help = "Force the specified targets/task to run (invalidating any existing stamp file).",
                          action = "store_true", dest = "force", default = False)

        parser.add_option("-c", "--cmd", action="store", dest="cmd",
                          help="Specify the task to execute. The exact options available "
                               "depend on the metadata. Some examples might be 'compile'"
                               " or 'populate_sysroot' or 'listtasks' may give a list of "
                               "the tasks available.")
        parser.add_option("-c", "--cmd", help = "Specify the task to execute. The exact options available depend on the metadata. Some examples might be 'compile' or 'populate_sysroot' or 'listtasks' may give a list of the tasks available.",
                          action = "store", dest = "cmd")

        parser.add_option("-C", "--clear-stamp", action="store", dest="invalidate_stamp",
                          help="Invalidate the stamp for the specified task such as 'compile' "
                               "and then run the default task for the specified target(s).")
        parser.add_option("-C", "--clear-stamp", help = "Invalidate the stamp for the specified task such as 'compile' and then run the default task for the specified target(s).",
                          action = "store", dest = "invalidate_stamp")

        parser.add_option("-r", "--read", action="append", dest="prefile", default=[],
                          help="Read the specified file before bitbake.conf.")
        parser.add_option("-r", "--read", help = "Read the specified file before bitbake.conf.",
                          action = "append", dest = "prefile", default = [])

        parser.add_option("-R", "--postread", action="append", dest="postfile", default=[],
                          help="Read the specified file after bitbake.conf.")
        parser.add_option("-R", "--postread", help = "Read the specified file after bitbake.conf.",
                          action = "append", dest = "postfile", default = [])

        parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False,
                          help="Output more log message data to the terminal.")
        parser.add_option("-v", "--verbose", help = "Output more log message data to the terminal.",
                          action = "store_true", dest = "verbose", default = False)

        parser.add_option("-D", "--debug", action="count", dest="debug", default=0,
                          help="Increase the debug level. You can specify this more than once.")
        parser.add_option("-D", "--debug", help = "Increase the debug level. You can specify this more than once.",
                          action = "count", dest="debug", default = 0)

        parser.add_option("-q", "--quiet", action="store_true", dest="quiet", default=False,
                          help="Output less log message data to the terminal.")
        parser.add_option("-n", "--dry-run", help = "Don't execute, just go through the motions.",
                          action = "store_true", dest = "dry_run", default = False)

        parser.add_option("-n", "--dry-run", action="store_true", dest="dry_run", default=False,
                          help="Don't execute, just go through the motions.")
        parser.add_option("-S", "--dump-signatures", help = "Dump out the signature construction information, with no task execution. The SIGNATURE_HANDLER parameter is passed to the handler. Two common values are none and printdiff but the handler may define more/less. none means only dump the signature, printdiff means compare the dumped signature with the cached one.",
                          action = "append", dest = "dump_signatures", default = [], metavar="SIGNATURE_HANDLER")

        parser.add_option("-S", "--dump-signatures", action="append", dest="dump_signatures",
                          default=[], metavar="SIGNATURE_HANDLER",
                          help="Dump out the signature construction information, with no task "
                               "execution. The SIGNATURE_HANDLER parameter is passed to the "
                               "handler. Two common values are none and printdiff but the handler "
                               "may define more/less. none means only dump the signature, printdiff"
                               " means compare the dumped signature with the cached one.")
        parser.add_option("-p", "--parse-only", help = "Quit after parsing the BB recipes.",
                          action = "store_true", dest = "parse_only", default = False)

        parser.add_option("-p", "--parse-only", action="store_true",
                          dest="parse_only", default=False,
                          help="Quit after parsing the BB recipes.")
        parser.add_option("-s", "--show-versions", help = "Show current and preferred versions of all recipes.",
                          action = "store_true", dest = "show_versions", default = False)

        parser.add_option("-s", "--show-versions", action="store_true",
                          dest="show_versions", default=False,
                          help="Show current and preferred versions of all recipes.")
        parser.add_option("-e", "--environment", help = "Show the global or per-recipe environment complete with information about where variables were set/changed.",
                          action = "store_true", dest = "show_environment", default = False)

        parser.add_option("-e", "--environment", action="store_true",
                          dest="show_environment", default=False,
                          help="Show the global or per-recipe environment complete with information"
                               " about where variables were set/changed.")
        parser.add_option("-g", "--graphviz", help = "Save dependency tree information for the specified targets in the dot syntax.",
                          action = "store_true", dest = "dot_graph", default = False)

        parser.add_option("-g", "--graphviz", action="store_true", dest="dot_graph", default=False,
                          help="Save dependency tree information for the specified "
                               "targets in the dot syntax.")
        parser.add_option("-I", "--ignore-deps", help = """Assume these dependencies don't exist and are already provided (equivalent to ASSUME_PROVIDED). Useful to make dependency graphs more appealing""",
                          action = "append", dest = "extra_assume_provided", default = [])

        parser.add_option("-I", "--ignore-deps", action="append",
                          dest="extra_assume_provided", default=[],
                          help="Assume these dependencies don't exist and are already provided "
                               "(equivalent to ASSUME_PROVIDED). Useful to make dependency "
                               "graphs more appealing")
        parser.add_option("-l", "--log-domains", help = """Show debug logging for the specified logging domains""",
                          action = "append", dest = "debug_domains", default = [])

        parser.add_option("-l", "--log-domains", action="append", dest="debug_domains", default=[],
                          help="Show debug logging for the specified logging domains")
        parser.add_option("-P", "--profile", help = "Profile the command and save reports.",
                          action = "store_true", dest = "profile", default = False)

        parser.add_option("-P", "--profile", action="store_true", dest="profile", default=False,
                          help="Profile the command and save reports.")
        parser.add_option("-u", "--ui", help = "The user interface to use (e.g. knotty, hob, depexp).",
                          action = "store", dest = "ui")

        # @CHOICES@ is substituted out by BitbakeHelpFormatter above
        parser.add_option("-u", "--ui", action="store", dest="ui",
                          default=os.environ.get('BITBAKE_UI', 'knotty'),
                          help="The user interface to use (@CHOICES@ - default %default).")
        parser.add_option("-t", "--servertype", help = "Choose which server to use, process or xmlrpc.",
                          action = "store", dest = "servertype")

        # @CHOICES@ is substituted out by BitbakeHelpFormatter above
        parser.add_option("-t", "--servertype", action="store", dest="servertype",
                          default=["process", "xmlrpc"]["BBSERVER" in os.environ],
                          help="Choose which server type to use (@CHOICES@ - default %default).")
        parser.add_option("", "--token", help = "Specify the connection token to be used when connecting to a remote server.",
                          action = "store", dest = "xmlrpctoken")

        parser.add_option("", "--token", action="store", dest="xmlrpctoken",
                          default=os.environ.get("BBTOKEN"),
                          help="Specify the connection token to be used when connecting "
                               "to a remote server.")
        parser.add_option("", "--revisions-changed", help = "Set the exit code depending on whether upstream floating revisions have changed or not.",
                          action = "store_true", dest = "revisions_changed", default = False)

        parser.add_option("", "--revisions-changed", action="store_true",
                          dest="revisions_changed", default=False,
                          help="Set the exit code depending on whether upstream floating "
                               "revisions have changed or not.")
        parser.add_option("", "--server-only", help = "Run bitbake without a UI, only starting a server (cooker) process.",
                          action = "store_true", dest = "server_only", default = False)

        parser.add_option("", "--server-only", action="store_true",
                          dest="server_only", default=False,
                          help="Run bitbake without a UI, only starting a server "
                               "(cooker) process.")
        parser.add_option("-B", "--bind", help = "The name/address for the bitbake server to bind to.",
                          action = "store", dest = "bind", default = False)

        parser.add_option("", "--foreground", action="store_true",
                          help="Run bitbake server in foreground.")
        parser.add_option("", "--no-setscene", help = "Do not run any setscene tasks. sstate will be ignored and everything needed, built.",
                          action = "store_true", dest = "nosetscene", default = False)

        parser.add_option("-B", "--bind", action="store", dest="bind", default=False,
                          help="The name/address for the bitbake server to bind to.")
        parser.add_option("", "--remote-server", help = "Connect to the specified server.",
                          action = "store", dest = "remote_server", default = False)

        parser.add_option("-T", "--idle-timeout", type=int,
                          default=int(os.environ.get("BBTIMEOUT", "0")),
                          help="Set timeout to unload bitbake server due to inactivity")
        parser.add_option("-m", "--kill-server", help = "Terminate the remote server.",
                          action = "store_true", dest = "kill_server", default = False)

        parser.add_option("", "--no-setscene", action="store_true",
                          dest="nosetscene", default=False,
                          help="Do not run any setscene tasks. sstate will be ignored and "
                               "everything needed, built.")
        parser.add_option("", "--observe-only", help = "Connect to a server as an observing-only client.",
                          action = "store_true", dest = "observe_only", default = False)

        parser.add_option("", "--setscene-only", action="store_true",
                          dest="setsceneonly", default=False,
                          help="Only run setscene tasks, don't run any real tasks.")
        parser.add_option("", "--status-only", help = "Check the status of the remote bitbake server.",
                          action = "store_true", dest = "status_only", default = False)

        parser.add_option("", "--remote-server", action="store", dest="remote_server",
                          default=os.environ.get("BBSERVER"),
                          help="Connect to the specified server.")

        parser.add_option("-m", "--kill-server", action="store_true",
                          dest="kill_server", default=False,
                          help="Terminate the remote server.")

        parser.add_option("", "--observe-only", action="store_true",
                          dest="observe_only", default=False,
                          help="Connect to a server as an observing-only client.")

        parser.add_option("", "--status-only", action="store_true",
                          dest="status_only", default=False,
                          help="Check the status of the remote bitbake server.")

        parser.add_option("-w", "--write-log", action="store", dest="writeeventlog",
                          default=os.environ.get("BBEVENTLOG"),
                          help="Writes the event log of the build to a bitbake event json file. "
                               "Use '' (empty string) to assign the name automatically.")
        parser.add_option("-w", "--write-log", help = "Writes the event log of the build to a bitbake event json file. Use '' (empty string) to assign the name automatically.",
                          action = "store", dest = "writeeventlog")

        options, targets = parser.parse_args(argv)

        if options.quiet and options.verbose:
            parser.error("options --quiet and --verbose are mutually exclusive")
        # some environment variables also set configuration options
        if "BBSERVER" in os.environ:
            options.servertype = "xmlrpc"
            options.remote_server = os.environ["BBSERVER"]

        if options.quiet and options.debug:
            parser.error("options --quiet and --debug are mutually exclusive")
        if "BBTOKEN" in os.environ:
            options.xmlrpctoken = os.environ["BBTOKEN"]

        # use configuration files from environment variables
        if "BBPRECONF" in os.environ:
            options.prefile.append(os.environ["BBPRECONF"])

        if "BBPOSTCONF" in os.environ:
            options.postfile.append(os.environ["BBPOSTCONF"])
        if "BBEVENTLOG" in os.environ:
            options.writeeventlog = os.environ["BBEVENTLOG"]

        # fill in proper log name if not supplied
        if options.writeeventlog is not None and len(options.writeeventlog) == 0:
            from datetime import datetime
            eventlog = "bitbake_eventlog_%s.json" % datetime.now().strftime("%Y%m%d%H%M%S")
            options.writeeventlog = eventlog
            import datetime
            options.writeeventlog = "bitbake_eventlog_%s.json" % datetime.datetime.now().strftime("%Y%m%d%H%M%S")

        # if BBSERVER says to autodetect, let's do that
        if options.remote_server:
            port = -1
            if options.remote_server != 'autostart':
                host, port = options.remote_server.split(":", 2)
                port = int(port)
            [host, port] = options.remote_server.split(":", 2)
            port = int(port)
            # use automatic port if port set to -1, means read it from
            # the bitbake.lock file; this is a bit tricky, but we always expect
            # to be in the base of the build directory if we need to have a
@@ -330,22 +221,19 @@ class BitBakeConfigParameters(cookerdata.ConfigParameters):
                lf.close()
                options.remote_server = remotedef
            except Exception as e:
                if options.remote_server != 'autostart':
                    raise BBMainException("Failed to read bitbake.lock (%s), invalid port" % str(e))
                raise BBMainException("Failed to read bitbake.lock (%s), invalid port" % str(e))

        return options, targets[1:]


def start_server(servermodule, configParams, configuration, features):
    server = servermodule.BitBakeServer()
    single_use = not configParams.server_only and os.getenv('BBSERVER') != 'autostart'
    if configParams.bind:
        (host, port) = configParams.bind.split(':')
        server.initServer((host, int(port)), single_use=single_use,
                          idle_timeout=configParams.idle_timeout)
        configuration.interface = [server.serverImpl.host, server.serverImpl.port]
        server.initServer((host, int(port)))
        configuration.interface = [ server.serverImpl.host, server.serverImpl.port ]
    else:
        server.initServer(single_use=single_use)
        server.initServer()
        configuration.interface = []

    try:
@@ -356,17 +244,20 @@ def start_server(servermodule, configParams, configuration, features):
        server.addcooker(cooker)
        server.saveConnectionDetails()
    except Exception as e:
        exc_info = sys.exc_info()
        while hasattr(server, "event_queue"):
            import queue
            try:
                import queue
            except ImportError:
                import Queue as queue
            try:
                event = server.event_queue.get(block=False)
            except (queue.Empty, IOError):
                break
            if isinstance(event, logging.LogRecord):
                logger.handle(event)
        raise
    if not configParams.foreground:
        server.detach()
        raise exc_info[1], None, exc_info[2]
    server.detach()
    cooker.lock.close()
    return server

@@ -382,18 +273,28 @@ def bitbake_main(configParams, configuration):
    # updates to log files for use with tail
    try:
        if sys.stdout.name == '<stdout>':
            # Reopen with O_SYNC (unbuffered)
            fl = fcntl.fcntl(sys.stdout.fileno(), fcntl.F_GETFL)
            fl |= os.O_SYNC
            fcntl.fcntl(sys.stdout.fileno(), fcntl.F_SETFL, fl)
            sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
    except:
        pass


    configuration.setConfigParameters(configParams)

    ui_module = import_extension_module(bb.ui, configParams.ui, 'main')
    servermodule = import_extension_module(bb.server, configParams.servertype, 'BitBakeServer')
    ui_module = get_ui(configParams)

    # Server type can be xmlrpc or process currently, if nothing is specified,
    # the default server is process
    if configParams.servertype:
        server_type = configParams.servertype
    else:
        server_type = 'process'

    try:
        module = __import__("bb.server", fromlist = [server_type])
        servermodule = getattr(module, server_type)
    except AttributeError:
        raise BBMainException("FATAL: Invalid server type '%s' specified.\n"
                              "Valid interfaces: xmlrpc, process [default]." % server_type)

    if configParams.server_only:
        if configParams.servertype != "xmlrpc":
@@ -405,11 +306,7 @@ def bitbake_main(configParams, configuration):
        if configParams.remote_server:
            raise BBMainException("FATAL: The '--server-only' option conflicts with %s.\n" %
                                  ("the BBSERVER environment variable" if "BBSERVER" in os.environ \
                                   else "the '--remote-server' option"))

    elif configParams.foreground:
        raise BBMainException("FATAL: The '--foreground' option can only be used "
                              "with --server-only.\n")
                                   else "the '--remote-server' option" ))

    if configParams.bind and configParams.servertype != "xmlrpc":
        raise BBMainException("FATAL: If '-B' or '--bind' is defined, we must "
@@ -424,8 +321,7 @@ def bitbake_main(configParams, configuration):
                              "connecting to a server.\n")

    if configParams.kill_server and not configParams.remote_server:
        raise BBMainException("FATAL: '--kill-server' can only be used to "
                              "terminate a remote server")
        raise BBMainException("FATAL: '--kill-server' can only be used to terminate a remote server")

    if "BBDEBUG" in os.environ:
        level = int(os.environ["BBDEBUG"])
@@ -433,7 +329,7 @@ def bitbake_main(configParams, configuration):
        configuration.debug = level

    bb.msg.init_msgconfig(configParams.verbose, configuration.debug,
                          configuration.debug_domains)
        configuration.debug_domains)

    # Ensure logging messages get sent to the UI as events
    handler = bb.event.LogHandler()
@@ -449,29 +345,13 @@ def bitbake_main(configParams, configuration):
    # Collect the feature set for the UI
    featureset = getattr(ui_module, "featureSet", [])

    if configParams.server_only:
        for param in ('prefile', 'postfile'):
            value = getattr(configParams, param)
            if value:
                setattr(configuration, "%s_server" % param, value)
                param = "%s_server" % param

    if not configParams.remote_server:
        # we start a server with a given configuration
        server = start_server(servermodule, configParams, configuration, featureset)
        bb.event.ui_queue = []
    else:
        if os.getenv('BBSERVER') == 'autostart':
        if configParams.remote_server == 'autostart' or \
           not servermodule.check_connection(configParams.remote_server, timeout=2):
            configParams.bind = 'localhost:0'
            srv = start_server(servermodule, configParams, configuration, featureset)
            configParams.remote_server = '%s:%d' % tuple(configuration.interface)
            bb.event.ui_queue = []

        # we start a stub server that is actually a XMLRPClient that connects to a real server
        server = servermodule.BitBakeXMLRPCClient(configParams.observe_only,
                                                  configParams.xmlrpctoken)
        server = servermodule.BitBakeXMLRPCClient(configParams.observe_only, configParams.xmlrpctoken)
        server.saveConnectionDetails(configParams.remote_server)


@@ -479,15 +359,10 @@ def bitbake_main(configParams, configuration):
        try:
            server_connection = server.establishConnection(featureset)
        except Exception as e:
            if configParams.kill_server:
                return 0
            bb.fatal("Could not connect to server %s: %s" % (configParams.remote_server, str(e)))

        if configParams.kill_server:
            server_connection.connection.terminateServer()
            bb.event.ui_queue = []
            return 0

        server_connection.setupEventQueue()

        # Restore the environment in case the UI needs it
        for k in cleanedvars:
            os.environ[k] = cleanedvars[k]
@@ -499,17 +374,18 @@ def bitbake_main(configParams, configuration):
            server_connection.terminate()
            return 0

        if configParams.kill_server:
            server_connection.connection.terminateServer()
            bb.event.ui_queue = []
            return 0

        try:
            return ui_module.main(server_connection.connection, server_connection.events,
                                  configParams)
            return ui_module.main(server_connection.connection, server_connection.events, configParams)
        finally:
            bb.event.ui_queue = []
            server_connection.terminate()
    else:
        print("Bitbake server address: %s, server port: %s" % (server.serverImpl.host,
                                                               server.serverImpl.port))
        if configParams.foreground:
            server.serverImpl.serve_forever()
        print("Bitbake server address: %s, server port: %s" % (server.serverImpl.host, server.serverImpl.port))
        return 0

    return 1

@@ -19,22 +19,11 @@

from bb.utils import better_compile, better_exec

def insert_method(modulename, code, fn, lineno):
def insert_method(modulename, code, fn):
    """
    Add the code of a module. The methods are simply
    added; no checking is done.
    """
    comp = better_compile(code, modulename, fn, lineno=lineno)
    comp = better_compile(code, modulename, fn )
    better_exec(comp, None, code, fn)

compilecache = {}

def compile_cache(code):
    h = hash(code)
    if h in compilecache:
        return compilecache[h]
    return None

def compile_cache_add(code, compileobj):
    h = hash(code)
    compilecache[h] = compileobj
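A minimal usage sketch for the two cache helpers, under the assumption that callers pair them as look-up-then-populate (the function body is hypothetical, and compile() stands in for better_compile):

    code = "def pkg_postinst(): pass"               # hypothetical function body
    comp = compile_cache(code)
    if not comp:
        comp = compile(code, "<hypothetical>", "exec")   # stand-in for better_compile
        compile_cache_add(code, comp)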

@@ -220,7 +220,7 @@ class diskMonitor:
            if minSpace and freeSpace < minSpace:
                # Always show warning, the self.checked would always be False if the action is WARN
                if self.preFreeS[k] == 0 or self.preFreeS[k] - freeSpace > self.spaceInterval and not self.checked[k]:
                    logger.warning("The free space of %s (%s) is running low (%.3fGB left)" % \
                    logger.warn("The free space of %s (%s) is running low (%.3fGB left)" % \
                                (path, dev, freeSpace / 1024 / 1024 / 1024.0))
                    self.preFreeS[k] = freeSpace

@@ -246,7 +246,7 @@ class diskMonitor:
                    continue
                # Always show warning, the self.checked would always be False if the action is WARN
                if self.preFreeI[k] == 0 or self.preFreeI[k] - freeInode > self.inodeInterval and not self.checked[k]:
                    logger.warning("The free inode of %s (%s) is running low (%.3fK left)" % \
                    logger.warn("The free inode of %s (%s) is running low (%.3fK left)" % \
                                (path, dev, freeInode / 1024.0))
                    self.preFreeI[k] = freeInode


@@ -57,7 +57,7 @@ class BBLogFormatter(logging.Formatter):
    }

    color_enabled = False
    BASECOLOR, BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = list(range(29,38))
    BASECOLOR, BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(29,38)

    COLORS = {
        DEBUG3 : CYAN,
@@ -90,9 +90,8 @@ class BBLogFormatter(logging.Formatter):
        if self.color_enabled:
            record = self.colorize(record)
        msg = logging.Formatter.format(self, record)
        if hasattr(record, 'bb_exc_formatted'):
            msg += '\n' + ''.join(record.bb_exc_formatted)
        elif hasattr(record, 'bb_exc_info'):

        if hasattr(record, 'bb_exc_info'):
            etype, value, tb = record.bb_exc_info
            formatted = bb.exceptions.format_exception(etype, value, tb, limit=5)
            msg += '\n' + ''.join(formatted)
@@ -151,7 +150,7 @@ loggerDefaultVerbose = False
loggerVerboseLogs = False
loggerDefaultDomains = []

def init_msgconfig(verbose, debug, debug_domains=None):
def init_msgconfig(verbose, debug, debug_domains = []):
    """
    Set the default verbosity and debug levels and configure the logger
    """
@@ -159,10 +158,7 @@ def init_msgconfig(verbose, debug, debug_domains=None):
    bb.msg.loggerDefaultVerbose = verbose
    if verbose:
        bb.msg.loggerVerboseLogs = True
    if debug_domains:
        bb.msg.loggerDefaultDomains = debug_domains
    else:
        bb.msg.loggerDefaultDomains = []
    bb.msg.loggerDefaultDomains = debug_domains
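The init_msgconfig() change above swaps a mutable default argument for None plus an explicit check. A minimal illustration (not from BitBake) of the pitfall being avoided:

    def remember(item, seen=[]):   # one shared list is created once, at definition time
        seen.append(item)
        return seen

    remember('a')    # ['a']
    remember('b')    # ['a', 'b'] - state leaked from the first call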
||||
|
||||
def constructLogOptions():
|
||||
debug = loggerDefaultDebugLevel
|
||||
@@ -182,12 +178,9 @@ def constructLogOptions():
|
||||
debug_domains["BitBake.%s" % domainarg] = logging.DEBUG - dlevel + 1
|
||||
return level, debug_domains
|
||||
|
||||
def addDefaultlogFilter(handler, cls = BBLogFilter, forcelevel=None):
|
||||
def addDefaultlogFilter(handler, cls = BBLogFilter):
|
||||
level, debug_domains = constructLogOptions()
|
||||
|
||||
if forcelevel is not None:
|
||||
level = forcelevel
|
||||
|
||||
cls(handler, level, debug_domains)
|
||||
|
||||
#
|
||||
|
||||
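The init_msgconfig hunk swaps a mutable default argument (debug_domains = []) for the None sentinel. The distinction matters because a default list is created once, at function definition time, and then shared across every call; a quick self-contained demonstration of the pitfall:

def remember_bad(item, seen=[]):      # one list shared by every call
    seen.append(item)
    return seen

def remember_good(item, seen=None):   # fresh list per call unless one is passed
    if seen is None:
        seen = []
    seen.append(item)
    return seen

print(remember_bad("a"))   # ['a']
print(remember_bad("b"))   # ['a', 'b'] - state leaked between calls
print(remember_good("a"))  # ['a']
print(remember_good("b"))  # ['b']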
@@ -26,10 +26,9 @@ File parsers for the BitBake build tools.

handlers = []

import errno
import logging
import os
import stat
import logging
import bb
import bb.utils
import bb.siggen
@@ -71,12 +70,7 @@ def cached_mtime_noerror(f):
return __mtime_cache[f]

def update_mtime(f):
try:
__mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
except OSError:
if f in __mtime_cache:
del __mtime_cache[f]
return 0
__mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
return __mtime_cache[f]

def update_cache(f):
@@ -87,7 +81,7 @@ def update_cache(f):
def mark_dependency(d, f):
if f.startswith('./'):
f = "%s/%s" % (os.getcwd(), f[2:])
deps = (d.getVar('__depends', False) or [])
deps = (d.getVar('__depends') or [])
s = (f, cached_mtime_noerror(f))
if s not in deps:
deps.append(s)
@@ -95,7 +89,7 @@ def mark_dependency(d, f):

def check_dependency(d, f):
s = (f, cached_mtime_noerror(f))
deps = (d.getVar('__depends', False) or [])
deps = (d.getVar('__depends') or [])
return s in deps

def supports(fn, data):
@@ -128,12 +122,12 @@ def resolve_file(fn, d):
for af in attempts:
mark_dependency(d, af)
if not newfn:
raise IOError(errno.ENOENT, "file %s not found in %s" % (fn, bbpath))
raise IOError("file %s not found in %s" % (fn, bbpath))
fn = newfn

mark_dependency(d, fn)
if not os.path.isfile(fn):
raise IOError(errno.ENOENT, "file %s not found" % fn)
raise IOError("file %s not found" % fn)

return fn

@@ -161,8 +155,8 @@ def vars_from_file(mypkg, d):
def get_file_depends(d):
'''Return the dependent files'''
dep_files = []
depends = d.getVar('__base_depends', False) or []
depends = depends + (d.getVar('__depends', False) or [])
depends = d.getVar('__base_depends', True) or []
depends = depends + (d.getVar('__depends', True) or [])
for (fn, _) in depends:
dep_files.append(os.path.abspath(fn))
return " ".join(dep_files)
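The longer update_mtime above also evicts a stale cache entry when the file has disappeared, instead of letting os.stat raise out of the cache layer. A self-contained sketch of that behaviour, following the shape of the hunk:

import os
import stat

__mtime_cache = {}

def update_mtime(f):
    """Refresh the cached mtime for f; drop the entry if f has vanished."""
    try:
        __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
    except OSError:
        __mtime_cache.pop(f, None)  # file gone: forget it rather than raise
        return 0
    return __mtime_cache[f]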
@@ -21,7 +21,8 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.


from __future__ import absolute_import
from future_builtins import filter
import re
import string
import logging
@@ -69,33 +70,6 @@ class ExportNode(AstNode):
def eval(self, data):
data.setVarFlag(self.var, "export", 1, op = 'exported')

class UnsetNode(AstNode):
def __init__(self, filename, lineno, var):
AstNode.__init__(self, filename, lineno)
self.var = var

def eval(self, data):
loginfo = {
'variable': self.var,
'file': self.filename,
'line': self.lineno,
}
data.delVar(self.var,**loginfo)

class UnsetFlagNode(AstNode):
def __init__(self, filename, lineno, var, flag):
AstNode.__init__(self, filename, lineno)
self.var = var
self.flag = flag

def eval(self, data):
loginfo = {
'variable': self.var,
'file': self.filename,
'line': self.lineno,
}
data.delVarFlag(self.var, self.flag, **loginfo)

class DataNode(AstNode):
"""
Various data related updates. For the sake of sanity
@@ -109,9 +83,9 @@ class DataNode(AstNode):

def getFunc(self, key, data):
if 'flag' in self.groupd and self.groupd['flag'] != None:
return data.getVarFlag(key, self.groupd['flag'], expand=False, noweakdefault=True)
return data.getVarFlag(key, self.groupd['flag'], noweakdefault=True)
else:
return data.getVar(key, False, noweakdefault=True, parsing=True)
return data.getVar(key, noweakdefault=True)

def eval(self, data):
groupd = self.groupd
@@ -162,42 +136,29 @@ class DataNode(AstNode):
if flag:
data.setVarFlag(key, flag, val, **loginfo)
else:
data.setVar(key, val, parsing=True, **loginfo)
data.setVar(key, val, **loginfo)

class MethodNode(AstNode):
tr_tbl = str.maketrans('/.+-@%&', '_______')
tr_tbl = string.maketrans('/.+-@%&', '_______')

def __init__(self, filename, lineno, func_name, body, python, fakeroot):
def __init__(self, filename, lineno, func_name, body):
AstNode.__init__(self, filename, lineno)
self.func_name = func_name
self.body = body
self.python = python
self.fakeroot = fakeroot

def eval(self, data):
text = '\n'.join(self.body)
funcname = self.func_name
if self.func_name == "__anonymous":
funcname = ("__anon_%s_%s" % (self.lineno, self.filename.translate(MethodNode.tr_tbl)))
self.python = True
text = "def %s(d):\n" % (funcname) + text
bb.methodpool.insert_method(funcname, text, self.filename, self.lineno - len(self.body))
anonfuncs = data.getVar('__BBANONFUNCS', False) or []
bb.methodpool.insert_method(funcname, text, self.filename)
anonfuncs = data.getVar('__BBANONFUNCS') or []
anonfuncs.append(funcname)
data.setVar('__BBANONFUNCS', anonfuncs)
if data.getVar(funcname, False):
# clean up old version of this piece of metadata, as its
# flags could cause problems
data.delVarFlag(funcname, 'python')
data.delVarFlag(funcname, 'fakeroot')
if self.python:
data.setVarFlag(funcname, "python", "1")
if self.fakeroot:
data.setVarFlag(funcname, "fakeroot", "1")
data.setVarFlag(funcname, "func", 1)
data.setVar(funcname, text, parsing=True)
data.setVarFlag(funcname, 'filename', self.filename)
data.setVarFlag(funcname, 'lineno', str(self.lineno - len(self.body)))
data.setVar(funcname, text)
else:
data.setVarFlag(self.func_name, "func", 1)
data.setVar(self.func_name, text)

class PythonMethodNode(AstNode):
def __init__(self, filename, lineno, function, modulename, body):
@@ -211,12 +172,31 @@ class PythonMethodNode(AstNode):
# 'this' file. This means we will not parse methods from
# bb classes twice
text = '\n'.join(self.body)
bb.methodpool.insert_method(self.modulename, text, self.filename, self.lineno - len(self.body) - 1)
bb.methodpool.insert_method(self.modulename, text, self.filename)
data.setVarFlag(self.function, "func", 1)
data.setVarFlag(self.function, "python", 1)
data.setVar(self.function, text, parsing=True)
data.setVarFlag(self.function, 'filename', self.filename)
data.setVarFlag(self.function, 'lineno', str(self.lineno - len(self.body) - 1))
data.setVar(self.function, text)

class MethodFlagsNode(AstNode):
def __init__(self, filename, lineno, key, m):
AstNode.__init__(self, filename, lineno)
self.key = key
self.m = m

def eval(self, data):
if data.getVar(self.key):
# clean up old version of this piece of metadata, as its
# flags could cause problems
data.setVarFlag(self.key, 'python', None)
data.setVarFlag(self.key, 'fakeroot', None)
if self.m.group("py") is not None:
data.setVarFlag(self.key, "python", "1")
else:
data.delVarFlag(self.key, "python")
if self.m.group("fr") is not None:
data.setVarFlag(self.key, "fakeroot", "1")
else:
data.delVarFlag(self.key, "fakeroot")

class ExportFuncsNode(AstNode):
def __init__(self, filename, lineno, fns, classname):
@@ -229,28 +209,26 @@ class ExportFuncsNode(AstNode):
for func in self.n:
calledfunc = self.classname + "_" + func

if data.getVar(func, False) and not data.getVarFlag(func, 'export_func', False):
if data.getVar(func) and not data.getVarFlag(func, 'export_func'):
continue

if data.getVar(func, False):
if data.getVar(func):
data.setVarFlag(func, 'python', None)
data.setVarFlag(func, 'func', None)

for flag in [ "func", "python" ]:
if data.getVarFlag(calledfunc, flag, False):
data.setVarFlag(func, flag, data.getVarFlag(calledfunc, flag, False))
if data.getVarFlag(calledfunc, flag):
data.setVarFlag(func, flag, data.getVarFlag(calledfunc, flag))
for flag in [ "dirs" ]:
if data.getVarFlag(func, flag, False):
data.setVarFlag(calledfunc, flag, data.getVarFlag(func, flag, False))
data.setVarFlag(func, "filename", "autogenerated")
data.setVarFlag(func, "lineno", 1)
if data.getVarFlag(func, flag):
data.setVarFlag(calledfunc, flag, data.getVarFlag(func, flag))

if data.getVarFlag(calledfunc, "python", False):
data.setVar(func, " bb.build.exec_func('" + calledfunc + "', d)\n", parsing=True)
if data.getVarFlag(calledfunc, "python"):
data.setVar(func, " bb.build.exec_func('" + calledfunc + "', d)\n")
else:
if "-" in self.classname:
bb.fatal("The classname %s contains a dash character and is calling an sh function %s using EXPORT_FUNCTIONS. Since a dash is illegal in sh function names, this cannot work, please rename the class or don't use EXPORT_FUNCTIONS." % (self.classname, calledfunc))
data.setVar(func, " " + calledfunc + "\n", parsing=True)
data.setVar(func, " " + calledfunc + "\n")
data.setVarFlag(func, 'export_func', '1')

class AddTaskNode(AstNode):
@@ -277,7 +255,7 @@ class BBHandlerNode(AstNode):
self.hs = fns.split()

def eval(self, data):
bbhands = data.getVar('__BBHANDLERS', False) or []
bbhands = data.getVar('__BBHANDLERS') or []
for h in self.hs:
bbhands.append(h)
data.setVarFlag(h, "handler", 1)
@@ -297,21 +275,18 @@ def handleInclude(statements, filename, lineno, m, force):
def handleExport(statements, filename, lineno, m):
statements.append(ExportNode(filename, lineno, m.group(1)))

def handleUnset(statements, filename, lineno, m):
statements.append(UnsetNode(filename, lineno, m.group(1)))

def handleUnsetFlag(statements, filename, lineno, m):
statements.append(UnsetFlagNode(filename, lineno, m.group(1), m.group(2)))

def handleData(statements, filename, lineno, groupd):
statements.append(DataNode(filename, lineno, groupd))

def handleMethod(statements, filename, lineno, func_name, body, python, fakeroot):
statements.append(MethodNode(filename, lineno, func_name, body, python, fakeroot))
def handleMethod(statements, filename, lineno, func_name, body):
statements.append(MethodNode(filename, lineno, func_name, body))

def handlePythonMethod(statements, filename, lineno, funcname, modulename, body):
statements.append(PythonMethodNode(filename, lineno, funcname, modulename, body))

def handleMethodFlags(statements, filename, lineno, key, m):
statements.append(MethodFlagsNode(filename, lineno, key, m))

def handleExportFuncs(statements, filename, lineno, m, classname):
statements.append(ExportFuncsNode(filename, lineno, m.group(1), classname))

@@ -339,35 +314,30 @@ def handleInherit(statements, filename, lineno, m):
statements.append(InheritNode(filename, lineno, classes))

def finalize(fn, d, variant = None):
saved_handlers = bb.event.get_handlers().copy()

for var in d.getVar('__BBHANDLERS', False) or []:
all_handlers = {}
for var in d.getVar('__BBHANDLERS') or []:
# try to add the handler
handlerfn = d.getVarFlag(var, "filename", False)
if not handlerfn:
bb.fatal("Undefined event handler function '%s'" % var)
handlerln = int(d.getVarFlag(var, "lineno", False))
bb.event.register(var, d.getVar(var, False), (d.getVarFlag(var, "eventmask", True) or "").split(), handlerfn, handlerln)
bb.event.register(var, d.getVar(var), (d.getVarFlag(var, "eventmask", True) or "").split())

bb.event.fire(bb.event.RecipePreFinalise(fn), d)

bb.data.expandKeys(d)
bb.data.update_data(d)
code = []
for funcname in d.getVar("__BBANONFUNCS", False) or []:
for funcname in d.getVar("__BBANONFUNCS") or []:
code.append("%s(d)" % funcname)
bb.utils.better_exec("\n".join(code), {"d": d})
bb.data.update_data(d)

tasklist = d.getVar('__BBTASKS', False) or []
bb.build.add_tasks(tasklist, d)
tasklist = d.getVar('__BBTASKS') or []
deltasklist = d.getVar('__BBDELTASKS') or []
bb.build.add_tasks(tasklist, deltasklist, d)

bb.parse.siggen.finalise(fn, d, variant)

d.setVar('BBINCLUDED', bb.parse.get_file_depends(d))

bb.event.fire(bb.event.RecipeParsed(fn), d)
bb.event.set_handlers(saved_handlers)

def _create_variants(datastores, names, function, onlyfinalise):
def create_variant(name, orig_d, arg = None):
@@ -377,17 +347,17 @@ def _create_variants(datastores, names, function, onlyfinalise):
function(arg or name, new_d)
datastores[name] = new_d

for variant in list(datastores.keys()):
for variant, variant_d in datastores.items():
for name in names:
if not variant:
# Based on main recipe
create_variant(name, datastores[""])
create_variant(name, variant_d)
else:
create_variant("%s-%s" % (variant, name), datastores[variant], name)
create_variant("%s-%s" % (variant, name), variant_d, name)

def _expand_versions(versions):
def expand_one(version, start, end):
for i in range(start, end + 1):
for i in xrange(start, end + 1):
ver = _bbversions_re.sub(str(i), version, 1)
yield ver

@@ -496,13 +466,17 @@ def multi_finalize(fn, d):
safe_d.setVar("BBCLASSEXTEND", extended)
_create_variants(datastores, extendedmap.keys(), extendfunc, onlyfinalise)

for variant in datastores.keys():
for variant, variant_d in datastores.iteritems():
if variant:
try:
if not onlyfinalise or variant in onlyfinalise:
finalize(fn, datastores[variant], variant)
finalize(fn, variant_d, variant)
except bb.parse.SkipRecipe as e:
datastores[variant].setVar("__SKIPPED", e.args[0])
variant_d.setVar("__SKIPPED", e.args[0])

if len(datastores) > 1:
variants = filter(None, datastores.iterkeys())
safe_d.setVar("__VARIANTS", " ".join(variants))

datastores[""] = d
return datastores
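Several hunks in this file trade iteration over datastores.items()/iteritems() for iteration over list(datastores.keys()). The newer spelling snapshots the keys first, which is what makes it safe to insert new variant datastores into the dict while walking it; iterating a dict directly while adding keys raises RuntimeError in Python 3. A small demonstration:

datastores = {"": "base", "native": "native data"}

# Snapshot the keys first, then mutate freely while iterating.
for variant in list(datastores.keys()):
    datastores[variant + "-extended"] = "derived from " + variant

print(sorted(datastores))
# ['', '-extended', 'native', 'native-extended']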
@@ -25,14 +25,14 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.


from __future__ import absolute_import
import re, bb, os
import logging
import bb.build, bb.utils
from bb import data

from . import ConfHandler
from .. import resolve_file, ast, logger, ParseError
from .. import resolve_file, ast, logger
from .ConfHandler import include, init

# For compatibility
@@ -47,26 +47,37 @@ __addhandler_regexp__ = re.compile( r"addhandler\s+(.+)" )
__def_regexp__ = re.compile( r"def\s+(\w+).*:" )
__python_func_regexp__ = re.compile( r"(\s+.*)|(^$)" )

__infunc__ = []

__infunc__ = ""
__inpython__ = False
__body__ = []
__classname__ = ""

cached_statements = {}

# We need to indicate EOF to the feeder. This code is so messy that
# factoring it out to a close_parse_file method is out of question.
# We will use the IN_PYTHON_EOF as an indicator to just close the method
#
# The two parts using it are tightly integrated anyway
IN_PYTHON_EOF = -9999999999999


def supports(fn, d):
"""Return True if fn has a supported extension"""
return os.path.splitext(fn)[-1] in [".bb", ".bbclass", ".inc"]

def inherit(files, fn, lineno, d):
__inherit_cache = d.getVar('__inherit_cache', False) or []
__inherit_cache = d.getVar('__inherit_cache') or []
files = d.expand(files).split()
for file in files:
if not os.path.isabs(file) and not file.endswith(".bbclass"):
file = os.path.join('classes', '%s.bbclass' % file)

if not os.path.isabs(file):
bbpath = d.getVar("BBPATH", True)
dname = os.path.dirname(fn)
bbpath = "%s:%s" % (dname, d.getVar("BBPATH", True))
abs_fn, attempts = bb.utils.which(bbpath, file, history=True)
for af in attempts:
if af != abs_fn:
@@ -79,7 +90,7 @@ def inherit(files, fn, lineno, d):
__inherit_cache.append( file )
d.setVar('__inherit_cache', __inherit_cache)
include(fn, file, lineno, d, "inherit")
__inherit_cache = d.getVar('__inherit_cache', False) or []
__inherit_cache = d.getVar('__inherit_cache') or []

def get_statements(filename, absolute_filename, base_name):
global cached_statements
@@ -100,7 +111,7 @@ def get_statements(filename, absolute_filename, base_name):
file.close()
if __inpython__:
# add a blank line to close out any python definition
feeder(lineno, "", filename, base_name, statements, eof=True)
feeder(IN_PYTHON_EOF, "", filename, base_name, statements)

if filename.endswith(".bbclass") or filename.endswith(".inc"):
cached_statements[absolute_filename] = statements
@@ -109,7 +120,7 @@ def get_statements(filename, absolute_filename, base_name):
def handle(fn, d, include):
global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __residue__, __classname__
__body__ = []
__infunc__ = []
__infunc__ = ""
__classname__ = ""
__residue__ = []

@@ -119,13 +130,13 @@ def handle(fn, d, include):

if ext == ".bbclass":
__classname__ = root
__inherit_cache = d.getVar('__inherit_cache', False) or []
__inherit_cache = d.getVar('__inherit_cache') or []
if not fn in __inherit_cache:
__inherit_cache.append(fn)
d.setVar('__inherit_cache', __inherit_cache)

if include != 0:
oldfile = d.getVar('FILE', False)
oldfile = d.getVar('FILE')
else:
oldfile = None

@@ -138,7 +149,7 @@ def handle(fn, d, include):
statements = get_statements(fn, abs_fn, base_name)

# DONE WITH PARSING... time to evaluate
if ext != ".bbclass" and abs_fn != oldfile:
if ext != ".bbclass":
d.setVar('FILE', abs_fn)

try:
@@ -148,26 +159,21 @@ def handle(fn, d, include):
if include == 0:
return { "" : d }

if __infunc__:
raise ParseError("Shell function %s is never closed" % __infunc__[0], __infunc__[1], __infunc__[2])
if __residue__:
raise ParseError("Leftover unparsed (incomplete?) data %s from %s" % __residue__, fn)

if ext != ".bbclass" and include == 0:
return ast.multi_finalize(fn, d)

if ext != ".bbclass" and oldfile and abs_fn != oldfile:
if oldfile:
d.setVar("FILE", oldfile)

return d

def feeder(lineno, s, fn, root, statements, eof=False):
def feeder(lineno, s, fn, root, statements):
global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __def_regexp__, __python_func_regexp__, __inpython__, __infunc__, __body__, bb, __residue__, __classname__
if __infunc__:
if s == '}':
__body__.append('')
ast.handleMethod(statements, fn, lineno, __infunc__[0], __body__, __infunc__[3], __infunc__[4])
__infunc__ = []
ast.handleMethod(statements, fn, lineno, __infunc__, __body__)
__infunc__ = ""
__body__ = []
else:
__body__.append(s)
@@ -175,7 +181,7 @@ def feeder(lineno, s, fn, root, statements, eof=False):

if __inpython__:
m = __python_func_regexp__.match(s)
if m and not eof:
if m and lineno != IN_PYTHON_EOF:
__body__.append(s)
return
else:
@@ -184,7 +190,7 @@ def feeder(lineno, s, fn, root, statements, eof=False):
__body__ = []
__inpython__ = False

if eof:
if lineno == IN_PYTHON_EOF:
return

if s and s[0] == '#':
@@ -211,7 +217,8 @@ def feeder(lineno, s, fn, root, statements, eof=False):

m = __func_start_regexp__.match(s)
if m:
__infunc__ = [m.group("func") or "__anonymous", fn, lineno, m.group("py") is not None, m.group("fr") is not None]
__infunc__ = m.group("func") or "__anonymous"
ast.handleMethodFlags(statements, fn, lineno, __infunc__, m)
return

m = __def_regexp__.match(s)
@@ -24,9 +24,8 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import errno
import re
import os
import re, os
import logging
import bb.utils
from bb.parse import ParseError, resolve_file, ast, logger, handle

@@ -57,11 +56,9 @@ __config_regexp__ = re.compile( r"""
__include_regexp__ = re.compile( r"include\s+(.+)" )
__require_regexp__ = re.compile( r"require\s+(.+)" )
__export_regexp__ = re.compile( r"export\s+([a-zA-Z0-9\-_+.${}/]+)$" )
__unset_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/]+)$" )
__unset_flag_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/]+)\[([a-zA-Z0-9\-_+.${}/]+)\]$" )

def init(data):
topdir = data.getVar('TOPDIR', False)
topdir = data.getVar('TOPDIR')
if not topdir:
data.setVar('TOPDIR', os.getcwd())

@@ -86,26 +83,21 @@ def include(parentfn, fn, lineno, data, error_out):
bbpath = "%s:%s" % (dname, data.getVar("BBPATH", True))
abs_fn, attempts = bb.utils.which(bbpath, fn, history=True)
if abs_fn and bb.parse.check_dependency(data, abs_fn):
logger.warning("Duplicate inclusion for %s in %s" % (abs_fn, data.getVar('FILE', True)))
logger.warn("Duplicate inclusion for %s in %s" % (abs_fn, data.getVar('FILE', True)))
for af in attempts:
bb.parse.mark_dependency(data, af)
if abs_fn:
fn = abs_fn
elif bb.parse.check_dependency(data, fn):
logger.warning("Duplicate inclusion for %s in %s" % (fn, data.getVar('FILE', True)))
logger.warn("Duplicate inclusion for %s in %s" % (fn, data.getVar('FILE', True)))

try:
bb.parse.handle(fn, data, True)
except (IOError, OSError) as exc:
if exc.errno == errno.ENOENT:
if error_out:
raise ParseError("Could not %s file %s" % (error_out, fn), parentfn, lineno)
logger.debug(2, "CONF file '%s' not found", fn)
else:
if error_out:
raise ParseError("Could not %s file %s: %s" % (error_out, fn, exc.strerror), parentfn, lineno)
else:
raise ParseError("Error parsing %s: %s" % (fn, exc.strerror), parentfn, lineno)
ret = bb.parse.handle(fn, data, True)
except (IOError, OSError):
if error_out:
raise ParseError("Could not %(error_out)s file %(fn)s" % vars(), parentfn, lineno)
logger.debug(2, "CONF file '%s' not found", fn)
bb.parse.mark_dependency(data, fn)

# We have an issue where a UI might want to enforce particular settings such as
# an empty DISTRO variable. If configuration files do something like assigning
@@ -121,7 +113,7 @@ def handle(fn, data, include):
if include == 0:
oldfile = None
else:
oldfile = data.getVar('FILE', False)
oldfile = data.getVar('FILE')

abs_fn = resolve_file(fn, data)
f = open(abs_fn, 'r')
@@ -187,16 +179,6 @@ def feeder(lineno, s, fn, statements):
ast.handleExport(statements, fn, lineno, m)
return

m = __unset_regexp__.match(s)
if m:
ast.handleUnset(statements, fn, lineno, m)
return

m = __unset_flag_regexp__.match(s)
if m:
ast.handleUnsetFlag(statements, fn, lineno, m)
return

raise ParseError("unparsed line: '%s'" % s, fn, lineno);

# Add us to the handlers list
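The longer include() above distinguishes a missing file (errno.ENOENT, often tolerable for optional includes) from other I/O failures, which the shorter version collapses into one path. A self-contained sketch of that errno check; the function name and messages are illustrative, not BitBake's API:

import errno

def read_conf(fn, error_out=False):
    """Return the file's text; treat 'missing' and 'unreadable' differently."""
    try:
        with open(fn) as f:
            return f.read()
    except (IOError, OSError) as exc:
        if exc.errno == errno.ENOENT:
            if error_out:
                raise RuntimeError("Could not include file %s" % fn)
            print("CONF file '%s' not found" % fn)  # missing is often tolerable
            return None
        # anything else (permissions, I/O error) is always fatal
        raise RuntimeError("Error parsing %s: %s" % (fn, exc.strerror))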
@@ -92,9 +92,9 @@ class SQLTable(collections.MutableMapping):
self._execute("DELETE from %s where key=?;" % self.table, [key])

def __setitem__(self, key, value):
if not isinstance(key, str):
if not isinstance(key, basestring):
raise TypeError('Only string keys are supported')
elif not isinstance(value, str):
elif not isinstance(value, basestring):
raise TypeError('Only string values are supported')

data = self._execute("SELECT * from %s where key=?;" %
@@ -178,7 +178,7 @@ class PersistData(object):
"""
Return a list of key + value pairs for a domain
"""
return list(self.data[domain].items())
return self.data[domain].items()

def getValue(self, domain, key):
"""
@@ -201,7 +201,6 @@ class PersistData(object):
def connect(database):
connection = sqlite3.connect(database, timeout=5, isolation_level=None)
connection.execute("pragma synchronous = off;")
connection.text_factory = str
return connection

def persist(domain, d):
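The connect() shown above opens SQLite in autocommit mode (isolation_level=None) and trades durability for speed with synchronous = off; the longer side also forces str as the text factory so lookups return str on Python 3 as well. A runnable sketch of the same connection setup against an in-memory database:

import sqlite3

def connect(database):
    # timeout=5: wait up to 5s on a locked database instead of failing at once
    connection = sqlite3.connect(database, timeout=5, isolation_level=None)
    connection.execute("pragma synchronous = off;")
    connection.text_factory = str
    return connection

conn = connect(":memory:")
conn.execute("CREATE TABLE kv (key TEXT, value TEXT)")
conn.execute("INSERT INTO kv VALUES (?, ?)", ("PN", "busybox"))
print(conn.execute("SELECT value FROM kv WHERE key=?", ("PN",)).fetchone()[0])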
@@ -17,7 +17,7 @@ class CmdError(RuntimeError):
self.msg = msg

def __str__(self):
if not isinstance(self.command, str):
if not isinstance(self.command, basestring):
cmd = subprocess.list2cmdline(self.command)
else:
cmd = self.command
@@ -64,7 +64,7 @@ class Popen(subprocess.Popen):
options.update(kwargs)
subprocess.Popen.__init__(self, *args, **options)

def _logged_communicate(pipe, log, input, extrafiles):
def _logged_communicate(pipe, log, input):
if pipe.stdin:
if input is not None:
pipe.stdin.write(input)
@@ -79,26 +79,10 @@ def _logged_communicate(pipe, log, input, extrafiles):
if pipe.stderr is not None:
bb.utils.nonblockingfd(pipe.stderr.fileno())
rin.append(pipe.stderr)
for fobj, _ in extrafiles:
bb.utils.nonblockingfd(fobj.fileno())
rin.append(fobj)

def readextras(selected):
for fobj, func in extrafiles:
if fobj in selected:
try:
data = fobj.read()
except IOError as err:
if err.errno == errno.EAGAIN or err.errno == errno.EWOULDBLOCK:
data = None
if data is not None:
func(data)

try:
while pipe.poll() is None:
rlist = rin
stdoutbuf = b""
stderrbuf = b""
try:
r,w,e = select.select (rlist, [], [], 1)
except OSError as e:
@@ -106,48 +90,29 @@ def _logged_communicate(pipe, log, input, extrafiles):
raise

if pipe.stdout in r:
data = stdoutbuf + pipe.stdout.read()
if data is not None and len(data) > 0:
try:
data = data.decode("utf-8")
outdata.append(data)
log.write(data)
stdoutbuf = b""
except UnicodeDecodeError:
stdoutbuf = data
data = pipe.stdout.read()
if data is not None:
outdata.append(data)
log.write(data)

if pipe.stderr in r:
data = stderrbuf + pipe.stderr.read()
if data is not None and len(data) > 0:
try:
data = data.decode("utf-8")
errdata.append(data)
log.write(data)
stderrbuf = b""
except UnicodeDecodeError:
stderrbuf = data

readextras(r)

data = pipe.stderr.read()
if data is not None:
errdata.append(data)
log.write(data)
finally:
log.flush()

readextras([fobj for fobj, _ in extrafiles])

if pipe.stdout is not None:
pipe.stdout.close()
if pipe.stderr is not None:
pipe.stderr.close()
return ''.join(outdata), ''.join(errdata)

def run(cmd, input=None, log=None, extrafiles=None, **options):
def run(cmd, input=None, log=None, **options):
"""Convenience function to run a command and return its output, raising an
exception when the command fails"""

if not extrafiles:
extrafiles = []

if isinstance(cmd, str) and not "shell" in options:
if isinstance(cmd, basestring) and not "shell" in options:
options["shell"] = True

try:
@@ -159,13 +124,9 @@ def run(cmd, input=None, log=None, extrafiles=None, **options):
raise CmdError(cmd, exc)

if log:
stdout, stderr = _logged_communicate(pipe, log, input, extrafiles)
stdout, stderr = _logged_communicate(pipe, log, input)
else:
stdout, stderr = pipe.communicate(input)
if stdout:
stdout = stdout.decode("utf-8")
if stderr:
stderr = stderr.decode("utf-8")

if pipe.returncode != 0:
raise ExecutionError(cmd, pipe.returncode, stdout, stderr)
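The heart of the longer _logged_communicate is the stdoutbuf/stderrbuf pair: a raw read from a non-blocking pipe can end mid-way through a multi-byte UTF-8 sequence, so undecodable bytes are carried over and prepended to the next read. A minimal sketch of that buffering, fed from a plain list of byte chunks instead of a pipe:

def decode_stream(chunks):
    """Decode byte chunks that may split UTF-8 sequences across reads."""
    buf = b""
    out = []
    for chunk in chunks:
        data = buf + chunk
        try:
            out.append(data.decode("utf-8"))
            buf = b""
        except UnicodeDecodeError:
            buf = data          # incomplete sequence: retry once more bytes arrive
    return "".join(out)

# The two bytes of an e-acute arrive split across two reads:
print(decode_stream([b"caf\xc3", b"\xa9 ready"]))  # "café ready"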
@@ -1,276 +0,0 @@
"""
BitBake progress handling code
"""

# Copyright (C) 2016 Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import sys
import re
import time
import inspect
import bb.event
import bb.build

class ProgressHandler(object):
"""
Base class that can pretend to be a file object well enough to be
used to build objects to intercept console output and determine the
progress of some operation.
"""
def __init__(self, d, outfile=None):
self._progress = 0
self._data = d
self._lastevent = 0
if outfile:
self._outfile = outfile
else:
self._outfile = sys.stdout

def _fire_progress(self, taskprogress, rate=None):
"""Internal function to fire the progress event"""
bb.event.fire(bb.build.TaskProgress(taskprogress, rate), self._data)

def write(self, string):
self._outfile.write(string)

def flush(self):
self._outfile.flush()

def update(self, progress, rate=None):
ts = time.time()
if progress > 100:
progress = 100
if progress != self._progress or self._lastevent + 1 < ts:
self._fire_progress(progress, rate)
self._lastevent = ts
self._progress = progress

class LineFilterProgressHandler(ProgressHandler):
"""
A ProgressHandler variant that provides the ability to filter out
the lines if they contain progress information. Additionally, it
filters out anything before the last line feed on a line. This can
be used to keep the logs clean of output that we've only enabled for
getting progress, assuming that that can be done on a per-line
basis.
"""
def __init__(self, d, outfile=None):
self._linebuffer = ''
super(LineFilterProgressHandler, self).__init__(d, outfile)

def write(self, string):
self._linebuffer += string
while True:
breakpos = self._linebuffer.find('\n') + 1
if breakpos == 0:
break
line = self._linebuffer[:breakpos]
self._linebuffer = self._linebuffer[breakpos:]
# Drop any line feeds and anything that precedes them
lbreakpos = line.rfind('\r') + 1
if lbreakpos:
line = line[lbreakpos:]
if self.writeline(line):
super(LineFilterProgressHandler, self).write(line)

def writeline(self, line):
return True

class BasicProgressHandler(ProgressHandler):
def __init__(self, d, regex=r'(\d+)%', outfile=None):
super(BasicProgressHandler, self).__init__(d, outfile)
self._regex = re.compile(regex)
# Send an initial progress event so the bar gets shown
self._fire_progress(0)

def write(self, string):
percs = self._regex.findall(string)
if percs:
progress = int(percs[-1])
self.update(progress)
super(BasicProgressHandler, self).write(string)

class OutOfProgressHandler(ProgressHandler):
def __init__(self, d, regex, outfile=None):
super(OutOfProgressHandler, self).__init__(d, outfile)
self._regex = re.compile(regex)
# Send an initial progress event so the bar gets shown
self._fire_progress(0)

def write(self, string):
nums = self._regex.findall(string)
if nums:
progress = (float(nums[-1][0]) / float(nums[-1][1])) * 100
self.update(progress)
super(OutOfProgressHandler, self).write(string)

class MultiStageProgressReporter(object):
"""
Class which allows reporting progress without the caller
having to know where they are in the overall sequence. Useful
for tasks made up of python code spread across multiple
classes / functions - the progress reporter object can
be passed around or stored at the object level and calls
to next_stage() and update() made whereever needed.
"""
def __init__(self, d, stage_weights, debug=False):
"""
Initialise the progress reporter.

Parameters:
* d: the datastore (needed for firing the events)
* stage_weights: a list of weight values, one for each stage.
The value is scaled internally so you only need to specify
values relative to other values in the list, so if there
are two stages and the first takes 2s and the second takes
10s you would specify [2, 10] (or [1, 5], it doesn't matter).
* debug: specify True (and ensure you call finish() at the end)
in order to show a printout of the calculated stage weights
based on timing each stage. Use this to determine what the
weights should be when you're not sure.
"""
self._data = d
total = sum(stage_weights)
self._stage_weights = [float(x)/total for x in stage_weights]
self._stage = -1
self._base_progress = 0
# Send an initial progress event so the bar gets shown
self._fire_progress(0)
self._debug = debug
self._finished = False
if self._debug:
self._last_time = time.time()
self._stage_times = []
self._stage_total = None
self._callers = []

def _fire_progress(self, taskprogress):
bb.event.fire(bb.build.TaskProgress(taskprogress), self._data)

def next_stage(self, stage_total=None):
"""
Move to the next stage.
Parameters:
* stage_total: optional total for progress within the stage,
see update() for details
NOTE: you need to call this before the first stage.
"""
self._stage += 1
self._stage_total = stage_total
if self._stage == 0:
# First stage
if self._debug:
self._last_time = time.time()
else:
if self._stage < len(self._stage_weights):
self._base_progress = sum(self._stage_weights[:self._stage]) * 100
if self._debug:
currtime = time.time()
self._stage_times.append(currtime - self._last_time)
self._last_time = currtime
self._callers.append(inspect.getouterframes(inspect.currentframe())[1])
elif not self._debug:
bb.warn('ProgressReporter: current stage beyond declared number of stages')
self._base_progress = 100
self._fire_progress(self._base_progress)

def update(self, stage_progress):
"""
Update progress within the current stage.
Parameters:
* stage_progress: progress value within the stage. If stage_total
was specified when next_stage() was last called, then this
value is considered to be out of stage_total, otherwise it should
be a percentage value from 0 to 100.
"""
if self._stage_total:
stage_progress = (float(stage_progress) / self._stage_total) * 100
if self._stage < 0:
bb.warn('ProgressReporter: update called before first call to next_stage()')
elif self._stage < len(self._stage_weights):
progress = self._base_progress + (stage_progress * self._stage_weights[self._stage])
else:
progress = self._base_progress
if progress > 100:
progress = 100
self._fire_progress(progress)

def finish(self):
if self._finished:
return
self._finished = True
if self._debug:
import math
self._stage_times.append(time.time() - self._last_time)
mintime = max(min(self._stage_times), 0.01)
self._callers.append(None)
stage_weights = [int(math.ceil(x / mintime)) for x in self._stage_times]
bb.warn('Stage weights: %s' % stage_weights)
out = []
for stage_weight, caller in zip(stage_weights, self._callers):
if caller:
out.append('Up to %s:%d: %d' % (caller[1], caller[2], stage_weight))
else:
out.append('Up to finish: %d' % stage_weight)
bb.warn('Stage times:\n %s' % '\n '.join(out))

class MultiStageProcessProgressReporter(MultiStageProgressReporter):
"""
Version of MultiStageProgressReporter intended for use with
standalone processes (such as preparing the runqueue)
"""
def __init__(self, d, processname, stage_weights, debug=False):
self._processname = processname
self._started = False
MultiStageProgressReporter.__init__(self, d, stage_weights, debug)

def start(self):
if not self._started:
bb.event.fire(bb.event.ProcessStarted(self._processname, 100), self._data)
self._started = True

def _fire_progress(self, taskprogress):
if taskprogress == 0:
self.start()
return
bb.event.fire(bb.event.ProcessProgress(self._processname, taskprogress), self._data)

def finish(self):
MultiStageProgressReporter.finish(self)
bb.event.fire(bb.event.ProcessFinished(self._processname), self._data)

class DummyMultiStageProcessProgressReporter(MultiStageProgressReporter):
"""
MultiStageProcessProgressReporter that takes the calls and does nothing
with them (to avoid a bunch of "if progress_reporter:" checks)
"""
def __init__(self):
MultiStageProcessProgressReporter.__init__(self, "", None, [])

def _fire_progress(self, taskprogress, rate=None):
pass

def start(self):
pass

def next_stage(self, stage_total=None):
pass

def update(self, stage_progress):
pass

def finish(self):
pass
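MultiStageProgressReporter, removed wholesale in this hunk, maps per-stage progress onto an overall 0-100 scale by normalising the stage weights. The arithmetic is easy to lift out of the event plumbing; a dependency-free sketch with bb.event firing replaced by print:

class StagedProgress:
    """Weighted multi-stage progress, modelled on MultiStageProgressReporter."""
    def __init__(self, stage_weights):
        total = sum(stage_weights)
        self._weights = [float(w) / total for w in stage_weights]
        self._stage = -1
        self._base = 0

    def next_stage(self):
        self._stage += 1
        # progress already banked by the completed stages
        self._base = sum(self._weights[:self._stage]) * 100

    def update(self, stage_progress):
        # stage_progress is 0-100 within the current stage
        overall = self._base + stage_progress * self._weights[self._stage]
        print("overall progress: %.1f%%" % min(overall, 100))

p = StagedProgress([2, 10])        # first stage ~2s, second ~10s
p.next_stage(); p.update(50)       # overall progress: 8.3%
p.next_stage(); p.update(50)       # overall progress: 58.3%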
@@ -121,14 +121,11 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
preferred_file = None
preferred_ver = None

# pn can contain '_', e.g. gcc-cross-x86_64 and an override cannot
# hence we do this manually rather than use OVERRIDES
preferred_v = cfgData.getVar("PREFERRED_VERSION_pn-%s" % pn, True)
if not preferred_v:
preferred_v = cfgData.getVar("PREFERRED_VERSION_%s" % pn, True)
if not preferred_v:
preferred_v = cfgData.getVar("PREFERRED_VERSION", True)
localdata = data.createCopy(cfgData)
localdata.setVar('OVERRIDES', "%s:pn-%s:%s" % (data.getVar('OVERRIDES', localdata), pn, pn))
bb.data.update_data(localdata)

preferred_v = localdata.getVar('PREFERRED_VERSION', True)
if preferred_v:
m = re.match('(\d+:)*(.*)(_.*)*', preferred_v)
if m:
@@ -226,7 +223,7 @@ def findBestProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
def _filterProviders(providers, item, cfgData, dataCache):
"""
Take a list of providers and filter/reorder according to the
environment variables
environment variables and previous build results
"""
eligible = []
preferred_versions = {}
@@ -245,7 +242,7 @@ def _filterProviders(providers, item, cfgData, dataCache):
pkg_pn[pn] = []
pkg_pn[pn].append(p)

logger.debug(1, "providers for %s are: %s", item, list(pkg_pn.keys()))
logger.debug(1, "providers for %s are: %s", item, pkg_pn.keys())

# First add PREFERRED_VERSIONS
for pn in pkg_pn:
@@ -283,7 +280,7 @@ def _filterProviders(providers, item, cfgData, dataCache):
def filterProviders(providers, item, cfgData, dataCache):
"""
Take a list of providers and filter/reorder according to the
environment variables
environment variables and previous build results
Takes a "normal" target item
"""

@@ -311,56 +308,38 @@ def filterProviders(providers, item, cfgData, dataCache):
def filterProvidersRunTime(providers, item, cfgData, dataCache):
"""
Take a list of providers and filter/reorder according to the
environment variables
environment variables and previous build results
Takes a "runtime" target item
"""

eligible = _filterProviders(providers, item, cfgData, dataCache)

# First try and match any PREFERRED_RPROVIDER entry
prefervar = cfgData.getVar('PREFERRED_RPROVIDER_%s' % item, True)
foundUnique = False
if prefervar:
for p in eligible:
pn = dataCache.pkg_fn[p]
if prefervar == pn:
logger.verbose("selecting %s to satisfy %s due to PREFERRED_RPROVIDER", pn, item)
eligible.remove(p)
eligible = [p] + eligible
foundUnique = True
numberPreferred = 1
# Should use dataCache.preferred here?
preferred = []
preferred_vars = []
pns = {}
for p in eligible:
pns[dataCache.pkg_fn[p]] = p
for p in eligible:
pn = dataCache.pkg_fn[p]
provides = dataCache.pn_provides[pn]
for provide in provides:
prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % provide, True)
#logger.debug(1, "checking PREFERRED_PROVIDER_%s (value %s) against %s", provide, prefervar, pns.keys())
if prefervar in pns and pns[prefervar] not in preferred:
var = "PREFERRED_PROVIDER_%s = %s" % (provide, prefervar)
logger.verbose("selecting %s to satisfy runtime %s due to %s", prefervar, item, var)
preferred_vars.append(var)
pref = pns[prefervar]
eligible.remove(pref)
eligible = [pref] + eligible
preferred.append(pref)
break

# If we didn't find an RPROVIDER entry, try and infer the provider from PREFERRED_PROVIDER entries
# by looking through the provides of each eligible recipe and seeing if a PREFERRED_PROVIDER was set.
# This is most useful for virtual/ entries rather than having a RPROVIDER per entry.
if not foundUnique:
# Should use dataCache.preferred here?
preferred = []
preferred_vars = []
pns = {}
for p in eligible:
pns[dataCache.pkg_fn[p]] = p
for p in eligible:
pn = dataCache.pkg_fn[p]
provides = dataCache.pn_provides[pn]
for provide in provides:
prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % provide, True)
#logger.debug(1, "checking PREFERRED_PROVIDER_%s (value %s) against %s", provide, prefervar, pns.keys())
if prefervar in pns and pns[prefervar] not in preferred:
var = "PREFERRED_PROVIDER_%s = %s" % (provide, prefervar)
logger.verbose("selecting %s to satisfy runtime %s due to %s", prefervar, item, var)
preferred_vars.append(var)
pref = pns[prefervar]
eligible.remove(pref)
eligible = [pref] + eligible
preferred.append(pref)
break

numberPreferred = len(preferred)
numberPreferred = len(preferred)

if numberPreferred > 1:
logger.error("Trying to resolve runtime dependency %s resulted in conflicting PREFERRED_PROVIDER entries being found.\nThe providers found were: %s\nThe PREFERRED_PROVIDER entries resulting in this conflict were: %s. You could set PREFERRED_RPROVIDER_%s" % (item, preferred, preferred_vars, item))
logger.error("Trying to resolve runtime dependency %s resulted in conflicting PREFERRED_PROVIDER entries being found.\nThe providers found were: %s\nThe PREFERRED_PROVIDER entries resulting in this conflict were: %s", item, preferred, preferred_vars)

logger.debug(1, "sorted runtime providers for %s are: %s", item, eligible)

@@ -400,32 +379,3 @@ def getRuntimeProviders(dataCache, rdepend):
logger.debug(1, "Assuming %s is a dynamic package, but it may not exist" % rdepend)

return rproviders


def buildWorldTargetList(dataCache, task=None):
"""
Build package list for "bitbake world"
"""
if dataCache.world_target:
return

logger.debug(1, "collating packages for \"world\"")
for f in dataCache.possible_world:
terminal = True
pn = dataCache.pkg_fn[f]
if task and task not in dataCache.task_deps[f]['tasks']:
logger.debug(2, "World build skipping %s as task %s doesn't exist", f, task)
terminal = False

for p in dataCache.pn_provides[pn]:
if p.startswith('virtual/'):
logger.debug(2, "World build skipping %s due to %s provider starting with virtual/", f, p)
terminal = False
break
for pf in dataCache.providers[p]:
if dataCache.pkg_fn[pf] != pn:
logger.debug(2, "World build skipping %s due to both us and %s providing %s", f, pf, p)
terminal = False
break
if terminal:
dataCache.world_target.add(pn)
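Both versions of filterProvidersRunTime reorder rather than filter: the preferred provider is moved to the front of the eligible list so later code can simply take eligible[0]. The list manipulation in isolation, with illustrative recipe names:

eligible = ["busybox", "coreutils", "toybox"]
prefer = "toybox"

if prefer in eligible:
    eligible.remove(prefer)
    eligible = [prefer] + eligible  # preferred provider goes first

print(eligible)  # ['toybox', 'busybox', 'coreutils']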
@@ -527,7 +527,7 @@ def utility_sed(name, args, interp, env, stdin, stdout, stderr, debugflags):
print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')

# Scan pattern arguments and append a space if necessary
for i in range(len(args)):
for i in xrange(len(args)):
if not RE_SED.search(args[i]):
continue
args[i] = args[i] + ' '

@@ -474,7 +474,7 @@ class Environment:
"""
# Save and remove previous arguments
prevargs = []
for i in range(int(self._env['#'])):
for i in xrange(int(self._env['#'])):
i = str(i+1)
prevargs.append(self._env[i])
del self._env[i]
@@ -488,7 +488,7 @@ class Environment:
return prevargs

def get_positional_args(self):
return [self._env[str(i+1)] for i in range(int(self._env['#']))]
return [self._env[str(i+1)] for i in xrange(int(self._env['#']))]

def get_variables(self):
return dict(self._env)

@@ -20,7 +20,7 @@ except NameError:
from Set import Set as set

from ply import lex
from bb.pysh.sherrors import *
from sherrors import *

class NeedMore(Exception):
pass

@@ -10,11 +10,11 @@
import os.path
import sys

import bb.pysh.pyshlex as pyshlex
import pyshlex
tokens = pyshlex.tokens

from ply import yacc
import bb.pysh.sherrors as sherrors
import sherrors

class IORedirect:
def __init__(self, op, filename, io_number=None):
File diff suppressed because it is too large
@@ -63,9 +63,6 @@ class BitBakeBaseServerConnection():
|
||||
def terminate(self):
|
||||
pass
|
||||
|
||||
def setupEventQueue(self):
|
||||
pass
|
||||
|
||||
|
||||
""" BitBakeBaseServer class is the common ancestor to all Bitbake servers
|
||||
|
||||
|
||||
@@ -30,7 +30,7 @@ import signal
|
||||
import sys
|
||||
import time
|
||||
import select
|
||||
from queue import Empty
|
||||
from Queue import Empty
|
||||
from multiprocessing import Event, Process, util, Queue, Pipe, queues, Manager
|
||||
|
||||
from . import BitBakeBaseServer, BitBakeBaseServerConnection, BaseImplServer
|
||||
@@ -53,12 +53,10 @@ class ServerCommunicator():
|
||||
while True:
|
||||
# don't let the user ctrl-c while we're waiting for a response
|
||||
try:
|
||||
for idx in range(0,4): # 0, 1, 2, 3
|
||||
if self.connection.poll(5):
|
||||
return self.connection.recv()
|
||||
else:
|
||||
bb.warn("Timeout while attempting to communicate with bitbake server")
|
||||
bb.fatal("Gave up; Too many tries: timeout while attempting to communicate with bitbake server")
|
||||
if self.connection.poll(20):
|
||||
return self.connection.recv()
|
||||
else:
|
||||
bb.fatal("Timeout while attempting to communicate with bitbake server")
|
||||
except KeyboardInterrupt:
|
||||
pass
|
||||
|
||||
@@ -99,7 +97,7 @@ class ProcessServer(Process, BaseImplServer):
|
||||
def run(self):
|
||||
for event in bb.event.ui_queue:
|
||||
self.event_queue.put(event)
|
||||
self.event_handle.value = bb.event.register_UIHhandler(self, True)
|
||||
self.event_handle.value = bb.event.register_UIHhandler(self)
|
||||
|
||||
bb.cooker.server_main(self.cooker, self.main)
|
||||
|
||||
@@ -108,7 +106,6 @@ class ProcessServer(Process, BaseImplServer):
|
||||
# the UI and communicated to us
|
||||
self.quitin.close()
|
||||
signal.signal(signal.SIGINT, signal.SIG_IGN)
|
||||
bb.utils.set_process_name("Cooker")
|
||||
while not self.quit:
|
||||
try:
|
||||
if self.command_channel.poll():
|
||||
@@ -117,10 +114,6 @@ class ProcessServer(Process, BaseImplServer):
|
||||
if self.quitout.poll():
|
||||
self.quitout.recv()
|
||||
self.quit = True
|
||||
try:
|
||||
self.runCommand(["stateForceShutdown"])
|
||||
except:
|
||||
pass
|
||||
|
||||
self.idle_commands(.1, [self.command_channel, self.quitout])
|
||||
except Exception:
|
||||
@@ -130,14 +123,11 @@ class ProcessServer(Process, BaseImplServer):
|
||||
bb.event.unregister_UIHhandler(self.event_handle.value)
|
||||
self.command_channel.close()
|
||||
self.cooker.shutdown(True)
|
||||
self.quitout.close()
|
||||
|
||||
def idle_commands(self, delay, fds=None):
|
||||
def idle_commands(self, delay, fds = []):
|
||||
nextsleep = delay
|
||||
if not fds:
|
||||
fds = []
|
||||
|
||||
for function, data in list(self._idlefuns.items()):
|
||||
for function, data in self._idlefuns.items():
|
||||
try:
|
||||
retval = function(self, data, False)
|
||||
if retval is False:
|
||||
@@ -145,7 +135,7 @@ class ProcessServer(Process, BaseImplServer):
|
||||
nextsleep = None
|
||||
elif retval is True:
|
||||
nextsleep = None
|
||||
elif isinstance(retval, float) and nextsleep:
|
||||
elif isinstance(retval, float):
|
||||
if (retval < nextsleep):
|
||||
nextsleep = retval
|
||||
elif nextsleep is None:
|
||||
@@ -154,9 +144,8 @@ class ProcessServer(Process, BaseImplServer):
|
||||
fds = fds + retval
|
||||
except SystemExit:
|
||||
raise
|
||||
except Exception as exc:
|
||||
if not isinstance(exc, bb.BBHandledException):
|
||||
logger.exception('Running idle function')
|
||||
except Exception:
|
||||
logger.exception('Running idle function')
|
||||
del self._idlefuns[function]
|
||||
self.quit = True
|
||||
|
||||
@@ -180,16 +169,12 @@ class BitBakeProcessServerConnection(BitBakeBaseServerConnection):
|
||||
self.event_queue = event_queue
|
||||
self.connection = ServerCommunicator(self.ui_channel, self.procserver.event_handle, self.procserver)
|
||||
self.events = self.event_queue
|
||||
self.terminated = False
|
||||
|
||||
def sigterm_terminate(self):
|
||||
bb.error("UI received SIGTERM")
|
||||
self.terminate()
|
||||
|
||||
def terminate(self):
|
||||
if self.terminated:
|
||||
return
|
||||
self.terminated = True
|
||||
def flushevents():
|
||||
while True:
|
||||
try:
|
||||
@@ -213,19 +198,19 @@ class BitBakeProcessServerConnection(BitBakeBaseServerConnection):
|
||||
# Wrap Queue to provide API which isn't server implementation specific
|
||||
class ProcessEventQueue(multiprocessing.queues.Queue):
|
||||
def __init__(self, maxsize):
|
||||
multiprocessing.queues.Queue.__init__(self, maxsize, ctx=multiprocessing.get_context())
|
||||
multiprocessing.queues.Queue.__init__(self, maxsize)
|
||||
self.exit = False
|
||||
bb.utils.set_process_name("ProcessEQueue")
|
||||
|
||||
def setexit(self):
|
||||
self.exit = True
|
||||
|
||||
def waitEvent(self, timeout):
|
||||
if self.exit:
|
||||
return self.getEvent()
|
||||
sys.exit(1)
|
||||
try:
|
||||
if not self.server.is_alive():
|
||||
return self.getEvent()
|
||||
self.setexit()
|
||||
return None
|
||||
return self.get(True, timeout)
|
||||
except Empty:
|
||||
return None
|
||||
@@ -234,15 +219,14 @@ class ProcessEventQueue(multiprocessing.queues.Queue):
|
||||
try:
|
||||
if not self.server.is_alive():
|
||||
self.setexit()
|
||||
return None
|
||||
return self.get(False)
|
||||
except Empty:
|
||||
if self.exit:
|
||||
sys.exit(1)
|
||||
return None
|
||||
|
||||
|
||||
class BitBakeServer(BitBakeBaseServer):
|
||||
def initServer(self, single_use=True):
|
||||
def initServer(self):
|
||||
# establish communication channels. We use bidirectional pipes for
|
||||
# ui <--> server command/response pairs
|
||||
# and a queue for server -> ui event notifications
|
||||
|
||||
@@ -31,33 +31,31 @@
    in the server's main loop.
"""

import os
import sys

import hashlib
import time
import socket
import signal
import threading
import pickle
import inspect
import select
import http.client
import xmlrpc.client
from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler

import bb
import xmlrpclib, sys
from bb import daemonize
from bb.ui import uievent
from . import BitBakeBaseServer, BitBakeBaseServerConnection, BaseImplServer
import hashlib, time
import socket
import os, signal
import threading
try:
    import cPickle as pickle
except ImportError:
    import pickle

DEBUG = False

class BBTransport(xmlrpc.client.Transport):
from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
import inspect, select, httplib

from . import BitBakeBaseServer, BitBakeBaseServerConnection, BaseImplServer

class BBTransport(xmlrpclib.Transport):
    def __init__(self, timeout):
        self.timeout = timeout
        self.connection_token = None
        xmlrpc.client.Transport.__init__(self)
        xmlrpclib.Transport.__init__(self)

    # Modified from default to pass timeout to HTTPConnection
    def make_connection(self, host):
@@ -69,7 +67,7 @@ class BBTransport(xmlrpc.client.Transport):
        # create a HTTP connection object from a host descriptor
        chost, self._extra_headers, x509 = self.get_host_info(host)
        #store the host argument along with the connection object
        self._connection = host, http.client.HTTPConnection(chost, timeout=self.timeout)
        self._connection = host, httplib.HTTPConnection(chost, timeout=self.timeout)
        return self._connection[1]

    def set_connection_token(self, token):
@@ -78,30 +76,13 @@ class BBTransport(xmlrpc.client.Transport):
    def send_content(self, h, body):
        if self.connection_token:
            h.putheader("Bitbake-token", self.connection_token)
        xmlrpc.client.Transport.send_content(self, h, body)
        xmlrpclib.Transport.send_content(self, h, body)

def _create_server(host, port, timeout = 60):
    t = BBTransport(timeout)
    s = xmlrpc.client.ServerProxy("http://%s:%d/" % (host, port), transport=t, allow_none=True, use_builtin_types=True)
    s = xmlrpclib.ServerProxy("http://%s:%d/" % (host, port), transport=t, allow_none=True)
    return s, t

def check_connection(remote, timeout):
    try:
        host, port = remote.split(":")
        port = int(port)
    except Exception as e:
        bb.warn("Failed to read remote definition (%s)" % str(e))
        raise e

    server, _transport = _create_server(host, port, timeout)
    try:
        ret, err = server.runCommand(['getVariable', 'TOPDIR'])
        if err or not ret:
            return False
    except ConnectionError:
        return False
    return True

class BitBakeServerCommands():

    def __init__(self, server):
@@ -116,10 +97,10 @@ class BitBakeServerCommands():

        # we don't allow connections if the cooker is running
        if (self.cooker.state in [bb.cooker.state.parsing, bb.cooker.state.running]):
            return None, "Cooker is busy: %s" % bb.cooker.state.get_name(self.cooker.state)
            return None

        self.event_handle = bb.event.register_UIHhandler(s, True)
        return self.event_handle, 'OK'
        self.event_handle = bb.event.register_UIHhandler(s)
        return self.event_handle

    def unregisterEventHandler(self, handlerNum):
        """
@@ -147,7 +128,7 @@ class BitBakeServerCommands():
    def addClient(self):
        if self.has_client:
            return None
        token = hashlib.md5(str(time.time()).encode("utf-8")).hexdigest()
        token = hashlib.md5(str(time.time())).hexdigest()
        self.server.set_connection_token(token)
        self.has_client = True
        return token
@@ -197,7 +178,7 @@ class XMLRPCProxyServer(BaseImplServer):
    """ not a real working server, but a stub for a proxy server connection

    """
    def __init__(self, host, port, use_builtin_types=True):
    def __init__(self, host, port):
        self.host = host
        self.port = port

@@ -205,12 +186,13 @@ class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer):
    # remove this when you're done with debugging
    # allow_reuse_address = True

    def __init__(self, interface, single_use=False, idle_timeout=0):
    def __init__(self, interface):
        """
        Constructor
        """
        BaseImplServer.__init__(self)
        self.single_use = single_use
        if (interface[1] == 0):     # anonymous port, not getting reused
            self.single_use = True
        # Use auto port configuration
        if (interface[1] == -1):
            interface = (interface[0], 0)
@@ -223,10 +205,7 @@ class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer):
        self.commands = BitBakeServerCommands(self)
        self.autoregister_all_functions(self.commands, "")
        self.interface = interface
        self.time = time.time()
        self.idle_timeout = idle_timeout
        if idle_timeout:
            self.register_idle_function(self.handle_idle_timeout, self)
        self.single_use = False

    def addcooker(self, cooker):
        BaseImplServer.addcooker(self, cooker)
@@ -242,12 +221,6 @@ class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer):
            if name.startswith(prefix):
                self.register_function(method, name[len(prefix):])

    def handle_idle_timeout(self, server, data, abort):
        if not abort:
            if time.time() - server.time > server.idle_timeout:
                server.quit = True
                print("Server idle timeout expired")
        return []

    def serve_forever(self):
        # Start the actual XMLRPC server
@@ -261,7 +234,7 @@ class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer):
        while not self.quit:
            fds = [self]
            nextsleep = 0.1
            for function, data in list(self._idlefuns.items()):
            for function, data in self._idlefuns.items():
                retval = None
                try:
                    retval = function(self, data, False)
@@ -290,15 +263,13 @@ class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer):
            try:
                fd_sets = select.select(fds, [], [], socktimeout)
                if fd_sets[0] and self in fd_sets[0]:
                    if self.idle_timeout:
                        self.time = time.time()
                    self._handle_request_noblock()
            except IOError:
                # we ignore interrupted calls
                pass

        # Tell idle functions we're exiting
        for function, data in list(self._idlefuns.items()):
        for function, data in self._idlefuns.items():
            try:
                retval = function(self, data, True)
            except:
@@ -310,15 +281,12 @@ class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer):
        self.connection_token = token

class BitBakeXMLRPCServerConnection(BitBakeBaseServerConnection):
    def __init__(self, serverImpl, clientinfo=("localhost", 0), observer_only = False, featureset = None):
    def __init__(self, serverImpl, clientinfo=("localhost", 0), observer_only = False, featureset = []):
        self.connection, self.transport = _create_server(serverImpl.host, serverImpl.port)
        self.clientinfo = clientinfo
        self.serverImpl = serverImpl
        self.observer_only = observer_only
        if featureset:
            self.featureset = featureset
        else:
            self.featureset = []
        self.featureset = featureset

    def connect(self, token = None):
        if token is None:
@@ -331,9 +299,7 @@ class BitBakeXMLRPCServerConnection(BitBakeBaseServerConnection):
            return None

        self.transport.set_connection_token(token)
        return self

    def setupEventQueue(self):
        self.events = uievent.BBUIEventQueue(self.connection, self.clientinfo)
        for event in bb.event.ui_queue:
            self.events.queue_event(event)
@@ -345,6 +311,8 @@ class BitBakeXMLRPCServerConnection(BitBakeBaseServerConnection):
            # no need to log it here, the error shall be sent to the client
            raise BaseException(error)

        return self

    def removeClient(self):
        if not self.observer_only:
            self.connection.removeClient()
@@ -363,10 +331,9 @@ class BitBakeXMLRPCServerConnection(BitBakeBaseServerConnection):
            pass

class BitBakeServer(BitBakeBaseServer):
    def initServer(self, interface = ("localhost", 0),
                   single_use = False, idle_timeout=0):
    def initServer(self, interface = ("localhost", 0)):
        self.interface = interface
        self.serverImpl = XMLRPCServer(interface, single_use, idle_timeout)
        self.serverImpl = XMLRPCServer(interface)

    def detach(self):
        daemonize.createDaemon(self.serverImpl.serve_forever, "bitbake-cookerdaemon.log")
@@ -411,7 +378,7 @@ class BitBakeXMLRPCClient(BitBakeBaseServer):
            bb.warn("Could not create socket for %s:%s (%s)" % (host, port, str(e)))
            raise e
        try:
            self.serverImpl = XMLRPCProxyServer(host, port, use_builtin_types=True)
            self.serverImpl = XMLRPCProxyServer(host, port)
            self.connection = BitBakeXMLRPCServerConnection(self.serverImpl, (ip, 0), self.observer_only, featureset)
            return self.connection.connect(self.token)
        except Exception as e:

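Both BBTransport variants above do the same two things: pass a timeout down to the HTTP connection and smuggle the connection token into every request as an HTTP header. A sketch of that pattern against the Python 3 xmlrpc.client API; the "Bitbake-token" header name comes from the diff, the class name is illustrative:

import http.client
import xmlrpc.client

class TokenTransport(xmlrpc.client.Transport):
    def __init__(self, timeout=60):
        xmlrpc.client.Transport.__init__(self)
        self.timeout = timeout
        self.connection_token = None

    def make_connection(self, host):
        # Modified from the default only to pass the timeout through
        chost, self._extra_headers, x509 = self.get_host_info(host)
        self._connection = host, http.client.HTTPConnection(chost, timeout=self.timeout)
        return self._connection[1]

    def send_content(self, h, body):
        if self.connection_token:
            h.putheader("Bitbake-token", self.connection_token)
        xmlrpc.client.Transport.send_content(self, h, body)

# Usage sketch:
# proxy = xmlrpc.client.ServerProxy("http://localhost:8000/",
#                                   transport=TokenTransport(), allow_none=True)
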
@@ -3,14 +3,18 @@ import logging
import os
import re
import tempfile
import pickle
import bb.data
from bb.checksum import FileChecksumCache

logger = logging.getLogger('BitBake.SigGen')

try:
    import cPickle as pickle
except ImportError:
    import pickle
    logger.info('Importing cPickle failed. Falling back to a very slow implementation.')

def init(d):
    siggens = [obj for obj in globals().values()
    siggens = [obj for obj in globals().itervalues()
               if type(obj) is type and issubclass(obj, SignatureGenerator)]

    desired = d.getVar("BB_SIGNATURE_HANDLER", True) or "noop"
@@ -33,7 +37,6 @@ class SignatureGenerator(object):
        self.taskhash = {}
        self.runtaskdeps = {}
        self.file_checksum_values = {}
        self.taints = {}

    def finalise(self, fn, d, varient):
        return
@@ -41,8 +44,7 @@ class SignatureGenerator(object):
    def get_taskhash(self, fn, task, deps, dataCache):
        return "0"

    def writeout_file_checksum_cache(self):
        """Write/update the file checksum cache onto disk"""
    def set_taskdata(self, hashes, deps, checksum):
        return

    def stampfile(self, stampbase, file_name, taskname, extrainfo):
@@ -61,10 +63,10 @@ class SignatureGenerator(object):
        return

    def get_taskdata(self):
        return (self.runtaskdeps, self.taskhash, self.file_checksum_values, self.taints)
        return (self.runtaskdeps, self.taskhash, self.file_checksum_values)

    def set_taskdata(self, data):
        self.runtaskdeps, self.taskhash, self.file_checksum_values, self.taints = data
        self.runtaskdeps, self.taskhash, self.file_checksum_values = data


class SignatureGeneratorBasic(SignatureGenerator):
@@ -78,19 +80,12 @@ class SignatureGeneratorBasic(SignatureGenerator):
        self.taskdeps = {}
        self.runtaskdeps = {}
        self.file_checksum_values = {}
        self.taints = {}
        self.gendeps = {}
        self.lookupcache = {}
        self.pkgnameextract = re.compile("(?P<fn>.*)\..*")
        self.basewhitelist = set((data.getVar("BB_HASHBASE_WHITELIST", True) or "").split())
        self.taskwhitelist = None
        self.init_rundepcheck(data)
        checksum_cache_file = data.getVar("BB_HASH_CHECKSUM_CACHE_FILE", True)
        if checksum_cache_file:
            self.checksum_cache = FileChecksumCache()
            self.checksum_cache.init_cache(data, checksum_cache_file)
        else:
            self.checksum_cache = None

    def init_rundepcheck(self, data):
        self.taskwhitelist = data.getVar("BB_HASHTASK_WHITELIST", True) or None
@@ -133,7 +128,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
                var = lookupcache[dep]
                if var is not None:
                    data = data + str(var)
        self.basehash[fn + "." + task] = hashlib.md5(data.encode("utf-8")).hexdigest()
        self.basehash[fn + "." + task] = hashlib.md5(data).hexdigest()
        taskdeps[task] = alldeps

        self.taskdeps[fn] = taskdeps
@@ -144,14 +139,13 @@ class SignatureGeneratorBasic(SignatureGenerator):

    def finalise(self, fn, d, variant):

        mc = d.getVar("__BBMULTICONFIG", False) or ""
        if variant or mc:
            fn = bb.cache.realfn2virtual(fn, variant, mc)
        if variant:
            fn = "virtual:" + variant + ":" + fn

        try:
            taskdeps = self._build_data(fn, d)
        except:
            bb.warn("Error during finalise of %s" % fn)
            bb.note("Error during finalise of %s" % fn)
            raise

        #Slow but can be useful for debugging mismatched basehashes
@@ -183,9 +177,8 @@ class SignatureGeneratorBasic(SignatureGenerator):
        k = fn + "." + task
        data = dataCache.basetaskhash[k]
        self.runtaskdeps[k] = []
        self.file_checksum_values[k] = []
        self.file_checksum_values[k] = {}
        recipename = dataCache.pkg_fn[fn]

        for dep in sorted(deps, key=clean_basepath):
            depname = dataCache.pkg_fn[self.pkgnameextract.search(dep).group('fn')]
            if not self.rundep_check(fn, recipename, task, dep, depname, dataCache):
@@ -196,12 +189,9 @@ class SignatureGeneratorBasic(SignatureGenerator):
            self.runtaskdeps[k].append(dep)

        if task in dataCache.file_checksums[fn]:
            if self.checksum_cache:
                checksums = self.checksum_cache.get_checksums(dataCache.file_checksums[fn][task], recipename)
            else:
                checksums = bb.fetch2.get_file_checksums(dataCache.file_checksums[fn][task], recipename)
            checksums = bb.fetch2.get_file_checksums(dataCache.file_checksums[fn][task], recipename)
            for (f,cs) in checksums:
                self.file_checksum_values[k].append((f,cs))
                self.file_checksum_values[k][f] = cs
                if cs:
                    data = data + cs

@@ -209,37 +199,22 @@ class SignatureGeneratorBasic(SignatureGenerator):
        if 'nostamp' in taskdep and task in taskdep['nostamp']:
            # Nostamp tasks need an implicit taint so that they force any dependent tasks to run
            import uuid
            taint = str(uuid.uuid4())
            data = data + taint
            self.taints[k] = "nostamp:" + taint
            data = data + str(uuid.uuid4())

        taint = self.read_taint(fn, task, dataCache.stamp[fn])
        if taint:
            data = data + taint
            self.taints[k] = taint
            logger.warning("%s is tainted from a forced run" % k)
            logger.warn("%s is tainted from a forced run" % k)

        h = hashlib.md5(data.encode("utf-8")).hexdigest()
        h = hashlib.md5(data).hexdigest()
        self.taskhash[k] = h
        #d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task])
        return h

    def writeout_file_checksum_cache(self):
        """Write/update the file checksum cache onto disk"""
        if self.checksum_cache:
            self.checksum_cache.save_extras()
            self.checksum_cache.save_merge()
        else:
            bb.fetch2.fetcher_parse_save()
            bb.fetch2.fetcher_parse_done()

    def dump_sigtask(self, fn, task, stampbase, runtime):

        k = fn + "." + task
        referencestamp = stampbase
        if isinstance(runtime, str) and runtime.startswith("customfile"):
        if runtime == "customfile":
            sigfile = stampbase
            referencestamp = runtime[11:]
        elif runtime and k in self.taskhash:
            sigfile = stampbase + "." + task + ".sigdata" + "." + self.taskhash[k]
        else:
@@ -248,7 +223,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
        bb.utils.mkdirhier(os.path.dirname(sigfile))

        data = {}
        data['task'] = task
        data['basewhitelist'] = self.basewhitelist
        data['taskwhitelist'] = self.taskwhitelist
        data['taskdeps'] = self.taskdeps[fn][task]
@@ -264,26 +238,21 @@ class SignatureGeneratorBasic(SignatureGenerator):

        if runtime and k in self.taskhash:
            data['runtaskdeps'] = self.runtaskdeps[k]
            data['file_checksum_values'] = [(os.path.basename(f), cs) for f,cs in self.file_checksum_values[k]]
            data['file_checksum_values'] = [(os.path.basename(f), cs) for f,cs in self.file_checksum_values[k].items()]
            data['runtaskhashes'] = {}
            for dep in data['runtaskdeps']:
                data['runtaskhashes'][dep] = self.taskhash[dep]
            data['taskhash'] = self.taskhash[k]

        taint = self.read_taint(fn, task, referencestamp)
        taint = self.read_taint(fn, task, stampbase)
        if taint:
            data['taint'] = taint

        if runtime and k in self.taints:
            if 'nostamp:' in self.taints[k]:
                data['taint'] = self.taints[k]

        fd, tmpfile = tempfile.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.")
        try:
            with os.fdopen(fd, "wb") as stream:
                p = pickle.dump(data, stream, -1)
                stream.flush()
            os.chmod(tmpfile, 0o664)
            os.chmod(tmpfile, 0664)
            os.rename(tmpfile, sigfile)
        except (OSError, IOError) as err:
            try:
@@ -292,27 +261,16 @@ class SignatureGeneratorBasic(SignatureGenerator):
                pass
            raise err

        computed_basehash = calc_basehash(data)
        if computed_basehash != self.basehash[k]:
            bb.error("Basehash mismatch %s versus %s for %s" % (computed_basehash, self.basehash[k], k))
        if runtime and k in self.taskhash:
            computed_taskhash = calc_taskhash(data)
            if computed_taskhash != self.taskhash[k]:
                bb.error("Taskhash mismatch %s versus %s for %s" % (computed_taskhash, self.taskhash[k], k))


    def dump_sigs(self, dataCaches, options):
    def dump_sigs(self, dataCache, options):
        for fn in self.taskdeps:
            for task in self.taskdeps[fn]:
                tid = fn + ":" + task
                (mc, _, _) = bb.runqueue.split_tid(tid)
                k = fn + "." + task
                if k not in self.taskhash:
                    continue
                if dataCaches[mc].basetaskhash[k] != self.basehash[k]:
                if dataCache.basetaskhash[k] != self.basehash[k]:
                    bb.error("Bitbake's cached basehash does not match the one we just generated (%s)!" % k)
                    bb.error("The mismatched hashes were %s and %s" % (dataCaches[mc].basetaskhash[k], self.basehash[k]))
                self.dump_sigtask(fn, task, dataCaches[mc].stamp[fn], True)
                    bb.error("The mismatched hashes were %s and %s" % (dataCache.basetaskhash[k], self.basehash[k]))
                self.dump_sigtask(fn, task, dataCache.stamp[fn], True)

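The get_taskhash() hunks above differ in bookkeeping (the newer side also records the taint in self.taints), but both force "nostamp" tasks to rerun by mixing a fresh UUID into the hashed data. A tiny sketch of that mechanism, assuming Python 3; simplified, not the BitBake API:

import hashlib
import uuid

def taskhash_sketch(data, nostamp):
    if nostamp:
        # A new UUID per run guarantees the resulting hash never repeats,
        # so anything depending on this task is always considered stale.
        data = data + str(uuid.uuid4())
    return hashlib.md5(data.encode("utf-8")).hexdigest()

h1 = taskhash_sketch("base", nostamp=True)
h2 = taskhash_sketch("base", nostamp=True)
assert h1 != h2   # a nostamp task never matches a previous signature
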
class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
    name = "basichash"
@@ -342,8 +300,7 @@ def dump_this_task(outfile, d):
    import bb.parse
    fn = d.getVar("BB_FILENAME", True)
    task = "do_" + d.getVar("BB_CURRENTTASK", True)
    referencestamp = bb.build.stamp_internal(task, d, None, True)
    bb.parse.siggen.dump_sigtask(fn, task, outfile, "customfile:" + referencestamp)
    bb.parse.siggen.dump_sigtask(fn, task, outfile, "customfile")

def clean_basepath(a):
    b = a.rsplit("/", 2)[1] + a.rsplit("/", 2)[2]
@@ -366,12 +323,10 @@ def clean_basepaths_list(a):
def compare_sigfiles(a, b, recursecb = None):
    output = []

    with open(a, 'rb') as f:
        p1 = pickle.Unpickler(f)
        a_data = p1.load()
    with open(b, 'rb') as f:
        p2 = pickle.Unpickler(f)
        b_data = p2.load()
    p1 = pickle.Unpickler(open(a, "rb"))
    a_data = p1.load()
    p2 = pickle.Unpickler(open(b, "rb"))
    b_data = p2.load()

    def dict_diff(a, b, whitelist=set()):
        sa = set(a.keys())
@@ -453,11 +408,6 @@ def compare_sigfiles(a, b, recursecb = None):
        for dep in changed:
            output.append("Variable %s value changed from '%s' to '%s'" % (dep, a_data['varvals'][dep], b_data['varvals'][dep]))

    if not 'file_checksum_values' in a_data:
        a_data['file_checksum_values'] = {}
    if not 'file_checksum_values' in b_data:
        b_data['file_checksum_values'] = {}

    changed, added, removed = file_checksums_diff(a_data['file_checksum_values'], b_data['file_checksum_values'])
    if changed:
        for f, old, new in changed:
@@ -469,20 +419,12 @@ def compare_sigfiles(a, b, recursecb = None):
        for f in removed:
            output.append("Dependency on checksum of file %s was removed" % (f))

    if not 'runtaskdeps' in a_data:
        a_data['runtaskdeps'] = {}
    if not 'runtaskdeps' in b_data:
        b_data['runtaskdeps'] = {}

    if len(a_data['runtaskdeps']) != len(b_data['runtaskdeps']):
        changed = ["Number of task dependencies changed"]
    else:
        changed = []
        for idx, task in enumerate(a_data['runtaskdeps']):
            a = a_data['runtaskdeps'][idx]
            b = b_data['runtaskdeps'][idx]
            if a_data['runtaskhashes'][a] != b_data['runtaskhashes'][b]:
                changed.append("%s with hash %s\n changed to\n%s with hash %s" % (a, a_data['runtaskhashes'][a], b, b_data['runtaskhashes'][b]))
    changed = []
    for idx, task in enumerate(a_data['runtaskdeps']):
        a = a_data['runtaskdeps'][idx]
        b = b_data['runtaskdeps'][idx]
        if a_data['runtaskhashes'][a] != b_data['runtaskhashes'][b]:
            changed.append("%s with hash %s\n changed to\n%s with hash %s" % (a, a_data['runtaskhashes'][a], b, b_data['runtaskhashes'][b]))

    if changed:
        output.append("runtaskdeps changed from %s to %s" % (clean_basepaths_list(a_data['runtaskdeps']), clean_basepaths_list(b_data['runtaskdeps'])))
@@ -531,46 +473,11 @@ def compare_sigfiles(a, b, recursecb = None):
    return output


def calc_basehash(sigdata):
    task = sigdata['task']
    basedata = sigdata['varvals'][task]

    if basedata is None:
        basedata = ''

    alldeps = sigdata['taskdeps']
    for dep in alldeps:
        basedata = basedata + dep
        val = sigdata['varvals'][dep]
        if val is not None:
            basedata = basedata + str(val)

    return hashlib.md5(basedata.encode("utf-8")).hexdigest()

def calc_taskhash(sigdata):
    data = sigdata['basehash']

    for dep in sigdata['runtaskdeps']:
        data = data + sigdata['runtaskhashes'][dep]

    for c in sigdata['file_checksum_values']:
        data = data + c[1]

    if 'taint' in sigdata:
        if 'nostamp:' in sigdata['taint']:
            data = data + sigdata['taint'][8:]
        else:
            data = data + sigdata['taint']

    return hashlib.md5(data.encode("utf-8")).hexdigest()


def dump_sigfile(a):
    output = []

    with open(a, 'rb') as f:
        p1 = pickle.Unpickler(f)
        a_data = p1.load()
    p1 = pickle.Unpickler(open(a, "rb"))
    a_data = p1.load()

    output.append("basewhitelist: %s" % (a_data['basewhitelist']))

@@ -599,13 +506,17 @@ def dump_sigfile(a):
    if 'taint' in a_data:
        output.append("Tainted (by forced/invalidated task): %s" % a_data['taint'])

    if 'task' in a_data:
        computed_basehash = calc_basehash(a_data)
        output.append("Computed base hash is %s and from file %s" % (computed_basehash, a_data['basehash']))
    else:
        output.append("Unable to compute base hash")
    data = a_data['basehash']
    for dep in a_data['runtaskdeps']:
        data = data + a_data['runtaskhashes'][dep]

    computed_taskhash = calc_taskhash(a_data)
    output.append("Computed task hash is %s" % computed_taskhash)
    for c in a_data['file_checksum_values']:
        data = data + c[1]

    if 'taint' in a_data:
        data = data + a_data['taint']

    h = hashlib.md5(data).hexdigest()
    output.append("Computed Hash is %s" % h)

    return output

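calc_taskhash() above spells out the task-hash recipe: concatenate the base hash, each run-task dependency's hash, the file checksums, and any taint, then take an MD5 of the result (the fido-side dump_sigfile() inlines the same computation). A worked sketch with made-up values, assuming Python 3:

import hashlib

def taskhash(basehash, runtaskhashes, file_checksums, taint=None):
    data = basehash
    for dep in runtaskhashes:          # BitBake walks runtaskdeps in order
        data = data + runtaskhashes[dep]
    for _, cs in file_checksums:
        data = data + cs
    if taint:
        data = data + taint
    return hashlib.md5(data.encode("utf-8")).hexdigest()

# Any change to a dependency hash or file checksum changes the result:
h = taskhash("a" * 32, {"recipe.bb.do_compile": "b" * 32}, [("main.c", "c" * 32)])
print(h)
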
@@ -37,24 +37,27 @@ def re_match_strings(target, strings):
    return any(name == target or re.match(name, target)
               for name in strings)

class TaskEntry:
    def __init__(self):
        self.tdepends = []
        self.idepends = []
        self.irdepends = []

class TaskData:
    """
    BitBake Task Data implementation
    """
    def __init__(self, abort = True, tryaltconfigs = False, skiplist = None, allowincomplete = False):
    def __init__(self, abort = True, tryaltconfigs = False, skiplist = None):
        self.build_names_index = []
        self.run_names_index = []
        self.fn_index = []

        self.build_targets = {}
        self.run_targets = {}

        self.external_targets = []

        self.seenfns = []
        self.taskentries = {}
        self.tasks_fnid = []
        self.tasks_name = []
        self.tasks_tdepends = []
        self.tasks_idepends = []
        self.tasks_irdepends = []
        # Cache to speed up task ID lookups
        self.tasks_lookup = {}

        self.depids = {}
        self.rdepids = {}
@@ -63,14 +66,95 @@ class TaskData:

        self.failed_deps = []
        self.failed_rdeps = []
        self.failed_fns = []
        self.failed_fnids = []

        self.abort = abort
        self.tryaltconfigs = tryaltconfigs
        self.allowincomplete = allowincomplete

        self.skiplist = skiplist

    def getbuild_id(self, name):
        """
        Return an ID number for the build target name.
        If it doesn't exist, create one.
        """
        if not name in self.build_names_index:
            self.build_names_index.append(name)
            return len(self.build_names_index) - 1

        return self.build_names_index.index(name)

    def getrun_id(self, name):
        """
        Return an ID number for the run target name.
        If it doesn't exist, create one.
        """
        if not name in self.run_names_index:
            self.run_names_index.append(name)
            return len(self.run_names_index) - 1

        return self.run_names_index.index(name)

    def getfn_id(self, name):
        """
        Return an ID number for the filename.
        If it doesn't exist, create one.
        """
        if not name in self.fn_index:
            self.fn_index.append(name)
            return len(self.fn_index) - 1

        return self.fn_index.index(name)

    def gettask_ids(self, fnid):
        """
        Return an array of the ID numbers matching a given fnid.
        """
        ids = []
        if fnid in self.tasks_lookup:
            for task in self.tasks_lookup[fnid]:
                ids.append(self.tasks_lookup[fnid][task])
        return ids

    def gettask_id_fromfnid(self, fnid, task):
        """
        Return an ID number for the task matching fnid and task.
        """
        if fnid in self.tasks_lookup:
            if task in self.tasks_lookup[fnid]:
                return self.tasks_lookup[fnid][task]

        return None

    def gettask_id(self, fn, task, create = True):
        """
        Return an ID number for the task matching fn and task.
        If it doesn't exist, create one by default.
        Optionally return None instead.
        """
        fnid = self.getfn_id(fn)

        if fnid in self.tasks_lookup:
            if task in self.tasks_lookup[fnid]:
                return self.tasks_lookup[fnid][task]

        if not create:
            return None

        self.tasks_name.append(task)
        self.tasks_fnid.append(fnid)
        self.tasks_tdepends.append([])
        self.tasks_idepends.append([])
        self.tasks_irdepends.append([])

        listid = len(self.tasks_name) - 1

        if fnid not in self.tasks_lookup:
            self.tasks_lookup[fnid] = {}
        self.tasks_lookup[fnid][task] = listid

        return listid

    def add_tasks(self, fn, dataCache):
        """
        Add tasks for a given fn to the database
@@ -78,31 +162,27 @@ class TaskData:

        task_deps = dataCache.task_deps[fn]

        if fn in self.failed_fns:
        fnid = self.getfn_id(fn)

        if fnid in self.failed_fnids:
            bb.msg.fatal("TaskData", "Trying to re-add a failed file? Something is broken...")

        # Check if we've already seen this fn
        if fn in self.seenfns:
        if fnid in self.tasks_fnid:
            return

        self.seenfns.append(fn)

        self.add_extra_deps(fn, dataCache)

        for task in task_deps['tasks']:

            tid = "%s:%s" % (fn, task)
            self.taskentries[tid] = TaskEntry()

            # Work out task dependencies
            parentids = []
            for dep in task_deps['parents'][task]:
                if dep not in task_deps['tasks']:
                    bb.debug(2, "Not adding dependency of %s on %s since %s does not exist" % (task, dep, dep))
                    continue
                parentid = "%s:%s" % (fn, dep)
                parentid = self.gettask_id(fn, dep)
                parentids.append(parentid)
            self.taskentries[tid].tdepends.extend(parentids)
            taskid = self.gettask_id(fn, task)
            self.tasks_tdepends[taskid].extend(parentids)

            # Touch all intertask dependencies
            if 'depends' in task_deps and task in task_deps['depends']:
@@ -111,30 +191,29 @@ class TaskData:
                    if dep:
                        if ":" not in dep:
                            bb.msg.fatal("TaskData", "Error for %s, dependency %s does not contain ':' character\n. Task 'depends' should be specified in the form 'packagename:task'" % (fn, dep))
                        ids.append(((dep.split(":")[0]), dep.split(":")[1]))
                        self.seen_build_target(dep.split(":")[0])
                self.taskentries[tid].idepends.extend(ids)
                        ids.append(((self.getbuild_id(dep.split(":")[0])), dep.split(":")[1]))
                self.tasks_idepends[taskid].extend(ids)
            if 'rdepends' in task_deps and task in task_deps['rdepends']:
                ids = []
                for dep in task_deps['rdepends'][task].split():
                    if dep:
                        if ":" not in dep:
                            bb.msg.fatal("TaskData", "Error for %s, dependency %s does not contain ':' character\n. Task 'rdepends' should be specified in the form 'packagename:task'" % (fn, dep))
                        ids.append(((dep.split(":")[0]), dep.split(":")[1]))
                        self.seen_run_target(dep.split(":")[0])
                self.taskentries[tid].irdepends.extend(ids)
                        ids.append(((self.getrun_id(dep.split(":")[0])), dep.split(":")[1]))
                self.tasks_irdepends[taskid].extend(ids)


        # Work out build dependencies
        if not fn in self.depids:
            dependids = set()
        if not fnid in self.depids:
            dependids = {}
            for depend in dataCache.deps[fn]:
                dependids.add(depend)
            self.depids[fn] = list(dependids)
                dependids[self.getbuild_id(depend)] = None
            self.depids[fnid] = dependids.keys()
            logger.debug(2, "Added dependencies %s for %s", str(dataCache.deps[fn]), fn)

        # Work out runtime dependencies
        if not fn in self.rdepids:
            rdependids = set()
        if not fnid in self.rdepids:
            rdependids = {}
            rdepends = dataCache.rundeps[fn]
            rrecs = dataCache.runrecs[fn]
            rdependlist = []
@@ -142,48 +221,33 @@ class TaskData:
            for package in rdepends:
                for rdepend in rdepends[package]:
                    rdependlist.append(rdepend)
                    rdependids.add(rdepend)
                    rdependids[self.getrun_id(rdepend)] = None
            for package in rrecs:
                for rdepend in rrecs[package]:
                    rreclist.append(rdepend)
                    rdependids.add(rdepend)
                    rdependids[self.getrun_id(rdepend)] = None
            if rdependlist:
                logger.debug(2, "Added runtime dependencies %s for %s", str(rdependlist), fn)
            if rreclist:
                logger.debug(2, "Added runtime recommendations %s for %s", str(rreclist), fn)
            self.rdepids[fn] = list(rdependids)
            self.rdepids[fnid] = rdependids.keys()

        for dep in self.depids[fn]:
            self.seen_build_target(dep)
        for dep in self.depids[fnid]:
            if dep in self.failed_deps:
                self.fail_fn(fn)
                self.fail_fnid(fnid)
                return
        for dep in self.rdepids[fn]:
            self.seen_run_target(dep)
        for dep in self.rdepids[fnid]:
            if dep in self.failed_rdeps:
                self.fail_fn(fn)
                self.fail_fnid(fnid)
                return

    def add_extra_deps(self, fn, dataCache):
        func = dataCache.extradepsfunc.get(fn, None)
        if func:
            bb.providers.buildWorldTargetList(dataCache)
            pn = dataCache.pkg_fn[fn]
            params = {'deps': dataCache.deps[fn],
                      'world_target': dataCache.world_target,
                      'pkg_pn': dataCache.pkg_pn,
                      'self_pn': pn}
            funcname = '_%s_calculate_extra_depends' % pn.replace('-', '_')
            paramlist = ','.join(params.keys())
            func = 'def %s(%s):\n%s\n\n%s(%s)' % (funcname, paramlist, func, funcname, paramlist)
            bb.utils.better_exec(func, params)


    def have_build_target(self, target):
        """
        Have we a build target matching this name?
        """
        if target in self.build_targets and self.build_targets[target]:
        targetid = self.getbuild_id(target)

        if targetid in self.build_targets:
            return True
        return False

@@ -191,54 +255,50 @@ class TaskData:
        """
        Have we a runtime target matching this name?
        """
        if target in self.run_targets and self.run_targets[target]:
        targetid = self.getrun_id(target)

        if targetid in self.run_targets:
            return True
        return False

    def seen_build_target(self, name):
        """
        Maintain a list of build targets
        """
        if name not in self.build_targets:
            self.build_targets[name] = []

    def add_build_target(self, fn, item):
        """
        Add a build target.
        If already present, append the provider fn to the list
        """
        if item in self.build_targets:
            if fn in self.build_targets[item]:
                return
            self.build_targets[item].append(fn)
            return
        self.build_targets[item] = [fn]
        targetid = self.getbuild_id(item)
        fnid = self.getfn_id(fn)

    def seen_run_target(self, name):
        """
        Maintain a list of runtime build targets
        """
        if name not in self.run_targets:
            self.run_targets[name] = []
        if targetid in self.build_targets:
            if fnid in self.build_targets[targetid]:
                return
            self.build_targets[targetid].append(fnid)
            return
        self.build_targets[targetid] = [fnid]

    def add_runtime_target(self, fn, item):
        """
        Add a runtime target.
        If already present, append the provider fn to the list
        """
        if item in self.run_targets:
            if fn in self.run_targets[item]:
                return
            self.run_targets[item].append(fn)
            return
        self.run_targets[item] = [fn]
        targetid = self.getrun_id(item)
        fnid = self.getfn_id(fn)

    def mark_external_target(self, target):
        if targetid in self.run_targets:
            if fnid in self.run_targets[targetid]:
                return
            self.run_targets[targetid].append(fnid)
            return
        self.run_targets[targetid] = [fnid]

    def mark_external_target(self, item):
        """
        Mark a build target as being externally requested
        """
        if target not in self.external_targets:
            self.external_targets.append(target)
        targetid = self.getbuild_id(item)

        if targetid not in self.external_targets:
            self.external_targets.append(targetid)

    def get_unresolved_build_targets(self, dataCache):
        """
@@ -246,12 +306,12 @@ class TaskData:
        are unknown.
        """
        unresolved = []
        for target in self.build_targets:
        for target in self.build_names_index:
            if re_match_strings(target, dataCache.ignored_dependencies):
                continue
            if target in self.failed_deps:
            if self.build_names_index.index(target) in self.failed_deps:
                continue
            if not self.build_targets[target]:
            if not self.have_build_target(target):
                unresolved.append(target)
        return unresolved

@@ -261,12 +321,12 @@ class TaskData:
        are unknown.
        """
        unresolved = []
        for target in self.run_targets:
        for target in self.run_names_index:
            if re_match_strings(target, dataCache.ignored_dependencies):
                continue
            if target in self.failed_rdeps:
            if self.run_names_index.index(target) in self.failed_rdeps:
                continue
            if not self.run_targets[target]:
            if not self.have_runtime_target(target):
                unresolved.append(target)
        return unresolved

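On the fido side, TaskData maps every target and file name to a small integer through the index lists above (getbuild_id(), getrun_id(), getfn_id()), while the newer side keys its tables on the strings themselves. A minimal sketch of that index pattern; the dict cache here is an illustrative addition, since the original pays an O(n) list.index() on every repeat lookup:

class NameIndex(object):
    def __init__(self):
        self.names = []    # id -> name, like build_names_index above
        self._ids = {}     # name -> id cache (the fido code uses list.index())

    def get_id(self, name):
        # Return the ID for name, creating one on first sight.
        if name not in self._ids:
            self._ids[name] = len(self.names)
            self.names.append(name)
        return self._ids[name]

idx = NameIndex()
assert idx.get_id("virtual/kernel") == 0
assert idx.get_id("busybox") == 1
assert idx.names[idx.get_id("busybox")] == "busybox"
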
@@ -274,26 +334,50 @@ class TaskData:
        """
        Return a list of providers of item
        """
        return self.build_targets[item]
        targetid = self.getbuild_id(item)

    def get_dependees(self, item):
        return self.build_targets[targetid]

    def get_dependees(self, itemid):
        """
        Return a list of targets which depend on item
        """
        dependees = []
        for fn in self.depids:
            if item in self.depids[fn]:
                dependees.append(fn)
        for fnid in self.depids:
            if itemid in self.depids[fnid]:
                dependees.append(fnid)
        return dependees

    def get_rdependees(self, item):
    def get_dependees_str(self, item):
        """
        Return a list of targets which depend on item as a user readable string
        """
        itemid = self.getbuild_id(item)
        dependees = []
        for fnid in self.depids:
            if itemid in self.depids[fnid]:
                dependees.append(self.fn_index[fnid])
        return dependees

    def get_rdependees(self, itemid):
        """
        Return a list of targets which depend on runtime item
        """
        dependees = []
        for fn in self.rdepids:
            if item in self.rdepids[fn]:
                dependees.append(fn)
        for fnid in self.rdepids:
            if itemid in self.rdepids[fnid]:
                dependees.append(fnid)
        return dependees

    def get_rdependees_str(self, item):
        """
        Return a list of targets which depend on runtime item as a user readable string
        """
        itemid = self.getrun_id(item)
        dependees = []
        for fnid in self.rdepids:
            if itemid in self.rdepids[fnid]:
                dependees.append(self.fn_index[fnid])
        return dependees

    def get_reasons(self, item, runtime=False):
@@ -329,7 +413,7 @@ class TaskData:
        except bb.providers.NoProvider:
            if self.abort:
                raise
            self.remove_buildtarget(item)
            self.remove_buildtarget(self.getbuild_id(item))

        self.mark_external_target(item)

@@ -344,14 +428,7 @@ class TaskData:
            return

        if not item in dataCache.providers:
            close_matches = self.get_close_matches(item, list(dataCache.providers.keys()))
            # Is it in RuntimeProviders ?
            all_p = bb.providers.getRuntimeProviders(dataCache, item)
            for fn in all_p:
                new = dataCache.pkg_fn[fn] + " RPROVIDES " + item
                if new not in close_matches:
                    close_matches.append(new)
            bb.event.fire(bb.event.NoProvider(item, dependees=self.get_dependees(item), reasons=self.get_reasons(item), close_matches=close_matches), cfgData)
            bb.event.fire(bb.event.NoProvider(item, dependees=self.get_dependees_str(item), reasons=self.get_reasons(item), close_matches=self.get_close_matches(item, dataCache.providers.keys())), cfgData)
            raise bb.providers.NoProvider(item)

        if self.have_build_target(item):
@@ -360,10 +437,10 @@ class TaskData:
        all_p = dataCache.providers[item]

        eligible, foundUnique = bb.providers.filterProviders(all_p, item, cfgData, dataCache)
        eligible = [p for p in eligible if not p in self.failed_fns]
        eligible = [p for p in eligible if not self.getfn_id(p) in self.failed_fnids]

        if not eligible:
            bb.event.fire(bb.event.NoProvider(item, dependees=self.get_dependees(item), reasons=["No eligible PROVIDERs exist for '%s'" % item]), cfgData)
            bb.event.fire(bb.event.NoProvider(item, dependees=self.get_dependees_str(item), reasons=["No eligible PROVIDERs exist for '%s'" % item]), cfgData)
            raise bb.providers.NoProvider(item)

        if len(eligible) > 1 and foundUnique == False:
@@ -375,7 +452,8 @@ class TaskData:
                self.consider_msgs_cache.append(item)

        for fn in eligible:
            if fn in self.failed_fns:
            fnid = self.getfn_id(fn)
            if fnid in self.failed_fnids:
                continue
            logger.debug(2, "adding %s to satisfy %s", fn, item)
            self.add_build_target(fn, item)
@@ -399,14 +477,14 @@ class TaskData:
        all_p = bb.providers.getRuntimeProviders(dataCache, item)

        if not all_p:
            bb.event.fire(bb.event.NoProvider(item, runtime=True, dependees=self.get_rdependees(item), reasons=self.get_reasons(item, True)), cfgData)
            bb.event.fire(bb.event.NoProvider(item, runtime=True, dependees=self.get_rdependees_str(item), reasons=self.get_reasons(item, True)), cfgData)
            raise bb.providers.NoRProvider(item)

        eligible, numberPreferred = bb.providers.filterProvidersRunTime(all_p, item, cfgData, dataCache)
        eligible = [p for p in eligible if not p in self.failed_fns]
        eligible = [p for p in eligible if not self.getfn_id(p) in self.failed_fnids]

        if not eligible:
            bb.event.fire(bb.event.NoProvider(item, runtime=True, dependees=self.get_rdependees(item), reasons=["No eligible RPROVIDERs exist for '%s'" % item]), cfgData)
            bb.event.fire(bb.event.NoProvider(item, runtime=True, dependees=self.get_rdependees_str(item), reasons=["No eligible RPROVIDERs exist for '%s'" % item]), cfgData)
            raise bb.providers.NoRProvider(item)

        if len(eligible) > 1 and numberPreferred == 0:
@@ -428,80 +506,80 @@ class TaskData:

        # run through the list until we find one that we can build
        for fn in eligible:
            if fn in self.failed_fns:
            fnid = self.getfn_id(fn)
            if fnid in self.failed_fnids:
                continue
            logger.debug(2, "adding '%s' to satisfy runtime '%s'", fn, item)
            self.add_runtime_target(fn, item)
            self.add_tasks(fn, dataCache)

    def fail_fn(self, fn, missing_list=None):
    def fail_fnid(self, fnid, missing_list = []):
        """
        Mark a file as failed (unbuildable)
        Remove any references from build and runtime provider lists

        missing_list, A list of missing requirements for this target
        """
        if fn in self.failed_fns:
        if fnid in self.failed_fnids:
            return
        if not missing_list:
            missing_list = []
        logger.debug(1, "File '%s' is unbuildable, removing...", fn)
        self.failed_fns.append(fn)
        logger.debug(1, "File '%s' is unbuildable, removing...", self.fn_index[fnid])
        self.failed_fnids.append(fnid)
        for target in self.build_targets:
            if fn in self.build_targets[target]:
                self.build_targets[target].remove(fn)
            if fnid in self.build_targets[target]:
                self.build_targets[target].remove(fnid)
                if len(self.build_targets[target]) == 0:
                    self.remove_buildtarget(target, missing_list)
        for target in self.run_targets:
            if fn in self.run_targets[target]:
                self.run_targets[target].remove(fn)
            if fnid in self.run_targets[target]:
                self.run_targets[target].remove(fnid)
                if len(self.run_targets[target]) == 0:
                    self.remove_runtarget(target, missing_list)

    def remove_buildtarget(self, target, missing_list=None):
    def remove_buildtarget(self, targetid, missing_list = []):
        """
        Mark a build target as failed (unbuildable)
        Trigger removal of any files that have this as a dependency
        """
        if not missing_list:
            missing_list = [target]
            missing_list = [self.build_names_index[targetid]]
        else:
            missing_list = [target] + missing_list
        logger.verbose("Target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s", target, missing_list)
        self.failed_deps.append(target)
        dependees = self.get_dependees(target)
        for fn in dependees:
            self.fail_fn(fn, missing_list)
        for tid in self.taskentries:
            for (idepend, idependtask) in self.taskentries[tid].idepends:
                if idepend == target:
                    fn = tid.rsplit(":",1)[0]
                    self.fail_fn(fn, missing_list)
            missing_list = [self.build_names_index[targetid]] + missing_list
        logger.verbose("Target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s", self.build_names_index[targetid], missing_list)
        self.failed_deps.append(targetid)
        dependees = self.get_dependees(targetid)
        for fnid in dependees:
            self.fail_fnid(fnid, missing_list)
        for taskid in xrange(len(self.tasks_idepends)):
            idepends = self.tasks_idepends[taskid]
            for (idependid, idependtask) in idepends:
                if idependid == targetid:
                    self.fail_fnid(self.tasks_fnid[taskid], missing_list)

        if self.abort and target in self.external_targets:
        if self.abort and targetid in self.external_targets:
            target = self.build_names_index[targetid]
            logger.error("Required build target '%s' has no buildable providers.\nMissing or unbuildable dependency chain was: %s", target, missing_list)
            raise bb.providers.NoProvider(target)

    def remove_runtarget(self, target, missing_list=None):
    def remove_runtarget(self, targetid, missing_list = []):
        """
        Mark a run target as failed (unbuildable)
        Trigger removal of any files that have this as a dependency
        """
        if not missing_list:
            missing_list = [target]
            missing_list = [self.run_names_index[targetid]]
        else:
            missing_list = [target] + missing_list
            missing_list = [self.run_names_index[targetid]] + missing_list

        logger.info("Runtime target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s", target, missing_list)
        self.failed_rdeps.append(target)
        dependees = self.get_rdependees(target)
        for fn in dependees:
            self.fail_fn(fn, missing_list)
        for tid in self.taskentries:
            for (idepend, idependtask) in self.taskentries[tid].irdepends:
                if idepend == target:
                    fn = tid.rsplit(":",1)[0]
                    self.fail_fn(fn, missing_list)
        logger.info("Runtime target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s", self.run_names_index[targetid], missing_list)
        self.failed_rdeps.append(targetid)
        dependees = self.get_rdependees(targetid)
        for fnid in dependees:
            self.fail_fnid(fnid, missing_list)
        for taskid in xrange(len(self.tasks_irdepends)):
            irdepends = self.tasks_irdepends[taskid]
            for (idependid, idependtask) in irdepends:
                if idependid == targetid:
                    self.fail_fnid(self.tasks_fnid[taskid], missing_list)

    def add_unresolved(self, cfgData, dataCache):
        """
@@ -515,68 +593,59 @@ class TaskData:
                self.add_provider_internal(cfgData, dataCache, target)
                added = added + 1
            except bb.providers.NoProvider:
                if self.abort and target in self.external_targets and not self.allowincomplete:
                targetid = self.getbuild_id(target)
                if self.abort and targetid in self.external_targets:
                    raise
                if not self.allowincomplete:
                    self.remove_buildtarget(target)
                self.remove_buildtarget(targetid)
        for target in self.get_unresolved_run_targets(dataCache):
            try:
                self.add_rprovider(cfgData, dataCache, target)
                added = added + 1
            except (bb.providers.NoRProvider, bb.providers.MultipleRProvider):
                self.remove_runtarget(target)
                self.remove_runtarget(self.getrun_id(target))
        logger.debug(1, "Resolved " + str(added) + " extra dependencies")
        if added == 0:
            break
        # self.dump_data()

    def get_providermap(self, prefix=None):
        provmap = {}
        for name in self.build_targets:
            if prefix and not name.startswith(prefix):
                continue
            if self.have_build_target(name):
                provider = self.get_provider(name)
                if provider:
                    provmap[name] = provider[0]
        return provmap

    def dump_data(self):
        """
        Dump some debug information on the internal data structures
        """
        logger.debug(3, "build_names:")
        logger.debug(3, ", ".join(self.build_targets))
        logger.debug(3, ", ".join(self.build_names_index))

        logger.debug(3, "run_names:")
        logger.debug(3, ", ".join(self.run_targets))
        logger.debug(3, ", ".join(self.run_names_index))

        logger.debug(3, "build_targets:")
        for target in self.build_targets:
        for buildid in xrange(len(self.build_names_index)):
            target = self.build_names_index[buildid]
            targets = "None"
            if target in self.build_targets:
                targets = self.build_targets[target]
            logger.debug(3, " %s: %s", target, targets)
            if buildid in self.build_targets:
                targets = self.build_targets[buildid]
            logger.debug(3, " (%s)%s: %s", buildid, target, targets)

        logger.debug(3, "run_targets:")
        for target in self.run_targets:
        for runid in xrange(len(self.run_names_index)):
            target = self.run_names_index[runid]
            targets = "None"
            if target in self.run_targets:
                targets = self.run_targets[target]
            logger.debug(3, " %s: %s", target, targets)
            if runid in self.run_targets:
                targets = self.run_targets[runid]
            logger.debug(3, " (%s)%s: %s", runid, target, targets)

        logger.debug(3, "tasks:")
        for tid in self.taskentries:
            logger.debug(3, " %s: %s %s %s",
                         tid,
                         self.taskentries[tid].idepends,
                         self.taskentries[tid].irdepends,
                         self.taskentries[tid].tdepends)
        for task in xrange(len(self.tasks_name)):
            logger.debug(3, " (%s)%s - %s: %s",
                         task,
                         self.fn_index[self.tasks_fnid[task]],
                         self.tasks_name[task],
                         self.tasks_tdepends[task])

        logger.debug(3, "dependency ids (per fn):")
        for fn in self.depids:
            logger.debug(3, " %s: %s", fn, self.depids[fn])
        for fnid in self.depids:
            logger.debug(3, " %s %s: %s", fnid, self.fn_index[fnid], self.depids[fnid])

        logger.debug(3, "runtime dependency ids (per fn):")
        for fn in self.rdepids:
            logger.debug(3, " %s: %s", fn, self.rdepids[fn])
        for fnid in self.rdepids:
            logger.debug(3, " %s %s: %s", fnid, self.fn_index[fnid], self.rdepids[fnid])

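Both versions of remove_buildtarget()/remove_runtarget() above implement the same cascade: a target with no buildable provider is marked failed, every file that depended on it is failed via fail_fn()/fail_fnid(), and that can in turn knock out further targets. A simplified sketch of the propagation, using plain strings in place of BitBake's fn/target bookkeeping:

def fail_target(target, depends_on, failed):
    # depends_on maps a recipe file to the set of targets/files it needs.
    failed.add(target)
    for fn, deps in depends_on.items():
        if target in deps and fn not in failed:
            fail_target(fn, depends_on, failed)   # recurse into dependees

failed = set()
fail_target("libfoo", {"app.bb": {"libfoo"}, "tool.bb": {"zlib"}}, failed)
assert failed == {"libfoo", "app.bb"}   # tool.bb is untouched
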
@@ -191,8 +191,8 @@ class PythonReferenceTest(ReferenceTest):
|
||||
if hasattr(bb.utils, "_context"):
|
||||
self.context = bb.utils._context
|
||||
else:
|
||||
import builtins
|
||||
self.context = builtins.__dict__
|
||||
import __builtin__
|
||||
self.context = __builtin__.__dict__
|
||||
|
||||
def parseExpression(self, exp):
|
||||
parsedvar = self.d.expandWithRefs(exp, None)
|
||||
@@ -293,16 +293,11 @@ bb.data.getVar(a(), d, False)
|
||||
def test_python(self):
|
||||
self.d.setVar("FOO", self.pydata)
|
||||
self.setEmptyVars(["inexpand", "a", "test2", "test"])
|
||||
self.d.setVarFlags("FOO", {
|
||||
"func": True,
|
||||
"python": True,
|
||||
"lineno": 1,
|
||||
"filename": "example.bb",
|
||||
})
|
||||
self.d.setVarFlags("FOO", {"func": True, "python": True})
|
||||
|
||||
deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)
|
||||
|
||||
self.assertEqual(deps, set(["somevar", "bar", "something", "inexpand", "test", "test2", "a"]))
|
||||
self.assertEquals(deps, set(["somevar", "bar", "something", "inexpand", "test", "test2", "a"]))
|
||||
|
||||
|
||||
shelldata = """
|
||||
@@ -349,7 +344,7 @@ esac
|
||||
|
||||
deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)
|
||||
|
||||
self.assertEqual(deps, set(["somevar", "inverted"] + execs))
|
||||
self.assertEquals(deps, set(["somevar", "inverted"] + execs))
|
||||
|
||||
|
||||
def test_vardeps(self):
|
||||
@@ -359,7 +354,7 @@ esac
|
||||
|
||||
deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)
|
||||
|
||||
self.assertEqual(deps, set(["oe_libinstall"]))
|
||||
self.assertEquals(deps, set(["oe_libinstall"]))
|
||||
|
||||
def test_vardeps_expand(self):
|
||||
self.d.setVar("oe_libinstall", "echo test")
|
||||
@@ -368,7 +363,7 @@ esac
|
||||
|
||||
deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)
|
||||
|
||||
self.assertEqual(deps, set(["oe_libinstall"]))
|
||||
self.assertEquals(deps, set(["oe_libinstall"]))
|
||||
|
||||
#Currently no wildcard support
|
||||
#def test_vardeps_wildcards(self):
|
||||
|
||||
@@ -34,14 +34,14 @@ class COWTestCase(unittest.TestCase):
|
||||
from bb.COW import COWDictBase
|
||||
a = COWDictBase.copy()
|
||||
|
||||
self.assertEqual(False, 'a' in a)
|
||||
self.assertEquals(False, a.has_key('a'))
|
||||
|
||||
a['a'] = 'a'
|
||||
a['b'] = 'b'
|
||||
self.assertEqual(True, 'a' in a)
|
||||
self.assertEqual(True, 'b' in a)
|
||||
self.assertEqual('a', a['a'] )
|
||||
self.assertEqual('b', a['b'] )
|
||||
self.assertEquals(True, a.has_key('a'))
|
||||
self.assertEquals(True, a.has_key('b'))
|
||||
self.assertEquals('a', a['a'] )
|
||||
self.assertEquals('b', a['b'] )
|
||||
|
||||
def testCopyCopy(self):
"""
@@ -60,31 +60,31 @@ class COWTestCase(unittest.TestCase):
c['a'] = 30

# test separation of the two instances
self.assertEqual(False, 'c' in c)
self.assertEqual(30, c['a'])
self.assertEqual(10, b['a'])
self.assertEquals(False, c.has_key('c'))
self.assertEquals(30, c['a'])
self.assertEquals(10, b['a'])

# test copy
b_2 = b.copy()
c_2 = c.copy()

self.assertEqual(False, 'c' in c_2)
self.assertEqual(10, b_2['a'])
self.assertEquals(False, c_2.has_key('c'))
self.assertEquals(10, b_2['a'])

b_2['d'] = 40
self.assertEqual(False, 'd' in c_2)
self.assertEqual(True, 'd' in b_2)
self.assertEqual(40, b_2['d'])
self.assertEqual(False, 'd' in b)
self.assertEqual(False, 'd' in c)
self.assertEquals(False, c_2.has_key('d'))
self.assertEquals(True, b_2.has_key('d'))
self.assertEquals(40, b_2['d'])
self.assertEquals(False, b.has_key('d'))
self.assertEquals(False, c.has_key('d'))

c_2['d'] = 30
self.assertEqual(True, 'd' in c_2)
self.assertEqual(True, 'd' in b_2)
self.assertEqual(30, c_2['d'])
self.assertEqual(40, b_2['d'])
self.assertEqual(False, 'd' in b)
self.assertEqual(False, 'd' in c)
self.assertEquals(True, c_2.has_key('d'))
self.assertEquals(True, b_2.has_key('d'))
self.assertEquals(30, c_2['d'])
self.assertEquals(40, b_2['d'])
self.assertEquals(False, b.has_key('d'))
self.assertEquals(False, c.has_key('d'))

# test copy of the copy
c_3 = c_2.copy()
@@ -92,19 +92,19 @@ class COWTestCase(unittest.TestCase):
b_3_2 = b_2.copy()

c_3['e'] = 4711
self.assertEqual(4711, c_3['e'])
self.assertEqual(False, 'e' in c_2)
self.assertEqual(False, 'e' in b_3)
self.assertEqual(False, 'e' in b_3_2)
self.assertEqual(False, 'e' in b_2)
self.assertEquals(4711, c_3['e'])
self.assertEquals(False, c_2.has_key('e'))
self.assertEquals(False, b_3.has_key('e'))
self.assertEquals(False, b_3_2.has_key('e'))
self.assertEquals(False, b_2.has_key('e'))

b_3['e'] = 'viel'
self.assertEqual('viel', b_3['e'])
self.assertEqual(4711, c_3['e'])
self.assertEqual(False, 'e' in c_2)
self.assertEqual(True, 'e' in b_3)
self.assertEqual(False, 'e' in b_3_2)
self.assertEqual(False, 'e' in b_2)
self.assertEquals('viel', b_3['e'])
self.assertEquals(4711, c_3['e'])
self.assertEquals(False, c_2.has_key('e'))
self.assertEquals(True, b_3.has_key('e'))
self.assertEquals(False, b_3_2.has_key('e'))
self.assertEquals(False, b_2.has_key('e'))

def testCow(self):
from bb.COW import COWDictBase
@@ -115,12 +115,12 @@ class COWTestCase(unittest.TestCase):

copy = c.copy()

self.assertEqual(1027, c['123'])
self.assertEqual(4711, c['other'])
self.assertEqual({'abc':10, 'bcd':20}, c['d'])
self.assertEqual(1027, copy['123'])
self.assertEqual(4711, copy['other'])
self.assertEqual({'abc':10, 'bcd':20}, copy['d'])
self.assertEquals(1027, c['123'])
self.assertEquals(4711, c['other'])
self.assertEquals({'abc':10, 'bcd':20}, c['d'])
self.assertEquals(1027, copy['123'])
self.assertEquals(4711, copy['other'])
self.assertEquals({'abc':10, 'bcd':20}, copy['d'])

# cow it now
copy['123'] = 1028
@@ -128,9 +128,9 @@ class COWTestCase(unittest.TestCase):
copy['d']['abc'] = 20


self.assertEqual(1027, c['123'])
self.assertEqual(4711, c['other'])
self.assertEqual({'abc':10, 'bcd':20}, c['d'])
self.assertEqual(1028, copy['123'])
self.assertEqual(4712, copy['other'])
self.assertEqual({'abc':20, 'bcd':20}, copy['d'])
self.assertEquals(1027, c['123'])
self.assertEquals(4711, c['other'])
self.assertEquals({'abc':10, 'bcd':20}, c['d'])
self.assertEquals(1028, copy['123'])
self.assertEquals(4712, copy['other'])
self.assertEquals({'abc':20, 'bcd':20}, copy['d'])
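
Both sides of these hunks exercise the same copy-on-write semantics; only the assertion style differs (assertEqual with the in operator on one side, the older assertEquals with has_key on the other). A minimal sketch of the behaviour under test, assuming bb.COW is importable and that the newer side's in support is available:

    from bb.COW import COWDictBase

    b = COWDictBase.copy()   # fresh copy-on-write dictionary
    b['a'] = 10

    c = b.copy()             # cheap layered copy; reads fall through to b
    c['a'] = 30              # writes land only in the new layer

    assert b['a'] == 10      # parent is unchanged
    assert c['a'] == 30
    assert 'd' not in c      # keys written to neither layer are absent
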
@@ -24,30 +24,6 @@ import unittest
import bb
import bb.data
import bb.parse
import logging

class LogRecord():
def __enter__(self):
logs = []
class LogHandler(logging.Handler):
def emit(self, record):
logs.append(record)
logger = logging.getLogger("BitBake")
handler = LogHandler()
self.handler = handler
logger.addHandler(handler)
return logs
def __exit__(self, type, value, traceback):
logger = logging.getLogger("BitBake")
logger.removeHandler(self.handler)
return

def logContains(item, logs):
for l in logs:
m = l.getMessage()
if item in m:
return True
return False

class DataExpansions(unittest.TestCase):
def setUp(self):
@@ -80,11 +56,6 @@ class DataExpansions(unittest.TestCase):
val = self.d.expand("${@d.getVar('foo', True) + ' ${bar}'}")
self.assertEqual(str(val), "value_of_foo value_of_bar")

def test_python_unexpanded(self):
self.d.setVar("bar", "${unsetvar}")
val = self.d.expand("${@d.getVar('foo', True) + ' ${bar}'}")
self.assertEqual(str(val), "${@d.getVar('foo', True) + ' ${unsetvar}'}")

def test_python_snippet_syntax_error(self):
self.d.setVar("FOO", "${@foo = 5}")
self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)
@@ -139,22 +110,22 @@ class DataExpansions(unittest.TestCase):

def test_rename(self):
self.d.renameVar("foo", "newfoo")
self.assertEqual(self.d.getVar("newfoo", False), "value_of_foo")
self.assertEqual(self.d.getVar("foo", False), None)
self.assertEqual(self.d.getVar("newfoo"), "value_of_foo")
self.assertEqual(self.d.getVar("foo"), None)

def test_deletion(self):
self.d.delVar("foo")
self.assertEqual(self.d.getVar("foo", False), None)
self.assertEqual(self.d.getVar("foo"), None)

def test_keys(self):
keys = list(self.d.keys())
self.assertCountEqual(keys, ['value_of_foo', 'foo', 'bar'])
keys = self.d.keys()
self.assertEqual(keys, ['value_of_foo', 'foo', 'bar'])

def test_keys_deletion(self):
newd = bb.data.createCopy(self.d)
newd.delVar("bar")
keys = list(newd.keys())
self.assertCountEqual(keys, ['value_of_foo', 'foo'])
keys = newd.keys()
self.assertEqual(keys, ['value_of_foo', 'foo'])

class TestNestedExpansions(unittest.TestCase):
def setUp(self):
@@ -201,28 +172,28 @@ class TestMemoize(unittest.TestCase):
def test_memoized(self):
d = bb.data.init()
d.setVar("FOO", "bar")
self.assertTrue(d.getVar("FOO", False) is d.getVar("FOO", False))
self.assertTrue(d.getVar("FOO") is d.getVar("FOO"))

def test_not_memoized(self):
d1 = bb.data.init()
d2 = bb.data.init()
d1.setVar("FOO", "bar")
d2.setVar("FOO", "bar2")
self.assertTrue(d1.getVar("FOO", False) is not d2.getVar("FOO", False))
self.assertTrue(d1.getVar("FOO") is not d2.getVar("FOO"))

def test_changed_after_memoized(self):
d = bb.data.init()
d.setVar("foo", "value of foo")
self.assertEqual(str(d.getVar("foo", False)), "value of foo")
self.assertEqual(str(d.getVar("foo")), "value of foo")
d.setVar("foo", "second value of foo")
self.assertEqual(str(d.getVar("foo", False)), "second value of foo")
self.assertEqual(str(d.getVar("foo")), "second value of foo")

def test_same_value(self):
d = bb.data.init()
d.setVar("foo", "value of")
d.setVar("bar", "value of")
self.assertEqual(d.getVar("foo", False),
d.getVar("bar", False))
self.assertEqual(d.getVar("foo"),
d.getVar("bar"))

class TestConcat(unittest.TestCase):
def setUp(self):
@@ -275,13 +246,6 @@ class TestConcatOverride(unittest.TestCase):
bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "foo:val:val2:bar")

def test_append_unset(self):
self.d.setVar("TEST_prepend", "${FOO}:")
self.d.setVar("TEST_append", ":val2")
self.d.setVar("TEST_append", ":${BAR}")
bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "foo::val2:bar")

def test_remove(self):
self.d.setVar("TEST", "${VAL} ${BAR}")
self.d.setVar("TEST_remove", "val")
@@ -330,66 +294,13 @@ class TestOverrides(unittest.TestCase):
bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "testvalue2")

def test_one_override_unset(self):
self.d.setVar("TEST2_bar", "testvalue2")
bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST2", True), "testvalue2")
self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST2', 'OVERRIDES', 'TEST2_bar'])

def test_multiple_override(self):
self.d.setVar("TEST_bar", "testvalue2")
self.d.setVar("TEST_local", "testvalue3")
self.d.setVar("TEST_foo", "testvalue4")
bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "testvalue3")
self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST_foo', 'OVERRIDES', 'TEST_bar', 'TEST_local'])

def test_multiple_combined_overrides(self):
self.d.setVar("TEST_local_foo_bar", "testvalue3")
bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "testvalue3")

def test_multiple_overrides_unset(self):
self.d.setVar("TEST2_local_foo_bar", "testvalue3")
bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST2", True), "testvalue3")

def test_keyexpansion_override(self):
self.d.setVar("LOCAL", "local")
self.d.setVar("TEST_bar", "testvalue2")
self.d.setVar("TEST_${LOCAL}", "testvalue3")
self.d.setVar("TEST_foo", "testvalue4")
bb.data.update_data(self.d)
bb.data.expandKeys(self.d)
self.assertEqual(self.d.getVar("TEST", True), "testvalue3")

def test_rename_override(self):
self.d.setVar("ALTERNATIVE_ncurses-tools_class-target", "a")
self.d.setVar("OVERRIDES", "class-target")
bb.data.update_data(self.d)
self.d.renameVar("ALTERNATIVE_ncurses-tools", "ALTERNATIVE_lib32-ncurses-tools")
self.assertEqual(self.d.getVar("ALTERNATIVE_lib32-ncurses-tools", True), "a")

def test_underscore_override(self):
self.d.setVar("TEST_bar", "testvalue2")
self.d.setVar("TEST_some_val", "testvalue3")
self.d.setVar("TEST_foo", "testvalue4")
self.d.setVar("OVERRIDES", "foo:bar:some_val")
self.assertEqual(self.d.getVar("TEST", True), "testvalue3")

class TestKeyExpansion(unittest.TestCase):
def setUp(self):
self.d = bb.data.init()
self.d.setVar("FOO", "foo")
self.d.setVar("BAR", "foo")

def test_keyexpand(self):
self.d.setVar("VAL_${FOO}", "A")
self.d.setVar("VAL_${BAR}", "B")
with LogRecord() as logs:
bb.data.expandKeys(self.d)
self.assertTrue(logContains("Variable key VAL_${FOO} (A) replaces original key VAL_foo (B)", logs))
self.assertEqual(self.d.getVar("VAL_foo", True), "A")

class TestFlags(unittest.TestCase):
def setUp(self):
@@ -399,13 +310,13 @@ class TestFlags(unittest.TestCase):
self.d.setVarFlag("foo", "flag2", "value of flag2")

def test_setflag(self):
self.assertEqual(self.d.getVarFlag("foo", "flag1", False), "value of flag1")
self.assertEqual(self.d.getVarFlag("foo", "flag2", False), "value of flag2")
self.assertEqual(self.d.getVarFlag("foo", "flag1"), "value of flag1")
self.assertEqual(self.d.getVarFlag("foo", "flag2"), "value of flag2")

def test_delflag(self):
self.d.delVarFlag("foo", "flag2")
self.assertEqual(self.d.getVarFlag("foo", "flag1", False), "value of flag1")
self.assertEqual(self.d.getVarFlag("foo", "flag2", False), None)
self.assertEqual(self.d.getVarFlag("foo", "flag1"), "value of flag1")
self.assertEqual(self.d.getVarFlag("foo", "flag2"), None)


class Contains(unittest.TestCase):
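
The LogRecord helper above is a context manager: on entry it attaches a capturing handler to the "BitBake" logger and yields the list that collects records, and on exit it detaches the handler again. Together with logContains() it lets a test assert on log output, roughly like this (a sketch using only the helpers defined in the diff above):

    import logging

    with LogRecord() as logs:
        logging.getLogger("BitBake").warning("something worth asserting on")

    assert logContains("worth asserting", logs)
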
@@ -22,7 +22,6 @@
import unittest
import tempfile
import subprocess
import collections
import os
from bb.fetch2 import URI
from bb.fetch2 import FetchMethod
@@ -134,10 +133,10 @@ class URITest(unittest.TestCase):
'userinfo': 'anoncvs:anonymous',
'username': 'anoncvs',
'password': 'anonymous',
'params': collections.OrderedDict([
('tag', 'V0-99-81'),
('module', 'familiar/dist/ipkg')
]),
'params': {
'tag': 'V0-99-81',
'module': 'familiar/dist/ipkg'
},
'query': {},
'relative': False
},
@@ -229,38 +228,7 @@ class URITest(unittest.TestCase):
'params': {},
'query': {},
'relative': False
},
"http://somesite.net;someparam=1": {
'uri': 'http://somesite.net;someparam=1',
'scheme': 'http',
'hostname': 'somesite.net',
'port': None,
'hostport': 'somesite.net',
'path': '',
'userinfo': '',
'userinfo': '',
'username': '',
'password': '',
'params': {"someparam" : "1"},
'query': {},
'relative': False
},
"file://somelocation;someparam=1": {
'uri': 'file:somelocation;someparam=1',
'scheme': 'file',
'hostname': '',
'port': None,
'hostport': '',
'path': 'somelocation',
'userinfo': '',
'userinfo': '',
'username': '',
'password': '',
'params': {"someparam" : "1"},
'query': {},
'relative': True
}

}

def test_uri(self):
@@ -360,10 +328,7 @@ class FetcherTest(unittest.TestCase):

def tearDown(self):
os.chdir(self.origdir)
if os.environ.get("BB_TMPDIR_NOCLEAN") == "yes":
print("Not cleaning up %s. Please remove manually." % self.tempdir)
else:
bb.utils.prunedir(self.tempdir)
bb.utils.prunedir(self.tempdir)

class MirrorUriTest(FetcherTest):

@@ -428,33 +393,11 @@ class MirrorUriTest(FetcherTest):
uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
self.assertEqual(uris, ['file:///someotherpath/downloads/bitbake-1.0.tar.gz'])

def test_mirror_of_mirror(self):
# Test if mirror of a mirror works
mirrorvar = self.mirrorvar + " http://.*/.* http://otherdownloads.yoctoproject.org/downloads/ \n"
mirrorvar = mirrorvar + " http://otherdownloads.yoctoproject.org/.* http://downloads2.yoctoproject.org/downloads/ \n"
fetcher = bb.fetch.FetchData("http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d)
mirrors = bb.fetch2.mirror_from_string(mirrorvar)
uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
self.assertEqual(uris, ['file:///somepath/downloads/bitbake-1.0.tar.gz',
'file:///someotherpath/downloads/bitbake-1.0.tar.gz',
'http://otherdownloads.yoctoproject.org/downloads/bitbake-1.0.tar.gz',
'http://downloads2.yoctoproject.org/downloads/bitbake-1.0.tar.gz'])

recmirrorvar = "https://.*/[^/]* http://AAAA/A/A/A/ \n" \
"https://.*/[^/]* https://BBBB/B/B/B/ \n"

def test_recursive(self):
fetcher = bb.fetch.FetchData("https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d)
mirrors = bb.fetch2.mirror_from_string(self.recmirrorvar)
uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
self.assertEqual(uris, ['http://AAAA/A/A/A/bitbake/bitbake-1.0.tar.gz',
'https://BBBB/B/B/B/bitbake/bitbake-1.0.tar.gz',
'http://AAAA/A/A/A/B/B/bitbake/bitbake-1.0.tar.gz'])
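
The MirrorUriTest cases above exercise only URI rewriting, not downloading: mirror_from_string() turns the two-column MIRRORS text into pattern/replacement pairs, and build_mirroruris() expands a FetchData object against them. A condensed sketch of that flow, assuming a datastore prepared the way FetcherTest's setUp prepares self.d (DL_DIR and friends configured):

    import bb.data
    import bb.fetch2

    d = bb.data.init()   # the real tests also configure DL_DIR etc. in setUp
    mirrorvar = "http://.*/.* http://otherdownloads.yoctoproject.org/downloads/ \n"

    fd = bb.fetch2.FetchData(
        "http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", d)
    mirrors = bb.fetch2.mirror_from_string(mirrorvar)
    uris, uds = bb.fetch2.build_mirroruris(fd, mirrors, d)
    # uris now lists the candidate mirror URLs, in MIRRORS order
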
class FetcherLocalTest(FetcherTest):
def setUp(self):
def touch(fn):
with open(fn, 'a'):
with file(fn, 'a'):
os.utime(fn, None)

super(FetcherLocalTest, self).setUp()
@@ -486,7 +429,9 @@ class FetcherLocalTest(FetcherTest):

def test_local_wildcard(self):
tree = self.fetchUnpack(['file://a', 'file://dir/*'])
self.assertEqual(tree, ['a', 'dir/c', 'dir/d', 'dir/subdir/e'])
# FIXME: this is broken - it should return ['a', 'dir/c', 'dir/d', 'dir/subdir/e']
# see https://bugzilla.yoctoproject.org/show_bug.cgi?id=6128
self.assertEqual(tree, ['a', 'b', 'dir/c', 'dir/d', 'dir/subdir/e'])

def test_local_dir(self):
tree = self.fetchUnpack(['file://a', 'file://dir'])
@@ -494,29 +439,22 @@ class FetcherLocalTest(FetcherTest):

def test_local_subdir(self):
tree = self.fetchUnpack(['file://dir/subdir'])
self.assertEqual(tree, ['dir/subdir/e'])
# FIXME: this is broken - it should return ['dir/subdir/e']
# see https://bugzilla.yoctoproject.org/show_bug.cgi?id=6129
self.assertEqual(tree, ['subdir/e'])

def test_local_subdir_file(self):
tree = self.fetchUnpack(['file://dir/subdir/e'])
self.assertEqual(tree, ['dir/subdir/e'])

def test_local_subdirparam(self):
tree = self.fetchUnpack(['file://a;subdir=bar', 'file://dir;subdir=foo/moo'])
self.assertEqual(tree, ['bar/a', 'foo/moo/dir/c', 'foo/moo/dir/d', 'foo/moo/dir/subdir/e'])
tree = self.fetchUnpack(['file://a;subdir=bar'])
self.assertEqual(tree, ['bar/a'])

def test_local_deepsubdirparam(self):
tree = self.fetchUnpack(['file://dir/subdir/e;subdir=bar'])
self.assertEqual(tree, ['bar/dir/subdir/e'])

def test_local_absolutedir(self):
# Unpacking to an absolute path that is a subdirectory of the root
# should work
tree = self.fetchUnpack(['file://a;subdir=%s' % os.path.join(self.unpackdir, 'bar')])

# Unpacking to an absolute path outside of the root should fail
with self.assertRaises(bb.fetch2.UnpackError):
self.fetchUnpack(['file://a;subdir=/bin/sh'])

class FetcherNetworkTest(FetcherTest):

if os.environ.get("BB_SKIP_NETTESTS") == "yes":
@@ -540,19 +478,6 @@ class FetcherNetworkTest(FetcherTest):
fetcher.download()
self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)

def test_fetch_mirror_of_mirror(self):
self.d.setVar("MIRRORS", "http://.*/.* http://invalid2.yoctoproject.org/ \n http://invalid2.yoctoproject.org/.* http://downloads.yoctoproject.org/releases/bitbake")
fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
fetcher.download()
self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)

def test_fetch_file_mirror_of_mirror(self):
self.d.setVar("MIRRORS", "http://.*/.* file:///some1where/ \n file:///some1where/.* file://some2where/ \n file://some2where/.* http://downloads.yoctoproject.org/releases/bitbake")
fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
os.mkdir(self.dldir + "/some2where")
fetcher.download()
self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)

def test_fetch_premirror(self):
self.d.setVar("PREMIRRORS", "http://.*/.* http://downloads.yoctoproject.org/releases/bitbake")
fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
@@ -597,36 +522,6 @@ class FetcherNetworkTest(FetcherTest):
url1 = url2 = "git://git.openembedded.org/bitbake;rev=270a05b0b4ba0959fe0624d2a4885d7b70426da5;tag=270a05b0b4ba0959fe0624d2a4885d7b70426da5"
self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2)

def test_gitfetch_localusehead(self):
# Create dummy local Git repo
src_dir = tempfile.mkdtemp(dir=self.tempdir,
prefix='gitfetch_localusehead_')
src_dir = os.path.abspath(src_dir)
bb.process.run("git init", cwd=src_dir)
bb.process.run("git commit --allow-empty -m'Dummy commit'",
cwd=src_dir)
# Use other branch than master
bb.process.run("git checkout -b my-devel", cwd=src_dir)
bb.process.run("git commit --allow-empty -m'Dummy commit 2'",
cwd=src_dir)
stdout = bb.process.run("git rev-parse HEAD", cwd=src_dir)
orig_rev = stdout[0].strip()

# Fetch and check revision
self.d.setVar("SRCREV", "AUTOINC")
url = "git://" + src_dir + ";protocol=file;usehead=1"
fetcher = bb.fetch.Fetch([url], self.d)
fetcher.download()
fetcher.unpack(self.unpackdir)
stdout = bb.process.run("git rev-parse HEAD",
cwd=os.path.join(self.unpackdir, 'git'))
unpack_rev = stdout[0].strip()
self.assertEqual(orig_rev, unpack_rev)

def test_gitfetch_remoteusehead(self):
url = "git://git.openembedded.org/bitbake;usehead=1"
self.assertRaises(bb.fetch.ParameterError, self.gitfetcher, url, url)

def test_gitfetch_premirror(self):
url1 = "git://git.openembedded.org/bitbake"
url2 = "git://someserver.org/bitbake"
@@ -654,68 +549,17 @@ class FetcherNetworkTest(FetcherTest):
os.chdir(os.path.dirname(self.unpackdir))
fetcher.unpack(self.unpackdir)

class TrustedNetworksTest(FetcherTest):
def test_trusted_network(self):
# Ensure trusted_network returns True when the host IS in the list.
url = "git://Someserver.org/foo;rev=1"
self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org someserver.org server2.org server3.org")
self.assertTrue(bb.fetch.trusted_network(self.d, url))

def test_wild_trusted_network(self):
# Ensure trusted_network returns true when the *.host IS in the list.
url = "git://Someserver.org/foo;rev=1"
self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org")
self.assertTrue(bb.fetch.trusted_network(self.d, url))

def test_prefix_wild_trusted_network(self):
# Ensure trusted_network returns true when the prefix matches *.host.
url = "git://git.Someserver.org/foo;rev=1"
self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org")
self.assertTrue(bb.fetch.trusted_network(self.d, url))

def test_two_prefix_wild_trusted_network(self):
# Ensure trusted_network returns true when the prefix matches *.host.
url = "git://something.git.Someserver.org/foo;rev=1"
self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org")
self.assertTrue(bb.fetch.trusted_network(self.d, url))

def test_port_trusted_network(self):
# Ensure trusted_network returns True, even if the url specifies a port.
url = "git://someserver.org:8080/foo;rev=1"
self.d.setVar("BB_ALLOWED_NETWORKS", "someserver.org")
self.assertTrue(bb.fetch.trusted_network(self.d, url))

def test_untrusted_network(self):
# Ensure trusted_network returns False when the host is NOT in the list.
url = "git://someserver.org/foo;rev=1"
self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org server2.org server3.org")
self.assertFalse(bb.fetch.trusted_network(self.d, url))

def test_wild_untrusted_network(self):
# Ensure trusted_network returns False when the host is NOT in the list.
url = "git://*.someserver.org/foo;rev=1"
self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org server2.org server3.org")
self.assertFalse(bb.fetch.trusted_network(self.d, url))

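
All of the TrustedNetworksTest cases reduce to one rule: bb.fetch.trusted_network() accepts a URL only when its host matches an entry in BB_ALLOWED_NETWORKS, where matching ignores case and any port suffix, and a leading *. entry matches any subdomain prefix. A condensed restatement of the cases above, as a sketch:

    import bb.data
    import bb.fetch

    d = bb.data.init()
    d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org")

    assert bb.fetch.trusted_network(d, "git://git.Someserver.org/foo;rev=1")
    assert bb.fetch.trusted_network(d, "git://server1.org:8080/foo;rev=1")
    assert not bb.fetch.trusted_network(d, "git://untrusted.org/foo;rev=1")
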
class URLHandle(unittest.TestCase):

datatable = {
"http://www.google.com/index.html" : ('http', 'www.google.com', '/index.html', '', '', {}),
"cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'}),
"cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', collections.OrderedDict([('tag', 'V0-99-81'), ('module', 'familiar/dist/ipkg')])),
"git://git.openembedded.org/bitbake;branch=@foo" : ('git', 'git.openembedded.org', '/bitbake', '', '', {'branch': '@foo'}),
"file://somelocation;someparam=1": ('file', '', 'somelocation', '', '', {'someparam': '1'}),
"cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', {'tag': 'V0-99-81', 'module': 'familiar/dist/ipkg'}),
"git://git.openembedded.org/bitbake;branch=@foo" : ('git', 'git.openembedded.org', '/bitbake', '', '', {'branch': '@foo'})
}
# we require a pathname to encodeurl but users can still pass such urls to
# decodeurl and we need to handle them
decodedata = datatable.copy()
decodedata.update({
"http://somesite.net;someparam=1": ('http', 'somesite.net', '', '', '', {'someparam': '1'}),
})

def test_decodeurl(self):
for k, v in self.decodedata.items():
for k, v in self.datatable.items():
result = bb.fetch.decodeurl(k)
self.assertEqual(result, v)

@@ -724,7 +568,7 @@ class URLHandle(unittest.TestCase):
result = bb.fetch.encodeurl(v)
self.assertEqual(result, k)

class FetchLatestVersionTest(FetcherTest):
class FetchMethodTest(FetcherTest):

test_git_uris = {
# version pattern "X.Y.Z"
@@ -747,7 +591,7 @@ class FetchLatestVersionTest(FetcherTest):
# version pattern "yyyymmdd"
("mobile-broadband-provider-info", "git://git.gnome.org/mobile-broadband-provider-info", "4ed19e11c2975105b71b956440acdb25d46a347d", "")
: "20120614",
# packages with a valid UPSTREAM_CHECK_GITTAGREGEX
# packages with a valid GITTAGREGEX
("xf86-video-omap", "git://anongit.freedesktop.org/xorg/driver/xf86-video-omap", "ae0394e687f1a77e966cf72f895da91840dffb8f", "(?P<pver>(\d+\.(\d\.?)*))")
: "0.4.3",
("build-appliance-image", "git://git.yoctoproject.org/poky", "b37dd451a52622d5b570183a81583cc34c2ff555", "(?P<pver>(([0-9][\.|_]?)+[0-9]))")
@@ -789,66 +633,18 @@ class FetchLatestVersionTest(FetcherTest):
for k, v in self.test_git_uris.items():
self.d.setVar("PN", k[0])
self.d.setVar("SRCREV", k[2])
self.d.setVar("UPSTREAM_CHECK_GITTAGREGEX", k[3])
self.d.setVar("GITTAGREGEX", k[3])
ud = bb.fetch2.FetchData(k[1], self.d)
pupver= ud.method.latest_versionstring(ud, self.d)
verstring = pupver[0]
verstring = ud.method.latest_versionstring(ud, self.d)
r = bb.utils.vercmp_string(v, verstring)
self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))

def test_wget_latest_versionstring(self):
for k, v in self.test_wget_uris.items():
self.d.setVar("PN", k[0])
self.d.setVar("UPSTREAM_CHECK_URI", k[2])
self.d.setVar("UPSTREAM_CHECK_REGEX", k[3])
self.d.setVar("REGEX_URI", k[2])
self.d.setVar("REGEX", k[3])
ud = bb.fetch2.FetchData(k[1], self.d)
pupver = ud.method.latest_versionstring(ud, self.d)
verstring = pupver[0]
verstring = ud.method.latest_versionstring(ud, self.d)
r = bb.utils.vercmp_string(v, verstring)
self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))


class FetchCheckStatusTest(FetcherTest):
test_wget_uris = ["http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2",
"http://www.cups.org/",
"http://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz",
"http://downloads.yoctoproject.org/releases/sato/sato-engine-0.2.tar.gz",
"http://downloads.yoctoproject.org/releases/sato/sato-engine-0.3.tar.gz",
"https://yoctoproject.org/",
"https://yoctoproject.org/documentation",
"http://downloads.yoctoproject.org/releases/opkg/opkg-0.1.7.tar.gz",
"http://downloads.yoctoproject.org/releases/opkg/opkg-0.3.0.tar.gz",
"ftp://ftp.gnu.org/gnu/autoconf/autoconf-2.60.tar.gz",
"ftp://ftp.gnu.org/gnu/chess/gnuchess-5.08.tar.gz",
"ftp://ftp.gnu.org/gnu/gmp/gmp-4.0.tar.gz",
# GitHub releases are hosted on Amazon S3, which doesn't support HEAD
"https://github.com/kergoth/tslib/releases/download/1.1/tslib-1.1.tar.xz"
]

if os.environ.get("BB_SKIP_NETTESTS") == "yes":
print("Unset BB_SKIP_NETTESTS to run network tests")
else:

def test_wget_checkstatus(self):
fetch = bb.fetch2.Fetch(self.test_wget_uris, self.d)
for u in self.test_wget_uris:
ud = fetch.ud[u]
m = ud.method
ret = m.checkstatus(fetch, ud, self.d)
self.assertTrue(ret, msg="URI %s, can't check status" % (u))


def test_wget_checkstatus_connection_cache(self):
from bb.fetch2 import FetchConnectionCache

connection_cache = FetchConnectionCache()
fetch = bb.fetch2.Fetch(self.test_wget_uris, self.d,
connection_cache = connection_cache)

for u in self.test_wget_uris:
ud = fetch.ud[u]
m = ud.method
ret = m.checkstatus(fetch, ud, self.d)
self.assertTrue(ret, msg="URI %s, can't check status" % (u))

connection_cache.close_connections()

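
test_wget_checkstatus_connection_cache drives the same checkstatus() path as the plain test, but shares connections across URIs through a FetchConnectionCache. Stripped to its essentials (one URI taken from the list above; a network connection and a datastore prepared as in FetcherTest's setUp are assumed):

    import bb.data
    import bb.fetch2
    from bb.fetch2 import FetchConnectionCache

    d = bb.data.init()   # the real tests also set DL_DIR etc. in setUp
    uris = ["http://downloads.yoctoproject.org/releases/opkg/opkg-0.3.0.tar.gz"]

    connection_cache = FetchConnectionCache()
    fetch = bb.fetch2.Fetch(uris, d, connection_cache=connection_cache)
    try:
        for u in uris:
            ud = fetch.ud[u]
            assert ud.method.checkstatus(fetch, ud, d)  # liveness check, no download
    finally:
        connection_cache.close_connections()
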
@@ -1,164 +0,0 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# BitBake Test for lib/bb/parse/
#
# Copyright (C) 2015 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#

import unittest
import tempfile
import logging
import bb
import os

logger = logging.getLogger('BitBake.TestParse')

import bb.parse
import bb.data
import bb.siggen

class ParseTest(unittest.TestCase):

testfile = """
A = "1"
B = "2"
do_install() {
echo "hello"
}

C = "3"
"""

def setUp(self):
self.d = bb.data.init()
bb.parse.siggen = bb.siggen.init(self.d)

def parsehelper(self, content, suffix = ".bb"):

f = tempfile.NamedTemporaryFile(suffix = suffix)
f.write(bytes(content, "utf-8"))
f.flush()
os.chdir(os.path.dirname(f.name))
return f

def test_parse_simple(self):
f = self.parsehelper(self.testfile)
d = bb.parse.handle(f.name, self.d)['']
self.assertEqual(d.getVar("A", True), "1")
self.assertEqual(d.getVar("B", True), "2")
self.assertEqual(d.getVar("C", True), "3")

def test_parse_incomplete_function(self):
testfileB = self.testfile.replace("}", "")
f = self.parsehelper(testfileB)
with self.assertRaises(bb.parse.ParseError):
d = bb.parse.handle(f.name, self.d)['']

unsettest = """
A = "1"
B = "2"
B[flag] = "3"

unset A
unset B[flag]
"""

def test_parse_unset(self):
f = self.parsehelper(self.unsettest)
d = bb.parse.handle(f.name, self.d)['']
self.assertEqual(d.getVar("A", True), None)
self.assertEqual(d.getVarFlag("A","flag", True), None)
self.assertEqual(d.getVar("B", True), "2")


overridetest = """
RRECOMMENDS_${PN} = "a"
RRECOMMENDS_${PN}_libc = "b"
OVERRIDES = "libc:${PN}"
PN = "gtk+"
"""

def test_parse_overrides(self):
f = self.parsehelper(self.overridetest)
d = bb.parse.handle(f.name, self.d)['']
self.assertEqual(d.getVar("RRECOMMENDS", True), "b")
bb.data.expandKeys(d)
self.assertEqual(d.getVar("RRECOMMENDS", True), "b")
d.setVar("RRECOMMENDS_gtk+", "c")
self.assertEqual(d.getVar("RRECOMMENDS", True), "c")

overridetest2 = """
EXTRA_OECONF = ""
EXTRA_OECONF_class-target = "b"
EXTRA_OECONF_append = " c"
"""

def test_parse_overrides(self):
f = self.parsehelper(self.overridetest2)
d = bb.parse.handle(f.name, self.d)['']
d.appendVar("EXTRA_OECONF", " d")
d.setVar("OVERRIDES", "class-target")
self.assertEqual(d.getVar("EXTRA_OECONF", True), "b c d")

overridetest3 = """
DESCRIPTION = "A"
DESCRIPTION_${PN}-dev = "${DESCRIPTION} B"
PN = "bc"
"""

def test_parse_combinations(self):
f = self.parsehelper(self.overridetest3)
d = bb.parse.handle(f.name, self.d)['']
bb.data.expandKeys(d)
self.assertEqual(d.getVar("DESCRIPTION_bc-dev", True), "A B")
d.setVar("DESCRIPTION", "E")
d.setVar("DESCRIPTION_bc-dev", "C D")
d.setVar("OVERRIDES", "bc-dev")
self.assertEqual(d.getVar("DESCRIPTION", True), "C D")


classextend = """
VAR_var_override1 = "B"
EXTRA = ":override1"
OVERRIDES = "nothing${EXTRA}"

BBCLASSEXTEND = "###CLASS###"
"""
classextend_bbclass = """
EXTRA = ""
python () {
d.renameVar("VAR_var", "VAR_var2")
}
"""

#
# Test based upon a real world data corruption issue. One
# data store changing a variable poked through into a different data
# store. This test case replicates that issue where the value 'B' would
# become unset/disappear.
#
def test_parse_classextend_contamination(self):
cls = self.parsehelper(self.classextend_bbclass, suffix=".bbclass")
#clsname = os.path.basename(cls.name).replace(".bbclass", "")
self.classextend = self.classextend.replace("###CLASS###", cls.name)
f = self.parsehelper(self.classextend)
alldata = bb.parse.handle(f.name, self.d)
d1 = alldata['']
d2 = alldata[cls.name]
self.assertEqual(d1.getVar("VAR_var", True), "B")
self.assertEqual(d2.getVar("VAR_var", True), None)

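
Every ParseTest above follows the same recipe: write the snippet to a temporary .bb file, pass it through bb.parse.handle(), and inspect the datastore keyed by '' in the returned dict. A minimal standalone version of that pattern, built only from the calls shown in the file:

    import os
    import tempfile

    import bb.data
    import bb.parse
    import bb.siggen

    d = bb.data.init()
    bb.parse.siggen = bb.siggen.init(d)   # parsing needs a signature generator

    with tempfile.NamedTemporaryFile(suffix=".bb") as f:
        f.write(bytes('A = "1"\n', "utf-8"))
        f.flush()
        os.chdir(os.path.dirname(f.name))
        parsed = bb.parse.handle(f.name, d)['']

    assert parsed.getVar("A", True) == "1"
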
@@ -22,8 +22,6 @@
import unittest
import bb
import os
import tempfile
import re

class VerCmpString(unittest.TestCase):

@@ -107,497 +105,3 @@ class Path(unittest.TestCase):
for arg1, correctresult in checkitems:
result = bb.utils._check_unsafe_delete_path(arg1)
self.assertEqual(result, correctresult, '_check_unsafe_delete_path("%s") != %s' % (arg1, correctresult))


class EditMetadataFile(unittest.TestCase):
_origfile = """
# A comment
HELLO = "oldvalue"

THIS = "that"

# Another comment
NOCHANGE = "samevalue"
OTHER = 'anothervalue'

MULTILINE = "a1 \\
a2 \\
a3"

MULTILINE2 := " \\
b1 \\
b2 \\
b3 \\
"


MULTILINE3 = " \\
c1 \\
c2 \\
c3 \\
"

do_functionname() {
command1 ${VAL1} ${VAL2}
command2 ${VAL3} ${VAL4}
}
"""
def _testeditfile(self, varvalues, compareto, dummyvars=None):
if dummyvars is None:
dummyvars = []
with tempfile.NamedTemporaryFile('w', delete=False) as tf:
tf.write(self._origfile)
tf.close()
try:
varcalls = []
def handle_file(varname, origvalue, op, newlines):
self.assertIn(varname, varvalues, 'Callback called for variable %s not in the list!' % varname)
self.assertNotIn(varname, dummyvars, 'Callback called for variable %s in dummy list!' % varname)
varcalls.append(varname)
return varvalues[varname]

bb.utils.edit_metadata_file(tf.name, varvalues.keys(), handle_file)
with open(tf.name) as f:
modfile = f.readlines()
# Ensure the output matches the expected output
self.assertEqual(compareto.splitlines(True), modfile)
# Ensure the callback function was called for every variable we asked for
# (plus allow testing behaviour when a requested variable is not present)
self.assertEqual(sorted(varvalues.keys()), sorted(varcalls + dummyvars))
finally:
os.remove(tf.name)


def test_edit_metadata_file_nochange(self):
# Test file doesn't get modified with nothing to do
self._testeditfile({}, self._origfile)
# Test file doesn't get modified with only dummy variables
self._testeditfile({'DUMMY1': ('should_not_set', None, 0, True),
'DUMMY2': ('should_not_set_again', None, 0, True)}, self._origfile, dummyvars=['DUMMY1', 'DUMMY2'])
# Test file doesn't get modified with some of the same values
self._testeditfile({'THIS': ('that', None, 0, True),
'OTHER': ('anothervalue', None, 0, True),
'MULTILINE3': (' c1 c2 c3 ', None, 4, False)}, self._origfile)

def test_edit_metadata_file_1(self):

newfile1 = """
# A comment
HELLO = "newvalue"

THIS = "that"

# Another comment
NOCHANGE = "samevalue"
OTHER = 'anothervalue'

MULTILINE = "a1 \\
a2 \\
a3"

MULTILINE2 := " \\
b1 \\
b2 \\
b3 \\
"


MULTILINE3 = " \\
c1 \\
c2 \\
c3 \\
"

do_functionname() {
command1 ${VAL1} ${VAL2}
command2 ${VAL3} ${VAL4}
}
"""
self._testeditfile({'HELLO': ('newvalue', None, 4, True)}, newfile1)


def test_edit_metadata_file_2(self):

newfile2 = """
# A comment
HELLO = "oldvalue"

THIS = "that"

# Another comment
NOCHANGE = "samevalue"
OTHER = 'anothervalue'

MULTILINE = " \\
d1 \\
d2 \\
d3 \\
"

MULTILINE2 := " \\
b1 \\
b2 \\
b3 \\
"


MULTILINE3 = "nowsingle"

do_functionname() {
command1 ${VAL1} ${VAL2}
command2 ${VAL3} ${VAL4}
}
"""
self._testeditfile({'MULTILINE': (['d1','d2','d3'], None, 4, False),
'MULTILINE3': ('nowsingle', None, 4, True),
'NOTPRESENT': (['a', 'b'], None, 4, False)}, newfile2, dummyvars=['NOTPRESENT'])


def test_edit_metadata_file_3(self):

newfile3 = """
# A comment
HELLO = "oldvalue"

# Another comment
NOCHANGE = "samevalue"
OTHER = "yetanothervalue"

MULTILINE = "e1 \\
e2 \\
e3 \\
"

MULTILINE2 := "f1 \\
\tf2 \\
\t"


MULTILINE3 = " \\
c1 \\
c2 \\
c3 \\
"

do_functionname() {
othercommand_one a b c
othercommand_two d e f
}
"""

self._testeditfile({'do_functionname()': (['othercommand_one a b c', 'othercommand_two d e f'], None, 4, False),
'MULTILINE2': (['f1', 'f2'], None, '\t', True),
'MULTILINE': (['e1', 'e2', 'e3'], None, -1, True),
'THIS': (None, None, 0, False),
'OTHER': ('yetanothervalue', None, 0, True)}, newfile3)


def test_edit_metadata_file_4(self):

newfile4 = """
# A comment
HELLO = "oldvalue"

THIS = "that"

# Another comment
OTHER = 'anothervalue'

MULTILINE = "a1 \\
a2 \\
a3"

MULTILINE2 := " \\
b1 \\
b2 \\
b3 \\
"


"""

self._testeditfile({'NOCHANGE': (None, None, 0, False),
'MULTILINE3': (None, None, 0, False),
'THIS': ('that', None, 0, False),
'do_functionname()': (None, None, 0, False)}, newfile4)


def test_edit_metadata(self):
newfile5 = """
# A comment
HELLO = "hithere"

# A new comment
THIS += "that"

# Another comment
NOCHANGE = "samevalue"
OTHER = 'anothervalue'

MULTILINE = "a1 \\
a2 \\
a3"

MULTILINE2 := " \\
b1 \\
b2 \\
b3 \\
"


MULTILINE3 = " \\
c1 \\
c2 \\
c3 \\
"

NEWVAR = "value"

do_functionname() {
command1 ${VAL1} ${VAL2}
command2 ${VAL3} ${VAL4}
}
"""


def handle_var(varname, origvalue, op, newlines):
if varname == 'THIS':
newlines.append('# A new comment\n')
elif varname == 'do_functionname()':
newlines.append('NEWVAR = "value"\n')
newlines.append('\n')
valueitem = varvalues.get(varname, None)
if valueitem:
return valueitem
else:
return (origvalue, op, 0, True)

varvalues = {'HELLO': ('hithere', None, 0, True), 'THIS': ('that', '+=', 0, True)}
varlist = ['HELLO', 'THIS', 'do_functionname()']
(updated, newlines) = bb.utils.edit_metadata(self._origfile.splitlines(True), varlist, handle_var)
self.assertTrue(updated, 'List should be updated but isn\'t')
self.assertEqual(newlines, newfile5.splitlines(True))

# Make sure the orig value matches what we expect it to be
def test_edit_metadata_origvalue(self):
origfile = """
MULTILINE = " stuff \\
morestuff"
"""
expected_value = "stuff morestuff"
global value_in_callback
value_in_callback = ""

def handle_var(varname, origvalue, op, newlines):
global value_in_callback
value_in_callback = origvalue
return (origvalue, op, -1, False)

bb.utils.edit_metadata(origfile.splitlines(True),
['MULTILINE'],
handle_var)

testvalue = re.sub('\s+', ' ', value_in_callback.strip())
self.assertEqual(expected_value, testvalue)

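
The callback contract these tests rely on: bb.utils.edit_metadata() walks the given lines, calls the handler once per requested variable it finds, and the handler returns a (value, op, indent, single-line) tuple telling it how to rewrite the assignment. A minimal sketch of one rewrite under that contract (the exact output formatting here is my reading of the tests above, not something the diff states directly):

    import bb.utils

    lines = ['HELLO = "oldvalue"\n']

    def handle_var(varname, origvalue, op, newlines):
        # Keep the operator, swap the value, no extra indent, single line.
        return ('newvalue', op, 0, True)

    updated, newlines = bb.utils.edit_metadata(lines, ['HELLO'], handle_var)
    assert updated
    assert newlines == ['HELLO = "newvalue"\n']
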
class EditBbLayersConf(unittest.TestCase):

def _test_bblayers_edit(self, before, after, add, remove, notadded, notremoved):
with tempfile.NamedTemporaryFile('w', delete=False) as tf:
tf.write(before)
tf.close()
try:
actual_notadded, actual_notremoved = bb.utils.edit_bblayers_conf(tf.name, add, remove)
with open(tf.name) as f:
actual_after = f.readlines()
self.assertEqual(after.splitlines(True), actual_after)
self.assertEqual(notadded, actual_notadded)
self.assertEqual(notremoved, actual_notremoved)
finally:
os.remove(tf.name)


def test_bblayers_remove(self):
before = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS = " \
/home/user/path/layer1 \
/home/user/path/layer2 \
/home/user/path/subpath/layer3 \
/home/user/path/layer4 \
"
"""
after = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS = " \
/home/user/path/layer1 \
/home/user/path/subpath/layer3 \
/home/user/path/layer4 \
"
"""
self._test_bblayers_edit(before, after,
None,
'/home/user/path/layer2',
[],
[])


def test_bblayers_add(self):
before = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS = " \
/home/user/path/layer1 \
/home/user/path/layer2 \
/home/user/path/subpath/layer3 \
/home/user/path/layer4 \
"
"""
after = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS = " \
/home/user/path/layer1 \
/home/user/path/layer2 \
/home/user/path/subpath/layer3 \
/home/user/path/layer4 \
/other/path/to/layer5 \
"
"""
self._test_bblayers_edit(before, after,
'/other/path/to/layer5/',
None,
[],
[])


def test_bblayers_add_remove(self):
before = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS = " \
/home/user/path/layer1 \
/home/user/path/layer2 \
/home/user/path/subpath/layer3 \
/home/user/path/layer4 \
"
"""
after = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS = " \
/home/user/path/layer1 \
/home/user/path/layer2 \
/home/user/path/layer4 \
/other/path/to/layer5 \
"
"""
self._test_bblayers_edit(before, after,
['/other/path/to/layer5', '/home/user/path/layer2/'], '/home/user/path/subpath/layer3/',
['/home/user/path/layer2'],
[])


def test_bblayers_add_remove_home(self):
before = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS = " \
~/path/layer1 \
~/path/layer2 \
~/otherpath/layer3 \
~/path/layer4 \
"
"""
after = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS = " \
~/path/layer2 \
~/path/layer4 \
~/path2/layer5 \
"
"""
self._test_bblayers_edit(before, after,
[os.environ['HOME'] + '/path/layer4', '~/path2/layer5'],
[os.environ['HOME'] + '/otherpath/layer3', '~/path/layer1', '~/path/notinlist'],
[os.environ['HOME'] + '/path/layer4'],
['~/path/notinlist'])


def test_bblayers_add_remove_plusequals(self):
before = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS += " \
/home/user/path/layer1 \
/home/user/path/layer2 \
"
"""
after = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS += " \
/home/user/path/layer2 \
/home/user/path/layer3 \
"
"""
self._test_bblayers_edit(before, after,
'/home/user/path/layer3',
'/home/user/path/layer1',
[],
[])


def test_bblayers_add_remove_plusequals2(self):
before = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS += " \
/home/user/path/layer1 \
/home/user/path/layer2 \
/home/user/path/layer3 \
"
BBLAYERS += "/home/user/path/layer4"
BBLAYERS += "/home/user/path/layer5"
"""
after = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS += " \
/home/user/path/layer2 \
/home/user/path/layer3 \
"
BBLAYERS += "/home/user/path/layer5"
BBLAYERS += "/home/user/otherpath/layer6"
"""
self._test_bblayers_edit(before, after,
['/home/user/otherpath/layer6', '/home/user/path/layer3'], ['/home/user/path/layer1', '/home/user/path/layer4', '/home/user/path/layer7'],
['/home/user/path/layer3'],
['/home/user/path/layer7'])

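
Each of these cases funnels through the same call: bb.utils.edit_bblayers_conf(path, add, remove), where add and remove may each be a single path, a list of paths, or None, and the return value is the pair of lists of layers that could not be added or removed. A sketch with a hypothetical conf path and layer, based on the behaviour the cases above verify (trailing slashes and ~ vs $HOME spellings are normalised):

    import bb.utils

    # Paths here are purely illustrative.
    notadded, notremoved = bb.utils.edit_bblayers_conf(
        "conf/bblayers.conf",
        add="/path/to/meta-custom",
        remove=None)
    assert notadded == [] and notremoved == []
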
@@ -36,13 +36,13 @@ class Tinfoil:

# Set up logging
self.logger = logging.getLogger('BitBake')
self._log_hdlr = logging.StreamHandler(output)
bb.msg.addDefaultlogFilter(self._log_hdlr)
console = logging.StreamHandler(output)
bb.msg.addDefaultlogFilter(console)
format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
if output.isatty():
format.enable_color()
self._log_hdlr.setFormatter(format)
self.logger.addHandler(self._log_hdlr)
console.setFormatter(format)
self.logger.addHandler(console)

self.config = CookerConfiguration()
configparams = TinfoilConfigParameters(parse_only=True)
@@ -59,12 +59,6 @@ class Tinfoil:
def register_idle_function(self, function, data):
pass

def __enter__(self):
return self

def __exit__(self, type, value, traceback):
self.shutdown()

def parseRecipes(self):
sys.stderr.write("Parsing recipes..")
self.logger.setLevel(logging.WARNING)
@@ -80,57 +74,20 @@ class Tinfoil:
self.logger.setLevel(logging.INFO)
sys.stderr.write("done.\n")

self.cooker_data = self.cooker.recipecaches['']
self.cooker_data = self.cooker.recipecache

def prepare(self, config_only = False):
if not self.cooker_data:
if config_only:
self.cooker.parseConfiguration()
self.cooker_data = self.cooker.recipecaches['']
self.cooker_data = self.cooker.recipecache
else:
self.parseRecipes()

def parse_recipe_file(self, fn, appends=True, appendlist=None, config_data=None):
"""
Parse the specified recipe file (with or without bbappends)
and return a datastore object representing the environment
for the recipe.
Parameters:
fn: recipe file to parse - can be a file path or virtual
specification
appends: True to apply bbappends, False otherwise
appendlist: optional list of bbappend files to apply, if you
want to filter them
config_data: custom config datastore to use. NOTE: if you
specify config_data then you cannot use a virtual
specification for fn.
"""
if appends and appendlist == []:
appends = False
if appends:
if appendlist:
appendfiles = appendlist
else:
if not hasattr(self.cooker, 'collection'):
raise Exception('You must call tinfoil.prepare() with config_only=False in order to get bbappends')
appendfiles = self.cooker.collection.get_file_appends(fn)
else:
appendfiles = None
if config_data:
# We have to use a different function here if we're passing in a datastore
localdata = bb.data.createCopy(config_data)
envdata = bb.cache.parse_recipe(localdata, fn, appendfiles)['']
else:
# Use the standard path
parser = bb.cache.NoCache(self.cooker.databuilder)
envdata = parser.loadDataFull(fn, appendfiles)
return envdata

def shutdown(self):
self.cooker.shutdown(force=True)
self.cooker.post_serve()
self.cooker.unlockBitbake()
self.logger.removeHandler(self._log_hdlr)

class TinfoilConfigParameters(ConfigParameters):

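
The Tinfoil hunks above show the newer side gaining context-manager support (__enter__/__exit__) and the parse_recipe_file() helper. Putting the two together, client code on that side can be written roughly as follows (the recipe path is a placeholder):

    import bb.tinfoil

    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False)   # parse recipes so bbappends resolve
        envdata = tinfoil.parse_recipe_file("meta/recipes-example/example_1.0.bb")
        print(envdata.getVar("PN", True))
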
File diff suppressed because it is too large

bitbake/lib/bb/ui/crumbs/__init__.py (new file, 17 lines)
@@ -0,0 +1,17 @@
#
# Gtk+ UI pieces for BitBake
#
# Copyright (C) 2006-2007 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

bitbake/lib/bb/ui/crumbs/builddetailspage.py (new executable file, 437 lines)
@@ -0,0 +1,437 @@
|
||||
#!/usr/bin/env python
|
||||
#
|
||||
# BitBake Graphical GTK User Interface
|
||||
#
|
||||
# Copyright (C) 2012 Intel Corporation
|
||||
#
|
||||
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
|
||||
# Authored by Shane Wang <shane.wang@intel.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import gtk
|
||||
import pango
|
||||
import gobject
|
||||
import bb.process
|
||||
from bb.ui.crumbs.progressbar import HobProgressBar
|
||||
from bb.ui.crumbs.hobwidget import hic, HobNotebook, HobAltButton, HobWarpCellRendererText, HobButton, HobInfoButton
|
||||
from bb.ui.crumbs.runningbuild import RunningBuildTreeView
|
||||
from bb.ui.crumbs.runningbuild import BuildFailureTreeView
|
||||
from bb.ui.crumbs.hobpages import HobPage
|
||||
from bb.ui.crumbs.hobcolor import HobColors
|
||||
|
||||
class BuildConfigurationTreeView(gtk.TreeView):
|
||||
def __init__ (self):
|
||||
gtk.TreeView.__init__(self)
|
||||
self.set_rules_hint(False)
|
||||
self.set_headers_visible(False)
|
||||
self.set_property("hover-expand", True)
|
||||
self.get_selection().set_mode(gtk.SELECTION_SINGLE)
|
||||
|
||||
# The icon that indicates whether we're building or failed.
|
||||
renderer0 = gtk.CellRendererText()
|
||||
renderer0.set_property('font-desc', pango.FontDescription('courier bold 12'))
|
||||
col0 = gtk.TreeViewColumn ("Name", renderer0, text=0)
|
||||
self.append_column (col0)
|
||||
|
||||
# The message of configuration.
|
||||
renderer1 = HobWarpCellRendererText(col_number=1)
|
||||
col1 = gtk.TreeViewColumn ("Values", renderer1, text=1)
|
||||
self.append_column (col1)
|
||||
|
||||
def set_vars(self, key="", var=[""]):
|
||||
d = {}
|
||||
if type(var) == str:
|
||||
d = {key: [var]}
|
||||
elif type(var) == list and len(var) > 1:
|
||||
#create the sub item line
|
||||
l = []
|
||||
text = ""
|
||||
for item in var:
|
||||
text = " - " + item
|
||||
l.append(text)
|
||||
d = {key: var}
|
||||
|
||||
return d
|
||||
|
||||
def set_config_model(self, show_vars):
|
||||
listmodel = gtk.TreeStore(gobject.TYPE_STRING, gobject.TYPE_STRING)
|
||||
parent = None
|
||||
for var in show_vars:
|
||||
for subitem in var.items():
|
||||
name = subitem[0]
|
||||
is_parent = True
|
||||
for value in subitem[1]:
|
||||
if is_parent:
|
||||
parent = listmodel.append(parent, (name, value))
|
||||
is_parent = False
|
||||
else:
|
||||
listmodel.append(parent, (None, value))
|
||||
name = " - "
|
||||
parent = None
|
||||
# renew the tree model after get the configuration messages
|
||||
self.set_model(listmodel)
|
||||
|
||||
def show(self, src_config_info, src_params):
|
||||
vars = []
|
||||
vars.append(self.set_vars("BB version:", src_params.bb_version))
|
||||
vars.append(self.set_vars("Target arch:", src_params.target_arch))
|
||||
vars.append(self.set_vars("Target OS:", src_params.target_os))
|
||||
vars.append(self.set_vars("Machine:", src_config_info.curr_mach))
|
||||
vars.append(self.set_vars("Distro:", src_config_info.curr_distro))
|
||||
vars.append(self.set_vars("Distro version:", src_params.distro_version))
|
||||
vars.append(self.set_vars("SDK machine:", src_config_info.curr_sdk_machine))
|
||||
vars.append(self.set_vars("Tune features:", src_params.tune_pkgarch))
|
||||
vars.append(self.set_vars("Layers:", src_config_info.layers))
|
||||
|
||||
for path in src_config_info.layers:
|
||||
import os, os.path
|
||||
if os.path.exists(path):
|
||||
branch = bb.process.run('cd %s; git branch | grep "^* " | tr -d "* "' % path)[0]
|
||||
if branch.startswith("fatal:"):
|
||||
branch = "(unknown)"
|
||||
if branch:
|
||||
branch = branch.strip('\n')
|
||||
vars.append(self.set_vars("Branch:", branch))
|
||||
break
|
||||
|
||||
self.set_config_model(vars)
|
||||
|
||||
def reset(self):
|
||||
self.set_model(None)
|
||||
|
||||
#
# BuildDetailsPage
#

class BuildDetailsPage (HobPage):

    def __init__(self, builder):
        super(BuildDetailsPage, self).__init__(builder, "Building ...")

        self.num_of_issues = 0
        self.endpath = (0,)
        # create visual elements
        self.create_visual_elements()

    def create_visual_elements(self):
        # create visual elements
        self.vbox = gtk.VBox(False, 12)

        self.progress_box = gtk.VBox(False, 12)
        self.task_status = gtk.Label("\n") # to ensure layout is correct
        self.task_status.set_alignment(0.0, 0.5)
        self.progress_box.pack_start(self.task_status, expand=False, fill=False)
        self.progress_hbox = gtk.HBox(False, 6)
        self.progress_box.pack_end(self.progress_hbox, expand=True, fill=True)
        self.progress_bar = HobProgressBar()
        self.progress_hbox.pack_start(self.progress_bar, expand=True, fill=True)
        self.stop_button = HobAltButton("Stop")
        self.stop_button.connect("clicked", self.stop_button_clicked_cb)
        self.stop_button.set_sensitive(False)
        self.progress_hbox.pack_end(self.stop_button, expand=False, fill=False)

        self.notebook = HobNotebook()
        self.config_tv = BuildConfigurationTreeView()
        self.scrolled_view_config = gtk.ScrolledWindow()
        self.scrolled_view_config.set_policy(gtk.POLICY_NEVER, gtk.POLICY_ALWAYS)
        self.scrolled_view_config.add(self.config_tv)
        self.notebook.append_page(self.scrolled_view_config, "Build configuration")

        self.failure_tv = BuildFailureTreeView()
        self.failure_model = self.builder.handler.build.model.failure_model()
        self.failure_tv.set_model(self.failure_model)
        self.scrolled_view_failure = gtk.ScrolledWindow()
        self.scrolled_view_failure.set_policy(gtk.POLICY_NEVER, gtk.POLICY_ALWAYS)
        self.scrolled_view_failure.add(self.failure_tv)
        self.notebook.append_page(self.scrolled_view_failure, "Issues")

        self.build_tv = RunningBuildTreeView(readonly=True, hob=True)
        self.build_tv.set_model(self.builder.handler.build.model)
        self.scrolled_view_build = gtk.ScrolledWindow()
        self.scrolled_view_build.set_policy(gtk.POLICY_NEVER, gtk.POLICY_ALWAYS)
        self.scrolled_view_build.add(self.build_tv)
        self.notebook.append_page(self.scrolled_view_build, "Log")

        self.builder.handler.build.model.connect_after("row-changed", self.scroll_to_present_row, self.scrolled_view_build.get_vadjustment(), self.build_tv)

        self.button_box = gtk.HBox(False, 6)
        self.back_button = HobAltButton('<< Back')
        self.back_button.connect("clicked", self.back_button_clicked_cb)
        self.button_box.pack_start(self.back_button, expand=False, fill=False)
    def update_build_status(self, current, total, task):
        recipe_path, recipe_task = task.split(", ")
        recipe = os.path.basename(recipe_path)
        # str.rstrip(".bb") strips any trailing '.' or 'b' characters rather
        # than the suffix, mangling names like "grub.bb" -> "gru"; strip the
        # suffix explicitly instead.
        if recipe.endswith(".bb"):
            recipe = recipe[:-len(".bb")]
        tsk_msg = "<b>Running task %s of %s:</b> %s\n<b>Recipe:</b> %s" % (current, total, recipe_task, recipe)
        self.task_status.set_markup(tsk_msg)
        self.stop_button.set_sensitive(True)

    def reset_build_status(self):
        self.task_status.set_markup("\n") # to ensure layout is correct
        self.endpath = (0,)
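    # Illustrative sketch (not part of the original file): the "task" string
    # that update_build_status() receives is assumed to look like
    # "<recipe file path>, <task name>", which the split above unpacks.
    # The path below is hypothetical.
    #
    #   task = "/poky/meta/recipes-core/busybox/busybox_1.21.bb, do_compile"
    #   recipe_path, recipe_task = task.split(", ")
    #   # recipe_path -> ".../busybox_1.21.bb", recipe_task -> "do_compile"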
    def show_issues(self):
        self.num_of_issues += 1
        self.notebook.show_indicator_icon("Issues", self.num_of_issues)
        self.notebook.queue_draw()

    def reset_issues(self):
        self.num_of_issues = 0
        self.notebook.hide_indicator_icon("Issues")

    def _remove_all_widget(self):
        children = self.vbox.get_children() or []
        for child in children:
            self.vbox.remove(child)
        children = self.box_group_area.get_children() or []
        for child in children:
            self.box_group_area.remove(child)
        children = self.get_children() or []
        for child in children:
            self.remove(child)
    def add_build_fail_top_bar(self, actions, log_file=None):
        primary_action = "Edit %s" % actions

        color = HobColors.ERROR
        build_fail_top = gtk.EventBox()
        #build_fail_top.set_size_request(-1, 200)
        build_fail_top.modify_bg(gtk.STATE_NORMAL, gtk.gdk.color_parse(color))

        build_fail_tab = gtk.Table(14, 46, True)
        build_fail_top.add(build_fail_tab)

        icon = gtk.Image()
        icon_pix_buffer = gtk.gdk.pixbuf_new_from_file(hic.ICON_INDI_ERROR_FILE)
        icon.set_from_pixbuf(icon_pix_buffer)
        build_fail_tab.attach(icon, 1, 4, 0, 6)

        label = gtk.Label()
        label.set_alignment(0.0, 0.5)
        label.set_markup("<span size='x-large'><b>%s</b></span>" % self.title)
        build_fail_tab.attach(label, 4, 26, 0, 6)

        label = gtk.Label()
        label.set_alignment(0.0, 0.5)
        # Ensure variable disk_full is defined
        if not hasattr(self.builder, 'disk_full'):
            self.builder.disk_full = False

        if self.builder.disk_full:
            markup = "<span size='medium'>There is no disk space left, so Hob cannot finish building your image. Free up some disk space\n"
            markup += "and restart the build. Check the \"Issues\" tab for more details</span>"
            label.set_markup(markup)
        else:
            label.set_markup("<span size='medium'>Check the \"Issues\" information for more details</span>")
        build_fail_tab.attach(label, 4, 40, 4, 9)

        # create button 'Edit packages'
        action_button = HobButton(primary_action)
        #action_button.set_size_request(-1, 40)
        action_button.set_tooltip_text("Edit the %s parameters" % actions)
        action_button.connect('clicked', self.failure_primary_action_button_clicked_cb, primary_action)

        if log_file:
            open_log_button = HobAltButton("Open log")
            open_log_button.set_relief(gtk.RELIEF_HALF)
            open_log_button.set_tooltip_text("Open the build's log file")
            open_log_button.connect('clicked', self.open_log_button_clicked_cb, log_file)

        attach_pos = (24 if log_file else 14)
        file_bug_button = HobAltButton('File a bug')
        file_bug_button.set_relief(gtk.RELIEF_HALF)
        file_bug_button.set_tooltip_text("Open the Yocto Project bug tracking website")
        file_bug_button.connect('clicked', self.failure_activate_file_bug_link_cb)

        if not self.builder.disk_full:
            build_fail_tab.attach(action_button, 4, 13, 9, 12)
            if log_file:
                build_fail_tab.attach(open_log_button, 14, 23, 9, 12)
            build_fail_tab.attach(file_bug_button, attach_pos, attach_pos + 9, 9, 12)
        else:
            restart_build = HobButton("Restart the build")
            restart_build.set_tooltip_text("Restart the build")
            restart_build.connect('clicked', self.restart_build_button_clicked_cb)

            build_fail_tab.attach(restart_build, 4, 13, 9, 12)
            build_fail_tab.attach(action_button, 14, 23, 9, 12)
            if log_file:
                build_fail_tab.attach(open_log_button, attach_pos, attach_pos + 9, 9, 12)

        self.builder.disk_full = False
        return build_fail_top
    def show_fail_page(self, title):
        self._remove_all_widget()
        self.title = "Hob cannot build your %s" % title

        self.build_fail_bar = self.add_build_fail_top_bar(title, self.builder.current_logfile)

        self.pack_start(self.group_align, expand=True, fill=True)
        self.box_group_area.pack_start(self.build_fail_bar, expand=False, fill=False)
        self.box_group_area.pack_start(self.vbox, expand=True, fill=True)

        self.vbox.pack_start(self.notebook, expand=True, fill=True)
        self.show_all()
        self.notebook.set_page("Issues")
        self.back_button.hide()
    def add_build_stop_top_bar(self, action, log_file=None):
        color = HobColors.LIGHT_GRAY
        build_stop_top = gtk.EventBox()
        #build_stop_top.set_size_request(-1, 200)
        build_stop_top.modify_bg(gtk.STATE_NORMAL, gtk.gdk.color_parse(color))
        build_stop_top.set_flags(gtk.CAN_DEFAULT)
        build_stop_top.grab_default()

        build_stop_tab = gtk.Table(11, 46, True)
        build_stop_top.add(build_stop_tab)

        icon = gtk.Image()
        icon_pix_buffer = gtk.gdk.pixbuf_new_from_file(hic.ICON_INFO_HOVER_FILE)
        icon.set_from_pixbuf(icon_pix_buffer)
        build_stop_tab.attach(icon, 1, 4, 0, 6)

        label = gtk.Label()
        label.set_alignment(0.0, 0.5)
        label.set_markup("<span size='x-large'><b>%s</b></span>" % self.title)
        build_stop_tab.attach(label, 4, 26, 0, 6)

        action_button = HobButton("Edit %s" % action)
        action_button.set_size_request(-1, 40)
        if action == "image":
            action_button.set_tooltip_text("Edit the image parameters")
        elif action == "recipes":
            action_button.set_tooltip_text("Edit the included recipes")
        elif action == "packages":
            action_button.set_tooltip_text("Edit the included packages")
        action_button.connect('clicked', self.stop_primary_action_button_clicked_cb, action)
        build_stop_tab.attach(action_button, 4, 13, 6, 9)

        if log_file:
            open_log_button = HobAltButton("Open log")
            open_log_button.set_relief(gtk.RELIEF_HALF)
            open_log_button.set_tooltip_text("Open the build's log file")
            open_log_button.connect('clicked', self.open_log_button_clicked_cb, log_file)
            build_stop_tab.attach(open_log_button, 14, 23, 6, 9)

        attach_pos = (24 if log_file else 14)
        build_button = HobAltButton("Build new image")
        #build_button.set_size_request(-1, 40)
        build_button.set_tooltip_text("Create a new image from scratch")
        build_button.connect('clicked', self.new_image_button_clicked_cb)
        build_stop_tab.attach(build_button, attach_pos, attach_pos + 9, 6, 9)

        return build_stop_top, action_button
    def show_stop_page(self, action):
        self._remove_all_widget()
        self.title = "Build stopped"
        self.build_stop_bar, action_button = self.add_build_stop_top_bar(action, self.builder.current_logfile)

        self.pack_start(self.group_align, expand=True, fill=True)
        self.box_group_area.pack_start(self.build_stop_bar, expand=False, fill=False)
        self.box_group_area.pack_start(self.vbox, expand=True, fill=True)

        self.vbox.pack_start(self.notebook, expand=True, fill=True)
        self.show_all()
        self.back_button.hide()
        return action_button
    def show_page(self, step):
        self._remove_all_widget()
        if step == self.builder.PACKAGE_GENERATING or step == self.builder.FAST_IMAGE_GENERATING:
            self.title = "Building packages ..."
        else:
            self.title = "Building image ..."
        self.build_details_top = self.add_onto_top_bar(None)
        self.pack_start(self.build_details_top, expand=False, fill=False)
        self.pack_start(self.group_align, expand=True, fill=True)

        self.box_group_area.pack_start(self.vbox, expand=True, fill=True)

        self.progress_bar.reset()
        self.config_tv.reset()
        self.vbox.pack_start(self.progress_box, expand=False, fill=False)

        self.vbox.pack_start(self.notebook, expand=True, fill=True)

        self.box_group_area.pack_end(self.button_box, expand=False, fill=False)
        self.show_all()
        self.notebook.set_page("Log")
        self.back_button.hide()

        self.reset_build_status()
        self.reset_issues()
    def update_progress_bar(self, title, fraction, status=None):
        self.progress_bar.update(fraction)
        self.progress_bar.set_title(title)
        self.progress_bar.set_rcstyle(status)

    def back_button_clicked_cb(self, button):
        self.builder.show_configuration()

    def new_image_button_clicked_cb(self, button):
        self.builder.reset()

    def show_back_button(self):
        self.back_button.show()

    def stop_button_clicked_cb(self, button):
        self.builder.stop_build()

    def hide_stop_button(self):
        self.stop_button.set_sensitive(False)
        self.stop_button.hide()
    def scroll_to_present_row(self, model, path, iter, v_adj, treeview):
        if treeview and v_adj:
            if path[0] > self.endpath[0]: # check whether the event appended a new row
                self.endpath = path
                # check whether the gtk.Adjustment is at its end boundary
                if (v_adj.upper <= v_adj.page_size) or (v_adj.value == v_adj.upper - v_adj.page_size):
                    treeview.scroll_to_cell(path)

    def show_configurations(self, configurations, params):
        self.config_tv.show(configurations, params)
    def failure_primary_action_button_clicked_cb(self, button, action):
        if "Edit recipes" in action:
            self.builder.show_recipes()
        elif "Edit packages" in action:
            self.builder.show_packages()
        elif "Edit image" in action:
            self.builder.show_configuration()

    def restart_build_button_clicked_cb(self, button):
        self.builder.just_bake()

    def stop_primary_action_button_clicked_cb(self, button, action):
        if "recipes" in action:
            self.builder.show_recipes()
        elif "packages" in action:
            self.builder.show_packages()
        elif "image" in action:
            self.builder.show_configuration()
    def open_log_button_clicked_cb(self, button, log_file):
        if log_file:
            # log_file is an absolute path, so "file://" + path already
            # yields the canonical three-slash file URI; "file:///" + path
            # would double the leading slash.
            log_file = "file://" + log_file
            gtk.show_uri(screen=button.get_screen(), uri=log_file, timestamp=0)

    def failure_activate_file_bug_link_cb(self, button):
        button.child.emit('activate-link', "http://bugzilla.yoctoproject.org")
bitbake/lib/bb/ui/crumbs/builder.py (executable file, 1475 lines): diff suppressed because it is too large

bitbake/lib/bb/ui/crumbs/buildmanager.py (new file, 455 lines)
@@ -0,0 +1,455 @@
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2008        Intel Corporation
#
# Authored by Rob Bradford <rob@linux.intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import gtk
import gobject
import threading
import os
import datetime
import time
class BuildConfiguration:
    """ Represents a potential *or* historic *or* concrete build. It
    encompasses all the things that we need to tell bitbake to do to make it
    build what we want it to build.

    It also stores the metadata URL and the set of possible machines (and the
    distros / images / uris for these). Apart from the metadata URL these are
    not serialised to file (since they may be transient). In some ways this
    functionality might be shifted to the loader class."""

    def __init__ (self):
        self.metadata_url = None

        # Tuple of (distros, image, urls)
        self.machine_options = {}

        self.machine = None
        self.distro = None
        self.image = None
        self.urls = []
        self.extra_urls = []
        self.extra_pkgs = []

    def get_machines_model (self):
        model = gtk.ListStore (gobject.TYPE_STRING)
        for machine in self.machine_options.keys():
            model.append ([machine])

        return model

    def get_distro_and_images_models (self, machine):
        distro_model = gtk.ListStore (gobject.TYPE_STRING)

        for distro in self.machine_options[machine][0]:
            distro_model.append ([distro])

        image_model = gtk.ListStore (gobject.TYPE_STRING)

        for image in self.machine_options[machine][1]:
            image_model.append ([image])

        return (distro_model, image_model)

    def get_repos (self):
        self.urls = self.machine_options[self.machine][2]
        return self.urls
    # It might be a lot better if we stored these in bitbake conf
    # file format.
    @staticmethod
    def load_from_file (filename):

        conf = BuildConfiguration()
        with open(filename, "r") as f:
            for line in f:
                data = line.split (";")[1]
                if (line.startswith ("metadata-url;")):
                    conf.metadata_url = data.strip()
                    continue
                if (line.startswith ("url;")):
                    conf.urls += [data.strip()]
                    continue
                if (line.startswith ("extra-url;")):
                    conf.extra_urls += [data.strip()]
                    continue
                if (line.startswith ("machine;")):
                    conf.machine = data.strip()
                    continue
                if (line.startswith ("distribution;")):
                    conf.distro = data.strip()
                    continue
                if (line.startswith ("image;")):
                    conf.image = data.strip()
                    continue

        return conf

    # Serialise to a file. This is part of the build process and we use this
    # to be able to repeat a given build (using the same set of parameters)
    # but also so that we can include the details of the image / machine /
    # distro in the build manager tree view.
    def write_to_file (self, filename):
        lines = []

        if (self.metadata_url):
            lines += ["metadata-url;%s\n" % (self.metadata_url)]

        for url in self.urls:
            lines += ["url;%s\n" % (url)]

        for url in self.extra_urls:
            lines += ["extra-url;%s\n" % (url)]

        if (self.machine):
            lines += ["machine;%s\n" % (self.machine)]

        if (self.distro):
            lines += ["distribution;%s\n" % (self.distro)]

        if (self.image):
            lines += ["image;%s\n" % (self.image)]

        # use a context manager so the file is closed even if writing fails
        with open (filename, "w") as f:
            f.writelines (lines)
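# Illustrative sketch (not part of the original file): round-tripping a
# BuildConfiguration through the "key;value" line format that
# load_from_file()/write_to_file() above implement. The path is hypothetical.
#
#   conf = BuildConfiguration()
#   conf.machine = "qemux86"
#   conf.distro = "poky"
#   conf.image = "core-image-minimal"
#   conf.write_to_file("/tmp/example.conf")
#   # file contents:
#   #   machine;qemux86
#   #   distribution;poky
#   #   image;core-image-minimal
#   restored = BuildConfiguration.load_from_file("/tmp/example.conf")
#   assert restored.machine == "qemux86"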
class BuildResult(gobject.GObject):
    """ Represents an historic build. Perhaps not successful. But it includes
    things such as the files that are in the directory (the output from the
    build) as well as a deserialised BuildConfiguration file that is stored in
    ".conf" in the directory for the build.

    This is a GObject so that it can be included in the TreeStore."""

    (STATE_COMPLETE, STATE_FAILED, STATE_ONGOING) = \
        (0, 1, 2)

    def __init__ (self, parent, identifier):
        gobject.GObject.__init__ (self)
        self.date = None

        self.files = []
        self.status = None
        self.identifier = identifier
        self.path = os.path.join (parent, identifier)

        # Extract the date. Since the directory name is of the
        # format build-<year><month><day>-<ordinal> we can easily
        # pull it out.
        # TODO: Better to stat a file?
        (_, date, revision) = identifier.split ("-")

        year = int (date[0:4])
        month = int (date[4:6])
        day = int (date[6:8])

        self.date = datetime.date (year, month, day)

        self.conf = None

        # By default builds are STATE_FAILED unless we find a "complete" file
        # in which case they are STATE_COMPLETE
        self.state = BuildResult.STATE_FAILED
        for file in os.listdir (self.path):
            if (file.startswith (".conf")):
                conffile = os.path.join (self.path, file)
                self.conf = BuildConfiguration.load_from_file (conffile)
            elif (file.startswith ("complete")):
                self.state = BuildResult.STATE_COMPLETE
            else:
                self.add_file (file)

    def add_file (self, file):
        # Just add the file for now. Don't care about the type.
        self.files += [(file, None)]
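# Illustrative sketch (not part of the original file): how the
# "build-<yyyymmdd>-<ordinal>" directory name is taken apart above.
#
#   identifier = "build-20080926-2"
#   (_, date, revision) = identifier.split ("-")
#   # date -> "20080926", revision -> "2"
#   datetime.date (int (date[0:4]), int (date[4:6]), int (date[6:8]))
#   # -> datetime.date(2008, 9, 26)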
class BuildManagerModel (gtk.TreeStore):
    """ Model for the BuildManagerTreeView. This derives from gtk.TreeStore
    but it abstracts nicely what the columns mean and the setup of the columns
    in the model. """

    (COL_IDENT, COL_DESC, COL_MACHINE, COL_DISTRO, COL_BUILD_RESULT, COL_DATE, COL_STATE) = \
        (0, 1, 2, 3, 4, 5, 6)

    def __init__ (self):
        gtk.TreeStore.__init__ (self,
                                gobject.TYPE_STRING,
                                gobject.TYPE_STRING,
                                gobject.TYPE_STRING,
                                gobject.TYPE_STRING,
                                gobject.TYPE_OBJECT,
                                gobject.TYPE_INT64,
                                gobject.TYPE_INT)
class BuildManager (gobject.GObject):
    """ This class manages the historic builds that have been found in the
    "results" directory but is also used for starting a new build."""

    __gsignals__ = {
        'population-finished' : (gobject.SIGNAL_RUN_LAST,
                                 gobject.TYPE_NONE,
                                 ()),
        'populate-error' : (gobject.SIGNAL_RUN_LAST,
                            gobject.TYPE_NONE,
                            ())
    }

    def update_build_result (self, result, iter):
        # Convert the date into something we can sort by.
        date = long (time.mktime (result.date.timetuple()))

        # Add a top level entry for the build

        self.model.set (iter,
                        BuildManagerModel.COL_IDENT, result.identifier,
                        BuildManagerModel.COL_DESC, result.conf.image,
                        BuildManagerModel.COL_MACHINE, result.conf.machine,
                        BuildManagerModel.COL_DISTRO, result.conf.distro,
                        BuildManagerModel.COL_BUILD_RESULT, result,
                        BuildManagerModel.COL_DATE, date,
                        BuildManagerModel.COL_STATE, result.state)

        # And then we use the files in the directory as the children for the
        # top level iter.
        for file in result.files:
            self.model.append (iter, (None, file[0], None, None, None, date, -1))
    # This function is called as an idle by the BuildManagerPopulaterThread
    def add_build_result (self, result):
        gtk.gdk.threads_enter()
        self.known_builds += [result]

        self.update_build_result (result, self.model.append (None))

        gtk.gdk.threads_leave()

    def notify_build_finished (self):
        # This is a bit of a hack. If we have a running build then we will
        # have a row in the model in STATE_ONGOING. Find it and update it as
        # if it were a proper historic build (well, it is completed now....)

        # We need to use the iters here rather than the Python iterator
        # interface to the model since we need to pass it into
        # update_build_result

        iter = self.model.get_iter_first()

        while (iter):
            (ident, state) = self.model.get(iter,
                                            BuildManagerModel.COL_IDENT,
                                            BuildManagerModel.COL_STATE)

            if state == BuildResult.STATE_ONGOING:
                result = BuildResult (self.results_directory, ident)
                self.update_build_result (result, iter)
            iter = self.model.iter_next(iter)

    def notify_build_succeeded (self):
        # Write the "complete" file so that when we create the BuildResult
        # object we put into the model it is seen as a successful build.

        complete_file_path = os.path.join (self.cur_build_directory, "complete")
        # open() rather than the Python-2-only file() builtin; all we need
        # is an empty marker file.
        with open (complete_file_path, "w"):
            pass
        self.notify_build_finished()

    def notify_build_failed (self):
        # Without a "complete" file this will mark the build as failed:
        self.notify_build_finished()

    # This function is called as an idle
    def emit_population_finished_signal (self):
        gtk.gdk.threads_enter()
        self.emit ("population-finished")
        gtk.gdk.threads_leave()
    class BuildManagerPopulaterThread (threading.Thread):
        def __init__ (self, manager, directory):
            threading.Thread.__init__ (self)
            self.manager = manager
            self.directory = directory

        def run (self):
            # For each of the "build-<...>" directories ..

            if os.path.exists (self.directory):
                for directory in os.listdir (self.directory):

                    if not directory.startswith ("build-"):
                        continue

                    build_result = BuildResult (self.directory, directory)
                    self.manager.add_build_result (build_result)

            gobject.idle_add (BuildManager.emit_population_finished_signal,
                              self.manager)
    def __init__ (self, server, results_directory):
        gobject.GObject.__init__ (self)

        # The builds that we've found from walking the result directory
        self.known_builds = []

        # Save out the bitbake server, we need this for issuing commands to
        # the cooker:
        self.server = server

        # The TreeStore that we use
        self.model = BuildManagerModel ()

        # The results directory is where we create (and look for) the
        # build-<xyz>-<n> directories. We need to populate ourselves from
        # that directory.
        self.results_directory = results_directory
        self.populate_from_directory (self.results_directory)

    def populate_from_directory (self, directory):
        thread = BuildManager.BuildManagerPopulaterThread (self, directory)
        thread.start()
    # Come up with the name for the next build ident by combining "build-"
    # with the date formatted as yyyymmdd and then an ordinal. We do this by
    # an optimistic algorithm, incrementing the ordinal if we find that it
    # already exists.
    def get_next_build_ident (self):
        today = datetime.date.today ()
        # strftime zero-pads to the yyyymmdd form promised above;
        # str(today.month) + str(today.day) would give e.g. "200896"
        # for 2008-09-06.
        datestr = today.strftime ("%Y%m%d")

        revision = 0
        test_name = "build-%s-%d" % (datestr, revision)
        test_path = os.path.join (self.results_directory, test_name)

        while (os.path.exists (test_path)):
            revision += 1
            test_name = "build-%s-%d" % (datestr, revision)
            test_path = os.path.join (self.results_directory, test_name)

        return test_name
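    # Illustrative sketch (not part of the original file): with the ordinal
    # scheme above, if "build-20080926-0" and "build-20080926-1" already
    # exist under results_directory, get_next_build_ident() keeps probing
    # and returns "build-20080926-2".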
    # Take a BuildConfiguration and then try and build it based on the
    # parameters of that configuration.
    def do_build (self, conf):
        server = self.server

        # Work out the build directory. Note we actually create the
        # directories here since we need to write the ".conf" file. Otherwise
        # we could have relied on bitbake's builder thread to actually make
        # the directories as it proceeds with the build.
        ident = self.get_next_build_ident ()
        build_directory = os.path.join (self.results_directory,
                                        ident)
        self.cur_build_directory = build_directory
        os.makedirs (build_directory)

        conffile = os.path.join (build_directory, ".conf")
        conf.write_to_file (conffile)

        # Add a row to the model representing this ongoing build. It's kinda a
        # fake entry. If this build completes or fails then this gets updated
        # with the real stuff like the historic builds
        date = long (time.time())
        self.model.append (None, (ident, conf.image, conf.machine, conf.distro,
                                  None, date, BuildResult.STATE_ONGOING))
        try:
            server.runCommand(["setVariable", "BUILD_IMAGES_FROM_FEEDS", 1])
            server.runCommand(["setVariable", "MACHINE", conf.machine])
            server.runCommand(["setVariable", "DISTRO", conf.distro])
            server.runCommand(["setVariable", "PACKAGE_CLASSES", "package_ipk"])
            server.runCommand(["setVariable", "BBFILES", \
                               """${OEROOT}/meta/packages/*/*.bb ${OEROOT}/meta-moblin/packages/*/*.bb"""])
            server.runCommand(["setVariable", "TMPDIR", "${OEROOT}/build/tmp"])
            server.runCommand(["setVariable", "IPK_FEED_URIS", \
                               " ".join(conf.get_repos())])
            server.runCommand(["setVariable", "DEPLOY_DIR_IMAGE",
                               build_directory])
            server.runCommand(["buildTargets", [conf.image], "rootfs"])

        except Exception as e:
            print(e)
class BuildManagerTreeView (gtk.TreeView):
    """ The tree view for the build manager. This shows the historic builds
    and so forth. """

    # We use this function to control what goes in the cell since we store
    # the date in the model as seconds since the epoch (for sorting) and so we
    # need to make it human readable.
    def date_format_custom_cell_data_func (self, col, cell, model, iter):
        date = model.get (iter, BuildManagerModel.COL_DATE)[0]
        datestr = time.strftime("%A %d %B %Y", time.localtime(date))
        cell.set_property ("text", datestr)

    # This format function controls what goes in the cell. We use this to map
    # the integer state to a string and also to colourise the text
    def state_format_custom_cell_data_fun (self, col, cell, model, iter):
        state = model.get (iter, BuildManagerModel.COL_STATE)[0]

        if (state == BuildResult.STATE_ONGOING):
            cell.set_property ("text", "Active")
            cell.set_property ("foreground", "#000000")
        elif (state == BuildResult.STATE_FAILED):
            cell.set_property ("text", "Failed")
            cell.set_property ("foreground", "#ff0000")
        elif (state == BuildResult.STATE_COMPLETE):
            cell.set_property ("text", "Complete")
            cell.set_property ("foreground", "#00ff00")
        else:
            cell.set_property ("text", "")

    def __init__ (self):
        gtk.TreeView.__init__(self)

        # Misc description column
        renderer = gtk.CellRendererText ()
        col = gtk.TreeViewColumn (None, renderer,
                                  text=BuildManagerModel.COL_DESC)
        self.append_column (col)

        # Machine
        renderer = gtk.CellRendererText ()
        col = gtk.TreeViewColumn ("Machine", renderer,
                                  text=BuildManagerModel.COL_MACHINE)
        self.append_column (col)

        # distro
        renderer = gtk.CellRendererText ()
        col = gtk.TreeViewColumn ("Distribution", renderer,
                                  text=BuildManagerModel.COL_DISTRO)
        self.append_column (col)

        # date (using a custom function for formatting the cell contents;
        # it takes epoch -> human readable string)
        renderer = gtk.CellRendererText ()
        col = gtk.TreeViewColumn ("Date", renderer,
                                  text=BuildManagerModel.COL_DATE)
        self.append_column (col)
        col.set_cell_data_func (renderer,
                                self.date_format_custom_cell_data_func)

        # For status.
        renderer = gtk.CellRendererText ()
        col = gtk.TreeViewColumn ("Status", renderer,
                                  text = BuildManagerModel.COL_STATE)
        self.append_column (col)
        col.set_cell_data_func (renderer,
                                self.state_format_custom_cell_data_fun)
bitbake/lib/bb/ui/crumbs/hig/advancedsettingsdialog.py (new file, 341 lines)
@@ -0,0 +1,341 @@
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2011-2012   Intel Corporation
#
# Authored by Joshua Lock <josh@linux.intel.com>
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
# Authored by Shane Wang <shane.wang@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import gtk
import hashlib
from bb.ui.crumbs.hobwidget import HobInfoButton, HobButton
from bb.ui.crumbs.progressbar import HobProgressBar
from bb.ui.crumbs.hig.settingsuihelper import SettingsUIHelper
from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog
from bb.ui.crumbs.hig.proxydetailsdialog import ProxyDetailsDialog
"""
|
||||
The following are convenience classes for implementing GNOME HIG compliant
|
||||
BitBake GUI's
|
||||
In summary: spacing = 12px, border-width = 6px
|
||||
"""
|
||||
|
||||
class AdvancedSettingsDialog (CrumbsDialog, SettingsUIHelper):
|
||||
|
||||
    def details_cb(self, button, parent, protocol):
        dialog = ProxyDetailsDialog(title = protocol.upper() + " Proxy Details",
                                    user = self.configuration.proxies[protocol][1],
                                    passwd = self.configuration.proxies[protocol][2],
                                    parent = parent,
                                    flags = gtk.DIALOG_MODAL
                                            | gtk.DIALOG_DESTROY_WITH_PARENT
                                            | gtk.DIALOG_NO_SEPARATOR)
        dialog.add_button(gtk.STOCK_CLOSE, gtk.RESPONSE_OK)
        response = dialog.run()
        if response == gtk.RESPONSE_OK:
            self.configuration.proxies[protocol][1] = dialog.user
            self.configuration.proxies[protocol][2] = dialog.passwd
            self.refresh_proxy_components()
        dialog.destroy()

    def set_save_button(self, button):
        self.save_button = button
    def rootfs_combo_changed_cb(self, rootfs_combo, all_package_format, check_hbox):
        combo_item = self.rootfs_combo.get_active_text()
        modified = False
        for child in check_hbox.get_children():
            if isinstance(child, gtk.CheckButton):
                check_hbox.remove(child)
                modified = True
        for format in all_package_format:
            if format != combo_item:
                check_button = gtk.CheckButton(format)
                check_hbox.pack_start(check_button, expand=False, fill=False)
                modified = True
        if modified:
            check_hbox.remove(self.pkgfmt_info)
            check_hbox.pack_start(self.pkgfmt_info, expand=False, fill=False)
            check_hbox.show_all()
    def gen_pkgfmt_widget(self, curr_package_format, all_package_format, tooltip_combo="", tooltip_extra=""):
        pkgfmt_vbox = gtk.VBox(False, 6)

        label = self.gen_label_widget("Root file system package format")
        pkgfmt_vbox.pack_start(label, expand=False, fill=False)

        rootfs_format = ""
        if curr_package_format:
            rootfs_format = curr_package_format.split()[0]

        rootfs_format_widget, rootfs_combo = self.gen_combo_widget(rootfs_format, all_package_format, tooltip_combo)
        pkgfmt_vbox.pack_start(rootfs_format_widget, expand=False, fill=False)

        label = self.gen_label_widget("Additional package formats")
        pkgfmt_vbox.pack_start(label, expand=False, fill=False)

        check_hbox = gtk.HBox(False, 12)
        pkgfmt_vbox.pack_start(check_hbox, expand=False, fill=False)
        for format in all_package_format:
            if format != rootfs_format:
                check_button = gtk.CheckButton(format)
                is_active = (format in curr_package_format.split())
                check_button.set_active(is_active)
                check_hbox.pack_start(check_button, expand=False, fill=False)

        self.pkgfmt_info = HobInfoButton(tooltip_extra, self)
        check_hbox.pack_start(self.pkgfmt_info, expand=False, fill=False)

        rootfs_combo.connect("changed", self.rootfs_combo_changed_cb, all_package_format, check_hbox)

        pkgfmt_vbox.show_all()

        return pkgfmt_vbox, rootfs_combo, check_hbox
    def __init__(self, title, configuration, all_image_types,
                 all_package_formats, all_distros, all_sdk_machines,
                 max_threads, parent, flags, buttons=None):
        super(AdvancedSettingsDialog, self).__init__(title, parent, flags, buttons)

        # class members from other objects
        # bitbake settings from Builder.Configuration
        self.configuration = configuration
        self.image_types = all_image_types
        self.all_package_formats = all_package_formats
        self.all_distros = all_distros[:]
        self.all_sdk_machines = all_sdk_machines
        self.max_threads = max_threads

        # class members for internal use
        self.distro_combo = None
        self.dldir_text = None
        self.sstatedir_text = None
        self.sstatemirror_text = None
        self.bb_spinner = None
        self.pmake_spinner = None
        self.rootfs_size_spinner = None
        self.extra_size_spinner = None
        self.gplv3_checkbox = None
        self.sdk_checkbox = None
        self.image_types_checkbuttons = {}

        self.md5 = self.config_md5()
        self.settings_changed = False

        # create visual elements on the dialog
        self.save_button = None
        self.create_visual_elements()
        self.connect("response", self.response_cb)
    def _get_sorted_value(self, var):
        return " ".join(sorted(str(var).split())) + "\n"

    def config_md5(self):
        data = ""
        data += ("PACKAGE_CLASSES: " + self.configuration.curr_package_format + '\n')
        data += ("DISTRO: " + self._get_sorted_value(self.configuration.curr_distro))
        data += ("IMAGE_ROOTFS_SIZE: " + self._get_sorted_value(self.configuration.image_rootfs_size))
        data += ("IMAGE_EXTRA_SIZE: " + self._get_sorted_value(self.configuration.image_extra_size))
        data += ("INCOMPATIBLE_LICENSE: " + self._get_sorted_value(self.configuration.incompat_license))
        data += ("SDK_MACHINE: " + self._get_sorted_value(self.configuration.curr_sdk_machine))
        data += ("TOOLCHAIN_BUILD: " + self._get_sorted_value(self.configuration.toolchain_build))
        data += ("IMAGE_FSTYPES: " + self._get_sorted_value(self.configuration.image_fstypes))
        return hashlib.md5(data).hexdigest()
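    # Illustrative sketch (not part of the original file): config_md5() acts
    # as a cheap change detector. A digest snapshot is taken in __init__ and
    # compared again in response_cb(); only the hashes are kept around, never
    # a copy of the full settings.
    #
    #   before = self.config_md5()
    #   # ... user edits the dialog ...
    #   self.settings_changed = (before != self.config_md5())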
    def create_visual_elements(self):
        self.nb = gtk.Notebook()
        self.nb.set_show_tabs(True)
        self.nb.append_page(self.create_image_types_page(), gtk.Label("Image types"))
        self.nb.append_page(self.create_output_page(), gtk.Label("Output"))
        self.nb.set_current_page(0)
        self.vbox.pack_start(self.nb, expand=True, fill=True)
        self.vbox.pack_end(gtk.HSeparator(), expand=True, fill=True)

        self.show_all()
    def get_num_checked_image_types(self):
        total = 0
        for b in self.image_types_checkbuttons.values():
            if b.get_active():
                total = total + 1
        return total

    def set_save_button_state(self):
        if self.save_button:
            self.save_button.set_sensitive(self.get_num_checked_image_types() > 0)

    def image_type_checkbutton_clicked_cb(self, button):
        self.set_save_button_state()
        if self.get_num_checked_image_types() == 0:
            # Show an error dialog
            lbl = "<b>Select an image type</b>"
            msg = "You need to select at least one image type."
            dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_WARNING, msg)
            button = dialog.add_button("OK", gtk.RESPONSE_OK)
            HobButton.style_button(button)
            response = dialog.run()
            dialog.destroy()
    def create_image_types_page(self):
        main_vbox = gtk.VBox(False, 16)
        main_vbox.set_border_width(6)

        advanced_vbox = gtk.VBox(False, 6)
        advanced_vbox.set_border_width(6)

        distro_vbox = gtk.VBox(False, 6)
        label = self.gen_label_widget("Distro:")
        tooltip = "Selects the Yocto Project distribution you want"
        try:
            i = self.all_distros.index( "defaultsetup" )
        except ValueError:
            i = -1
        if i != -1:
            self.all_distros[ i ] = "Default"
        if self.configuration.curr_distro == "defaultsetup":
            self.configuration.curr_distro = "Default"
        distro_widget, self.distro_combo = self.gen_combo_widget(self.configuration.curr_distro, self.all_distros, "<b>Distro</b>" + "*" + tooltip)
        distro_vbox.pack_start(label, expand=False, fill=False)
        distro_vbox.pack_start(distro_widget, expand=False, fill=False)
        main_vbox.pack_start(distro_vbox, expand=False, fill=False)

        rows = (len(self.image_types)+1)/3
        table = gtk.Table(rows + 1, 10, True)
        advanced_vbox.pack_start(table, expand=False, fill=False)

        tooltip = "Image file system types you want."
        info = HobInfoButton("<b>Image types</b>" + "*" + tooltip, self)
        label = self.gen_label_widget("Image types:")
        align = gtk.Alignment(0, 0.5, 0, 0)
        table.attach(align, 0, 4, 0, 1)
        align.add(label)
        table.attach(info, 4, 5, 0, 1)

        i = 1
        j = 1
        for image_type in sorted(self.image_types):
            self.image_types_checkbuttons[image_type] = gtk.CheckButton(image_type)
            self.image_types_checkbuttons[image_type].connect("toggled", self.image_type_checkbutton_clicked_cb)
            article = ""
            if image_type.startswith(("a", "e", "i", "o", "u")):
                article = "n"
            if image_type == "live":
                self.image_types_checkbuttons[image_type].set_tooltip_text("Build iso and hddimg images")
            else:
                self.image_types_checkbuttons[image_type].set_tooltip_text("Build a%s %s image" % (article, image_type))
            table.attach(self.image_types_checkbuttons[image_type], j - 1, j + 3, i, i + 1)
            if image_type in self.configuration.image_fstypes.split():
                self.image_types_checkbuttons[image_type].set_active(True)
            i += 1
            if i > rows:
                i = 1
                j = j + 4

        main_vbox.pack_start(advanced_vbox, expand=False, fill=False)
        self.set_save_button_state()

        return main_vbox
    def create_output_page(self):
        advanced_vbox = gtk.VBox(False, 6)
        advanced_vbox.set_border_width(6)

        advanced_vbox.pack_start(self.gen_label_widget('<span weight="bold">Package format</span>'), expand=False, fill=False)
        sub_vbox = gtk.VBox(False, 6)
        advanced_vbox.pack_start(sub_vbox, expand=False, fill=False)
        tooltip_combo = "Selects the package format used to generate rootfs."
        tooltip_extra = "Selects extra package formats to build"
        pkgfmt_widget, self.rootfs_combo, self.check_hbox = self.gen_pkgfmt_widget(self.configuration.curr_package_format, self.all_package_formats, "<b>Root file system package format</b>" + "*" + tooltip_combo, "<b>Additional package formats</b>" + "*" + tooltip_extra)
        sub_vbox.pack_start(pkgfmt_widget, expand=False, fill=False)

        advanced_vbox.pack_start(self.gen_label_widget('<span weight="bold">Image size</span>'), expand=False, fill=False)
        sub_vbox = gtk.VBox(False, 6)
        advanced_vbox.pack_start(sub_vbox, expand=False, fill=False)
        label = self.gen_label_widget("Image basic size (in MB)")
        tooltip = "Defines the size for the generated image. The OpenEmbedded build system determines the final size for the generated image using an algorithm that takes into account the initial disk space used for the generated image, the Image basic size value, and the Additional free space value.\n\nFor more information, check the <a href=\"http://www.yoctoproject.org/docs/current/poky-ref-manual/poky-ref-manual.html#var-IMAGE_ROOTFS_SIZE\">Yocto Project Reference Manual</a>."
        rootfs_size_widget, self.rootfs_size_spinner = self.gen_spinner_widget(int(self.configuration.image_rootfs_size*1.0/1024), 0, 65536, "<b>Image basic size</b>" + "*" + tooltip)
        sub_vbox.pack_start(label, expand=False, fill=False)
        sub_vbox.pack_start(rootfs_size_widget, expand=False, fill=False)

        sub_vbox = gtk.VBox(False, 6)
        advanced_vbox.pack_start(sub_vbox, expand=False, fill=False)
        label = self.gen_label_widget("Additional free space (in MB)")
        tooltip = "Sets extra free disk space to be added to the generated image. Use this variable when you want to ensure that a specific amount of free disk space is available on a device after an image is installed and running."
        extra_size_widget, self.extra_size_spinner = self.gen_spinner_widget(int(self.configuration.image_extra_size*1.0/1024), 0, 65536, "<b>Additional free space</b>" + "*" + tooltip)
        sub_vbox.pack_start(label, expand=False, fill=False)
        sub_vbox.pack_start(extra_size_widget, expand=False, fill=False)

        advanced_vbox.pack_start(self.gen_label_widget('<span weight="bold">Licensing</span>'), expand=False, fill=False)
        self.gplv3_checkbox = gtk.CheckButton("Exclude GPLv3 packages")
        self.gplv3_checkbox.set_tooltip_text("Check this box to prevent GPLv3 packages from being included in your image")
        if "GPLv3" in self.configuration.incompat_license.split():
            self.gplv3_checkbox.set_active(True)
        else:
            self.gplv3_checkbox.set_active(False)
        advanced_vbox.pack_start(self.gplv3_checkbox, expand=False, fill=False)

        advanced_vbox.pack_start(self.gen_label_widget('<span weight="bold">SDK</span>'), expand=False, fill=False)
        sub_hbox = gtk.HBox(False, 6)
        advanced_vbox.pack_start(sub_hbox, expand=False, fill=False)
        self.sdk_checkbox = gtk.CheckButton("Populate SDK")
        tooltip = "Check this box to generate an SDK tarball that consists of the cross-toolchain and a sysroot that contains development packages for your image."
        self.sdk_checkbox.set_tooltip_text(tooltip)
        self.sdk_checkbox.set_active(self.configuration.toolchain_build)
        sub_hbox.pack_start(self.sdk_checkbox, expand=False, fill=False)

        tooltip = "Select the host platform for which you want to run the toolchain contained in the SDK tarball."
        sdk_machine_widget, self.sdk_machine_combo = self.gen_combo_widget(self.configuration.curr_sdk_machine, self.all_sdk_machines, "<b>Populate SDK</b>" + "*" + tooltip)
        sub_hbox.pack_start(sdk_machine_widget, expand=False, fill=False)

        return advanced_vbox
    def response_cb(self, dialog, response_id):
        package_format = []
        package_format.append(self.rootfs_combo.get_active_text())
        for child in self.check_hbox:
            if isinstance(child, gtk.CheckButton) and child.get_active():
                package_format.append(child.get_label())
        self.configuration.curr_package_format = " ".join(package_format)

        distro = self.distro_combo.get_active_text()
        if distro == "Default":
            distro = "defaultsetup"
        self.configuration.curr_distro = distro
        self.configuration.image_rootfs_size = self.rootfs_size_spinner.get_value_as_int() * 1024
        self.configuration.image_extra_size = self.extra_size_spinner.get_value_as_int() * 1024

        self.configuration.image_fstypes = ""
        for image_type in self.image_types:
            if self.image_types_checkbuttons[image_type].get_active():
                self.configuration.image_fstypes += (" " + image_type)
        # strip() returns a new string, so the result has to be assigned back
        self.configuration.image_fstypes = self.configuration.image_fstypes.strip()

        if self.gplv3_checkbox.get_active():
            if "GPLv3" not in self.configuration.incompat_license.split():
                self.configuration.incompat_license += " GPLv3"
        else:
            # list.remove() mutates in place and returns None; assigning its
            # return value back would set incompat_license to None and lose
            # every other license, so build the list first and rejoin it.
            licenses = self.configuration.incompat_license.split()
            if "GPLv3" in licenses:
                licenses.remove("GPLv3")
            self.configuration.incompat_license = " ".join(licenses)
        self.configuration.incompat_license = self.configuration.incompat_license.strip()

        self.configuration.toolchain_build = self.sdk_checkbox.get_active()
        self.configuration.curr_sdk_machine = self.sdk_machine_combo.get_active_text()
        md5 = self.config_md5()
        self.settings_changed = (self.md5 != md5)
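    # Illustrative sketch (not part of the original file): the GPLv3 toggle
    # above only edits the "GPLv3" token and leaves any other entries in
    # INCOMPATIBLE_LICENSE alone, e.g.:
    #
    #   incompat_license = "Proprietary GPLv3"
    #   # checkbox unticked -> "Proprietary"
    #   # checkbox ticked again -> "Proprietary GPLv3"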
bitbake/lib/bb/ui/crumbs/hig/crumbsdialog.py (new file, 44 lines)
@@ -0,0 +1,44 @@
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2011-2012   Intel Corporation
#
# Authored by Joshua Lock <josh@linux.intel.com>
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
# Authored by Shane Wang <shane.wang@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import gtk

"""
The following are convenience classes for implementing GNOME HIG compliant
BitBake GUIs.
In summary: spacing = 12px, border-width = 6px
"""

class CrumbsDialog(gtk.Dialog):
    """
    A GNOME HIG compliant dialog widget.
    Add buttons with gtk.Dialog.add_button or gtk.Dialog.add_buttons
    """
    def __init__(self, title="", parent=None, flags=0, buttons=None):
        super(CrumbsDialog, self).__init__(title, parent, flags, buttons)

        self.set_property("has-separator", False) # note: deprecated in 2.22

        self.set_border_width(6)
        self.vbox.set_property("spacing", 12)
        self.action_area.set_property("spacing", 12)
        self.action_area.set_property("border-width", 6)
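# Illustrative sketch (not part of the original file): a minimal use of
# CrumbsDialog, adding buttons as the docstring suggests. The title and
# response handling are hypothetical.
#
#   dialog = CrumbsDialog(title="Example")
#   dialog.add_button(gtk.STOCK_CLOSE, gtk.RESPONSE_OK)
#   response = dialog.run()
#   dialog.destroy()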
bitbake/lib/bb/ui/crumbs/hig/crumbsmessagedialog.py (new file, 70 lines)
@@ -0,0 +1,70 @@
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2011-2012   Intel Corporation
#
# Authored by Joshua Lock <josh@linux.intel.com>
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
# Authored by Shane Wang <shane.wang@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import glib
import gtk
from bb.ui.crumbs.hobwidget import HobIconChecker
from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog

"""
The following are convenience classes for implementing GNOME HIG compliant
BitBake GUIs.
In summary: spacing = 12px, border-width = 6px
"""

class CrumbsMessageDialog(gtk.MessageDialog):
    """
    A GNOME HIG compliant message dialog widget.
    Add buttons with gtk.Dialog.add_button or gtk.Dialog.add_buttons
    """
    def __init__(self, parent = None, label="", dialog_type = gtk.MESSAGE_QUESTION, msg=""):
        super(CrumbsMessageDialog, self).__init__(None,
                                                  gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
                                                  dialog_type,
                                                  gtk.BUTTONS_NONE,
                                                  None)

        self.set_skip_taskbar_hint(False)

        self.set_markup(label)

        # short messages fit in the dialog itself; longer ones go into a
        # scrollable text view (len() is never negative, so only the upper
        # bound needs checking)
        if len(msg) < 300:
            self.format_secondary_markup(msg)
        else:
            vbox = self.get_message_area()
            vbox.set_border_width(1)
            vbox.set_property("spacing", 12)
            self.textWindow = gtk.ScrolledWindow()
            self.textWindow.set_shadow_type(gtk.SHADOW_IN)
            self.textWindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
            self.msgView = gtk.TextView()
            self.msgView.set_editable(False)
            self.msgView.set_wrap_mode(gtk.WRAP_WORD)
            self.msgView.set_cursor_visible(False)
            self.msgView.set_size_request(300, 300)
            self.buf = gtk.TextBuffer()
            self.buf.set_text(msg)
            self.msgView.set_buffer(self.buf)
            self.textWindow.add(self.msgView)
            self.msgView.show()
            vbox.add(self.textWindow)
            self.textWindow.show()
bitbake/lib/bb/ui/crumbs/hig/deployimagedialog.py (new file, 219 lines)
@@ -0,0 +1,219 @@
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2011-2012   Intel Corporation
#
# Authored by Joshua Lock <josh@linux.intel.com>
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
# Authored by Shane Wang <shane.wang@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import glob
import gtk
import gobject
import os
import re
import shlex
import subprocess
import tempfile
from bb.ui.crumbs.hobwidget import hic, HobButton
from bb.ui.crumbs.progressbar import HobProgressBar
import bb.ui.crumbs.utils
import bb.process
from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog

"""
The following are convenience classes for implementing GNOME HIG compliant
BitBake GUIs.
In summary: spacing = 12px, border-width = 6px
"""
class DeployImageDialog (CrumbsDialog):

    __dummy_usb__ = "--select a usb drive--"

    def __init__(self, title, image_path, parent, flags, buttons=None, standalone=False):
        super(DeployImageDialog, self).__init__(title, parent, flags, buttons)

        self.image_path = image_path
        self.standalone = standalone

        self.create_visual_elements()
        self.connect("response", self.response_cb)
    def create_visual_elements(self):
        self.set_size_request(600, 400)
        label = gtk.Label()
        label.set_alignment(0.0, 0.5)
        markup = "<span font_desc='12'>The image to be written into usb drive:</span>"
        label.set_markup(markup)
        self.vbox.pack_start(label, expand=False, fill=False, padding=2)

        table = gtk.Table(2, 10, False)
        table.set_col_spacings(5)
        table.set_row_spacings(5)
        self.vbox.pack_start(table, expand=True, fill=True)

        scroll = gtk.ScrolledWindow()
        scroll.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC)
        scroll.set_shadow_type(gtk.SHADOW_IN)
        tv = gtk.TextView()
        tv.set_editable(False)
        tv.set_wrap_mode(gtk.WRAP_WORD)
        tv.set_cursor_visible(False)
        self.buf = gtk.TextBuffer()
        self.buf.set_text(self.image_path)
        tv.set_buffer(self.buf)
        scroll.add(tv)
        table.attach(scroll, 0, 10, 0, 1)

        # There are two ways to use DeployImageDialog.
        # One is invoked by Hob when the 'Deploy Image' button is clicked;
        # the other is invoked by a standalone script.
        # The following block handles the latter: it adds a 'Select Image'
        # button and emits a signal when the button is clicked.
        if self.standalone:
            gobject.signal_new("select_image_clicked", self, gobject.SIGNAL_RUN_FIRST,
                               gobject.TYPE_NONE, ())
            icon = gtk.Image()
            pix_buffer = gtk.gdk.pixbuf_new_from_file(hic.ICON_IMAGES_DISPLAY_FILE)
            icon.set_from_pixbuf(pix_buffer)
            button = gtk.Button("Select Image")
            button.set_image(icon)
            #button.set_size_request(140, 50)
            table.attach(button, 9, 10, 1, 2, gtk.FILL, 0, 0, 0)
            button.connect("clicked", self.select_image_button_clicked_cb)

        separator = gtk.HSeparator()
        self.vbox.pack_start(separator, expand=False, fill=False, padding=10)

        self.usb_desc = gtk.Label()
        self.usb_desc.set_alignment(0.0, 0.5)
        markup = "<span font_desc='12'>You haven't chosen any USB drive.</span>"
        self.usb_desc.set_markup(markup)

        self.usb_combo = gtk.combo_box_new_text()
        self.usb_combo.connect("changed", self.usb_combo_changed_cb)
        model = self.usb_combo.get_model()
        model.clear()
        self.usb_combo.append_text(self.__dummy_usb__)
        for usb in self.find_all_usb_devices():
            self.usb_combo.append_text("/dev/" + usb)
        self.usb_combo.set_active(0)
        self.vbox.pack_start(self.usb_combo, expand=False, fill=False)
        self.vbox.pack_start(self.usb_desc, expand=False, fill=False, padding=2)

        self.progress_bar = HobProgressBar()
        self.vbox.pack_start(self.progress_bar, expand=False, fill=False)
        separator = gtk.HSeparator()
        self.vbox.pack_start(separator, expand=False, fill=True, padding=10)

        self.vbox.show_all()
        self.progress_bar.hide()
    def set_image_text_buffer(self, image_path):
        self.buf.set_text(image_path)

    def set_image_path(self, image_path):
        self.image_path = image_path

    def popen_read(self, cmd):
        tmpout, errors = bb.process.run("%s" % cmd)
        return tmpout.strip()

    def find_all_usb_devices(self):
        usb_devs = [ os.readlink(u)
                     for u in glob.glob('/dev/disk/by-id/usb*')
                     if not re.search(r'part\d+', u) ]
        return [ '%s' % u[u.rfind('/')+1:] for u in usb_devs ]

    def get_usb_info(self, dev):
        return "%s %s" % \
            (self.popen_read('cat /sys/class/block/%s/device/vendor' % dev),
             self.popen_read('cat /sys/class/block/%s/device/model' % dev))
def select_image_button_clicked_cb(self, button):
|
||||
self.emit('select_image_clicked')
|
||||
|
||||
def usb_combo_changed_cb(self, usb_combo):
|
||||
combo_item = self.usb_combo.get_active_text()
|
||||
if not combo_item or combo_item == self.__dummy_usb__:
|
||||
markup = "<span font_desc='12'>You haven't chosen any USB drive.</span>"
|
||||
self.usb_desc.set_markup(markup)
|
||||
else:
|
||||
markup = "<span font_desc='12'>" + self.get_usb_info(combo_item.lstrip("/dev/")) + "</span>"
|
||||
self.usb_desc.set_markup(markup)
|
||||
|
||||
def response_cb(self, dialog, response_id):
|
||||
if response_id == gtk.RESPONSE_YES:
|
||||
lbl = ''
|
||||
msg = ''
|
||||
combo_item = self.usb_combo.get_active_text()
|
||||
if combo_item and combo_item != self.__dummy_usb__ and self.image_path:
|
||||
cmdline = bb.ui.crumbs.utils.which_terminal()
|
||||
if cmdline:
|
||||
tmpfile = tempfile.NamedTemporaryFile()
|
||||
cmdline += "\"sudo dd if=" + self.image_path + \
|
||||
" of=" + combo_item + " && sync; echo $? > " + tmpfile.name + "\""
|
||||
subprocess.call(shlex.split(cmdline))
|
||||
|
||||
if int(tmpfile.readline().strip()) == 0:
|
||||
lbl = "<b>Deploy image successfully.</b>"
|
||||
else:
|
||||
lbl = "<b>Failed to deploy image.</b>"
|
||||
msg = "Please check image <b>%s</b> exists and USB device <b>%s</b> is writable." % (self.image_path, combo_item)
|
||||
tmpfile.close()
|
||||
else:
|
||||
if not self.image_path:
|
||||
lbl = "<b>No selection made.</b>"
|
||||
msg = "You have not selected an image to deploy."
|
||||
else:
|
||||
lbl = "<b>No selection made.</b>"
|
||||
msg = "You have not selected a USB device."
|
||||
if len(lbl):
|
||||
crumbs_dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_INFO, msg)
|
||||
button = crumbs_dialog.add_button("Close", gtk.RESPONSE_OK)
|
||||
HobButton.style_button(button)
|
||||
crumbs_dialog.run()
|
||||
crumbs_dialog.destroy()
|
||||
|
||||
def update_progress_bar(self, title, fraction, status=None):
|
||||
self.progress_bar.update(fraction)
|
||||
self.progress_bar.set_title(title)
|
||||
self.progress_bar.set_rcstyle(status)
|
||||
|
||||
def write_file(self, ifile, ofile):
|
||||
self.progress_bar.reset()
|
||||
self.progress_bar.show()
|
||||
|
||||
f_from = os.open(ifile, os.O_RDONLY)
|
||||
f_to = os.open(ofile, os.O_WRONLY)
|
||||
|
||||
total_size = os.stat(ifile).st_size
|
||||
written_size = 0
|
||||
|
||||
while True:
|
||||
buf = os.read(f_from, 1024*1024)
|
||||
if not buf:
|
||||
break
|
||||
os.write(f_to, buf)
|
||||
written_size += 1024*1024
|
||||
self.update_progress_bar("Writing to usb:", written_size * 1.0/total_size)
|
||||
|
||||
self.update_progress_bar("Writing completed:", 1.0)
|
||||
os.close(f_from)
|
||||
os.close(f_to)
|
||||
self.progress_bar.hide()
|
||||
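For orientation, a minimal sketch of how the standalone path above might be driven. The constructor arguments and the standalone keyword are assumed from how the fields are used in this hunk, not confirmed by it, and the callback body is illustrative only:

    # Sketch only: DeployImageDialog's constructor signature is assumed,
    # not shown in this hunk.
    import gtk

    def select_image_clicked_cb(dialog):
        # In standalone mode the dialog emits 'select_image_clicked' (see
        # create_visual_elements); a driver script reacts by letting the
        # user pick an image and pushing the path back into the dialog.
        chooser = gtk.FileChooserDialog("Select image", dialog,
                                        gtk.FILE_CHOOSER_ACTION_OPEN,
                                        (gtk.STOCK_CANCEL, gtk.RESPONSE_NO,
                                         gtk.STOCK_OPEN, gtk.RESPONSE_YES))
        if chooser.run() == gtk.RESPONSE_YES:
            path = chooser.get_filename()
            dialog.set_image_path(path)
            dialog.set_image_text_buffer(path)
        chooser.destroy()

    dialog = DeployImageDialog("USB Image Writer", "", None,
                               gtk.DIALOG_MODAL, None, standalone=True)
    dialog.connect("select_image_clicked", select_image_clicked_cb)
    dialog.add_button("Close", gtk.RESPONSE_NO)
    dialog.add_button("Write USB image", gtk.RESPONSE_YES)
    dialog.run()
    dialog.destroy()

The dialog's own response_cb then runs the dd command in a terminal once the deploy button returns RESPONSE_YES.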
172  bitbake/lib/bb/ui/crumbs/hig/imageselectiondialog.py  Normal file
@@ -0,0 +1,172 @@
|
||||
#
|
||||
# BitBake Graphical GTK User Interface
|
||||
#
|
||||
# Copyright (C) 2011-2012 Intel Corporation
|
||||
#
|
||||
# Authored by Joshua Lock <josh@linux.intel.com>
|
||||
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
|
||||
# Authored by Shane Wang <shane.wang@intel.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import gtk
|
||||
import gobject
|
||||
import os
|
||||
from bb.ui.crumbs.hobwidget import HobViewTable, HobInfoButton, HobButton, HobAltButton
|
||||
from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
|
||||
from bb.ui.crumbs.hig.layerselectiondialog import LayerSelectionDialog
|
||||
|
||||
"""
|
||||
The following are convenience classes for implementing GNOME HIG compliant
|
||||
BitBake GUI's
|
||||
In summary: spacing = 12px, border-width = 6px
|
||||
"""
|
||||
|
||||
class ImageSelectionDialog (CrumbsDialog):
|
||||
|
||||
__columns__ = [{
|
||||
'col_name' : 'Image name',
|
||||
'col_id' : 0,
|
||||
'col_style': 'text',
|
||||
'col_min' : 400,
|
||||
'col_max' : 400
|
||||
}, {
|
||||
'col_name' : 'Select',
|
||||
'col_id' : 1,
|
||||
'col_style': 'radio toggle',
|
||||
'col_min' : 160,
|
||||
'col_max' : 160
|
||||
}]
|
||||
|
||||
|
||||
def __init__(self, image_folder, image_types, title, parent, flags, buttons=None, image_extension = {}):
|
||||
super(ImageSelectionDialog, self).__init__(title, parent, flags, buttons)
|
||||
self.connect("response", self.response_cb)
|
||||
|
||||
self.image_folder = image_folder
|
||||
self.image_types = image_types
|
||||
self.image_list = []
|
||||
self.image_names = []
|
||||
self.image_extension = image_extension
|
||||
|
||||
# create visual elements on the dialog
|
||||
self.create_visual_elements()
|
||||
|
||||
self.image_store = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_BOOLEAN)
|
||||
self.fill_image_store()
|
||||
|
||||
def create_visual_elements(self):
|
||||
hbox = gtk.HBox(False, 6)
|
||||
|
||||
self.vbox.pack_start(hbox, expand=False, fill=False)
|
||||
|
||||
entry = gtk.Entry()
|
||||
entry.set_text(self.image_folder)
|
||||
table = gtk.Table(1, 10, True)
|
||||
table.set_size_request(560, -1)
|
||||
hbox.pack_start(table, expand=False, fill=False)
|
||||
table.attach(entry, 0, 9, 0, 1)
|
||||
image = gtk.Image()
|
||||
image.set_from_stock(gtk.STOCK_OPEN, gtk.ICON_SIZE_BUTTON)
|
||||
open_button = gtk.Button()
|
||||
open_button.set_image(image)
|
||||
open_button.connect("clicked", self.select_path_cb, self, entry)
|
||||
table.attach(open_button, 9, 10, 0, 1)
|
||||
|
||||
self.image_table = HobViewTable(self.__columns__, "Images")
|
||||
self.image_table.set_size_request(-1, 300)
|
||||
self.image_table.connect("toggled", self.toggled_cb)
|
||||
self.image_table.connect_group_selection(self.table_selected_cb)
|
||||
self.image_table.connect("row-activated", self.row_actived_cb)
|
||||
self.vbox.pack_start(self.image_table, expand=True, fill=True)
|
||||
|
||||
self.show_all()
|
||||
|
||||
def change_image_cb(self, model, path, columnid):
|
||||
if not model:
|
||||
return
|
||||
iter = model.get_iter_first()
|
||||
while iter:
|
||||
rowpath = model.get_path(iter)
|
||||
model[rowpath][columnid] = False
|
||||
iter = model.iter_next(iter)
|
||||
|
||||
model[path][columnid] = True
|
||||
|
||||
def toggled_cb(self, table, cell, path, columnid, tree):
|
||||
model = tree.get_model()
|
||||
self.change_image_cb(model, path, columnid)
|
||||
|
||||
def table_selected_cb(self, selection):
|
||||
model, paths = selection.get_selected_rows()
|
||||
if paths:
|
||||
self.change_image_cb(model, paths[0], 1)
|
||||
|
||||
def row_actived_cb(self, tab, model, path):
|
||||
self.change_image_cb(model, path, 1)
|
||||
self.emit('response', gtk.RESPONSE_YES)
|
||||
|
||||
def select_path_cb(self, action, parent, entry):
|
||||
dialog = gtk.FileChooserDialog("", parent,
|
||||
gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER)
|
||||
text = entry.get_text()
|
||||
dialog.set_current_folder(text if len(text) > 0 else os.getcwd())
|
||||
button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
|
||||
HobAltButton.style_button(button)
|
||||
button = dialog.add_button("Open", gtk.RESPONSE_YES)
|
||||
HobButton.style_button(button)
|
||||
response = dialog.run()
|
||||
if response == gtk.RESPONSE_YES:
|
||||
path = dialog.get_filename()
|
||||
entry.set_text(path)
|
||||
self.image_folder = path
|
||||
self.fill_image_store()
|
||||
|
||||
dialog.destroy()
|
||||
|
||||
def fill_image_store(self):
|
||||
self.image_list = []
|
||||
self.image_store.clear()
|
||||
imageset = set()
|
||||
for root, dirs, files in os.walk(self.image_folder):
|
||||
# ignore the sub directories
|
||||
dirs[:] = []
|
||||
for f in files:
|
||||
for image_type in self.image_types:
|
||||
if image_type in self.image_extension:
|
||||
real_types = self.image_extension[image_type]
|
||||
else:
|
||||
real_types = [image_type]
|
||||
for real_image_type in real_types:
|
||||
if f.endswith('.' + real_image_type):
|
||||
imageset.add(f.rsplit('.' + real_image_type)[0].rsplit('.rootfs')[0])
|
||||
self.image_list.append(f)
|
||||
|
||||
for image in imageset:
|
||||
self.image_store.set(self.image_store.append(), 0, image, 1, False)
|
||||
|
||||
self.image_table.set_model(self.image_store)
|
||||
|
||||
def response_cb(self, dialog, response_id):
|
||||
self.image_names = []
|
||||
if response_id == gtk.RESPONSE_YES:
|
||||
iter = self.image_store.get_iter_first()
|
||||
while iter:
|
||||
path = self.image_store.get_path(iter)
|
||||
if self.image_store[path][1]:
|
||||
for f in self.image_list:
|
||||
if f.startswith(self.image_store[path][0] + '.'):
|
||||
self.image_names.append(f)
|
||||
break
|
||||
iter = self.image_store.iter_next(iter)
|
||||
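A short usage sketch for the dialog above; the deploy folder and image types are illustrative values, and the buttons reuse the Hob styling helpers this file imports:

    import gtk

    # Illustrative values: any folder of built images and any list of
    # image-type suffixes will do.
    dialog = ImageSelectionDialog("/tmp/deploy/images", ["ext3", "tar.gz"],
                                  "Select from my image files", None,
                                  gtk.DIALOG_MODAL)
    button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
    HobAltButton.style_button(button)
    button = dialog.add_button("Open", gtk.RESPONSE_YES)
    HobButton.style_button(button)
    if dialog.run() == gtk.RESPONSE_YES:
        # response_cb has filled image_names with every file whose name
        # starts with the selected image's basename.
        print dialog.image_names
    dialog.destroy()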
298  bitbake/lib/bb/ui/crumbs/hig/layerselectiondialog.py  Normal file
@@ -0,0 +1,298 @@
|
||||
#
|
||||
# BitBake Graphical GTK User Interface
|
||||
#
|
||||
# Copyright (C) 2011-2012 Intel Corporation
|
||||
#
|
||||
# Authored by Joshua Lock <josh@linux.intel.com>
|
||||
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
|
||||
# Authored by Shane Wang <shane.wang@intel.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import gtk
|
||||
import gobject
|
||||
import os
|
||||
import tempfile
|
||||
from bb.ui.crumbs.hobwidget import hic, HobButton, HobAltButton
|
||||
from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
|
||||
from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog
|
||||
|
||||
"""
|
||||
The following are convenience classes for implementing GNOME HIG compliant
|
||||
BitBake GUI's
|
||||
In summary: spacing = 12px, border-width = 6px
|
||||
"""
|
||||
|
||||
class CellRendererPixbufActivatable(gtk.CellRendererPixbuf):
|
||||
"""
|
||||
A custom CellRenderer implementation which is activatable
|
||||
so that we can handle user clicks
|
||||
"""
|
||||
__gsignals__ = { 'clicked' : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
(gobject.TYPE_STRING,)), }
|
||||
|
||||
def __init__(self):
|
||||
gtk.CellRendererPixbuf.__init__(self)
|
||||
self.set_property('mode', gtk.CELL_RENDERER_MODE_ACTIVATABLE)
|
||||
self.set_property('follow-state', True)
|
||||
|
||||
"""
|
||||
Respond to a user click on a cell
|
||||
"""
|
||||
def do_activate(self, even, widget, path, background_area, cell_area, flags):
|
||||
self.emit('clicked', path)
|
||||
|
||||
#
|
||||
# LayerSelectionDialog
|
||||
#
|
||||
class LayerSelectionDialog (CrumbsDialog):
|
||||
|
||||
TARGETS = [
|
||||
("MY_TREE_MODEL_ROW", gtk.TARGET_SAME_WIDGET, 0),
|
||||
("text/plain", 0, 1),
|
||||
("TEXT", 0, 2),
|
||||
("STRING", 0, 3),
|
||||
]
|
||||
|
||||
def gen_label_widget(self, content):
|
||||
label = gtk.Label()
|
||||
label.set_alignment(0, 0)
|
||||
label.set_markup(content)
|
||||
label.show()
|
||||
return label
|
||||
|
||||
def layer_widget_toggled_cb(self, cell, path, layer_store):
|
||||
name = layer_store[path][0]
|
||||
toggle = not layer_store[path][1]
|
||||
layer_store[path][1] = toggle
|
||||
|
||||
def layer_widget_add_clicked_cb(self, action, layer_store, parent):
|
||||
dialog = gtk.FileChooserDialog("Add new layer", parent,
|
||||
gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER)
|
||||
button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
|
||||
HobAltButton.style_button(button)
|
||||
button = dialog.add_button("Open", gtk.RESPONSE_YES)
|
||||
HobButton.style_button(button)
|
||||
label = gtk.Label("Select the layer you wish to add")
|
||||
label.show()
|
||||
dialog.set_extra_widget(label)
|
||||
response = dialog.run()
|
||||
path = dialog.get_filename()
|
||||
dialog.destroy()
|
||||
|
||||
lbl = "<b>Error</b>"
|
||||
msg = "Unable to load layer <i>%s</i> because " % path
|
||||
if response == gtk.RESPONSE_YES:
|
||||
import os
|
||||
import os.path
|
||||
layers = []
|
||||
it = layer_store.get_iter_first()
|
||||
while it:
|
||||
layers.append(layer_store.get_value(it, 0))
|
||||
it = layer_store.iter_next(it)
|
||||
|
||||
if not path:
|
||||
msg += "it is an invalid path."
|
||||
elif not os.path.exists(path+"/conf/layer.conf"):
|
||||
msg += "there is no layer.conf inside the directory."
|
||||
elif path in layers:
|
||||
msg += "it is already in loaded layers."
|
||||
else:
|
||||
layer_store.append([path])
|
||||
return
|
||||
dialog = CrumbsMessageDialog(parent, lbl, gtk.MESSAGE_ERROR, msg)
|
||||
dialog.add_button(gtk.STOCK_CLOSE, gtk.RESPONSE_OK)
|
||||
response = dialog.run()
|
||||
dialog.destroy()
|
||||
|
||||
def layer_widget_del_clicked_cb(self, action, tree_selection, layer_store):
|
||||
model, iter = tree_selection.get_selected()
|
||||
if iter:
|
||||
layer_store.remove(iter)
|
||||
|
||||
|
||||
def gen_layer_widget(self, layers, layers_avail, window, tooltip=""):
|
||||
hbox = gtk.HBox(False, 6)
|
||||
|
||||
layer_tv = gtk.TreeView()
|
||||
layer_tv.set_rules_hint(True)
|
||||
layer_tv.set_headers_visible(False)
|
||||
tree_selection = layer_tv.get_selection()
|
||||
tree_selection.set_mode(gtk.SELECTION_SINGLE)
|
||||
|
||||
# Allow enable drag and drop of rows including row move
|
||||
dnd_internal_target = ''
|
||||
dnd_targets = [(dnd_internal_target, gtk.TARGET_SAME_WIDGET, 0)]
|
||||
layer_tv.enable_model_drag_source( gtk.gdk.BUTTON1_MASK,
|
||||
dnd_targets,
|
||||
gtk.gdk.ACTION_MOVE)
|
||||
layer_tv.enable_model_drag_dest(dnd_targets,
|
||||
gtk.gdk.ACTION_MOVE)
|
||||
layer_tv.connect("drag_data_get", self.drag_data_get_cb)
|
||||
layer_tv.connect("drag_data_received", self.drag_data_received_cb)
|
||||
|
||||
col0= gtk.TreeViewColumn('Path')
|
||||
cell0 = gtk.CellRendererText()
|
||||
cell0.set_padding(5,2)
|
||||
col0.pack_start(cell0, True)
|
||||
col0.set_cell_data_func(cell0, self.draw_layer_path_cb)
|
||||
layer_tv.append_column(col0)
|
||||
|
||||
scroll = gtk.ScrolledWindow()
|
||||
scroll.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC)
|
||||
scroll.set_shadow_type(gtk.SHADOW_IN)
|
||||
scroll.add(layer_tv)
|
||||
|
||||
table_layer = gtk.Table(2, 10, False)
|
||||
hbox.pack_start(table_layer, expand=True, fill=True)
|
||||
|
||||
table_layer.attach(scroll, 0, 10, 0, 1)
|
||||
|
||||
layer_store = gtk.ListStore(gobject.TYPE_STRING)
|
||||
for layer in layers:
|
||||
layer_store.append([layer])
|
||||
|
||||
col1 = gtk.TreeViewColumn('Enabled')
|
||||
layer_tv.append_column(col1)
|
||||
|
||||
cell1 = CellRendererPixbufActivatable()
|
||||
cell1.set_fixed_size(-1,35)
|
||||
cell1.connect("clicked", self.del_cell_clicked_cb, layer_store)
|
||||
col1.pack_start(cell1, True)
|
||||
col1.set_cell_data_func(cell1, self.draw_delete_button_cb, layer_tv)
|
||||
|
||||
add_button = gtk.Button()
|
||||
add_button.set_relief(gtk.RELIEF_NONE)
|
||||
box = gtk.HBox(False, 6)
|
||||
box.show()
|
||||
add_button.add(box)
|
||||
add_button.connect("enter-notify-event", self.add_hover_cb)
|
||||
add_button.connect("leave-notify-event", self.add_leave_cb)
|
||||
self.im = gtk.Image()
|
||||
self.im.set_from_file(hic.ICON_INDI_ADD_FILE)
|
||||
self.im.show()
|
||||
box.pack_start(self.im, expand=False, fill=False, padding=6)
|
||||
lbl = gtk.Label("Add layer")
|
||||
lbl.set_alignment(0.0, 0.5)
|
||||
lbl.show()
|
||||
box.pack_start(lbl, expand=True, fill=True, padding=6)
|
||||
add_button.connect("clicked", self.layer_widget_add_clicked_cb, layer_store, window)
|
||||
table_layer.attach(add_button, 0, 10, 1, 2, gtk.EXPAND | gtk.FILL, 0, 0, 6)
|
||||
layer_tv.set_model(layer_store)
|
||||
|
||||
hbox.show_all()
|
||||
|
||||
return hbox, layer_store
|
||||
|
||||
def drag_data_get_cb(self, treeview, context, selection, target_id, etime):
|
||||
treeselection = treeview.get_selection()
|
||||
model, iter = treeselection.get_selected()
|
||||
data = model.get_value(iter, 0)
|
||||
selection.set(selection.target, 8, data)
|
||||
|
||||
def drag_data_received_cb(self, treeview, context, x, y, selection, info, etime):
|
||||
model = treeview.get_model()
|
||||
data = selection.data
|
||||
drop_info = treeview.get_dest_row_at_pos(x, y)
|
||||
if drop_info:
|
||||
path, position = drop_info
|
||||
iter = model.get_iter(path)
|
||||
if (position == gtk.TREE_VIEW_DROP_BEFORE or position == gtk.TREE_VIEW_DROP_INTO_OR_BEFORE):
|
||||
model.insert_before(iter, [data])
|
||||
else:
|
||||
model.insert_after(iter, [data])
|
||||
else:
|
||||
model.append([data])
|
||||
if context.action == gtk.gdk.ACTION_MOVE:
|
||||
context.finish(True, True, etime)
|
||||
return
|
||||
|
||||
def add_hover_cb(self, button, event):
|
||||
self.im.set_from_file(hic.ICON_INDI_ADD_HOVER_FILE)
|
||||
|
||||
def add_leave_cb(self, button, event):
|
||||
self.im.set_from_file(hic.ICON_INDI_ADD_FILE)
|
||||
|
||||
def __init__(self, title, layers, layers_non_removable, all_layers, parent, flags, buttons=None):
|
||||
super(LayerSelectionDialog, self).__init__(title, parent, flags, buttons)
|
||||
|
||||
# class members from other objects
|
||||
self.layers = layers
|
||||
self.layers_non_removable = layers_non_removable
|
||||
self.all_layers = all_layers
|
||||
self.layers_changed = False
|
||||
|
||||
# icon for remove button in TreeView
|
||||
im = gtk.Image()
|
||||
im.set_from_file(hic.ICON_INDI_REMOVE_FILE)
|
||||
self.rem_icon = im.get_pixbuf()
|
||||
|
||||
# class members for internal use
|
||||
self.layer_store = None
|
||||
|
||||
# create visual elements on the dialog
|
||||
self.create_visual_elements()
|
||||
self.connect("response", self.response_cb)
|
||||
|
||||
def create_visual_elements(self):
|
||||
layer_widget, self.layer_store = self.gen_layer_widget(self.layers, self.all_layers, self, None)
|
||||
layer_widget.set_size_request(450, 250)
|
||||
self.vbox.pack_start(layer_widget, expand=True, fill=True)
|
||||
self.show_all()
|
||||
|
||||
def response_cb(self, dialog, response_id):
|
||||
model = self.layer_store
|
||||
it = model.get_iter_first()
|
||||
layers = []
|
||||
while it:
|
||||
layers.append(model.get_value(it, 0))
|
||||
it = model.iter_next(it)
|
||||
|
||||
self.layers_changed = (self.layers != layers)
|
||||
self.layers = layers
|
||||
|
||||
"""
|
||||
A custom cell_data_func to draw a delete 'button' in the TreeView for layers
|
||||
other than the meta layer. The deletion of which is prevented so that the
|
||||
user can't shoot themselves in the foot too badly.
|
||||
"""
|
||||
def draw_delete_button_cb(self, col, cell, model, it, tv):
|
||||
path = model.get_value(it, 0)
|
||||
if path in self.layers_non_removable:
|
||||
cell.set_sensitive(False)
|
||||
cell.set_property('pixbuf', None)
|
||||
cell.set_property('mode', gtk.CELL_RENDERER_MODE_INERT)
|
||||
else:
|
||||
cell.set_property('pixbuf', self.rem_icon)
|
||||
cell.set_sensitive(True)
|
||||
cell.set_property('mode', gtk.CELL_RENDERER_MODE_ACTIVATABLE)
|
||||
|
||||
return True
|
||||
|
||||
"""
|
||||
A custom cell_data_func to write an extra message into the layer path cell
|
||||
for the meta layer. We should inform the user that they can't remove it for
|
||||
their own safety.
|
||||
"""
|
||||
def draw_layer_path_cb(self, col, cell, model, it):
|
||||
path = model.get_value(it, 0)
|
||||
if path in self.layers_non_removable:
|
||||
cell.set_property('markup', "<b>It cannot be removed</b>\n%s" % path)
|
||||
else:
|
||||
cell.set_property('text', path)
|
||||
|
||||
def del_cell_clicked_cb(self, cell, path, model):
|
||||
it = model.get_iter_from_string(path)
|
||||
model.remove(it)
|
||||
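A sketch of how the dialog above might be invoked; the layer path is a placeholder:

    import gtk

    meta = "/home/user/poky/meta"  # placeholder path
    dialog = LayerSelectionDialog("Layers", [meta], [meta], [],
                                  None, gtk.DIALOG_MODAL)
    button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
    HobAltButton.style_button(button)
    button = dialog.add_button("OK", gtk.RESPONSE_YES)
    HobButton.style_button(button)
    response = dialog.run()
    if response == gtk.RESPONSE_YES and dialog.layers_changed:
        # response_cb re-reads the store, so dialog.layers reflects rows
        # the user added, deleted or re-ordered by drag and drop.
        print dialog.layers
    dialog.destroy()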
163  bitbake/lib/bb/ui/crumbs/hig/parsingwarningsdialog.py  Normal file
@@ -0,0 +1,163 @@
|
||||
#
|
||||
# BitBake Graphical GTK User Interface
|
||||
#
|
||||
# Copyright (C) 2011-2012 Intel Corporation
|
||||
#
|
||||
# Authored by Cristiana Voicu <cristiana.voicu@intel.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import gtk
|
||||
import gobject
|
||||
from bb.ui.crumbs.hobwidget import HobAltButton
|
||||
from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
|
||||
|
||||
"""
|
||||
The following are convenience classes for implementing GNOME HIG compliant
|
||||
BitBake GUI's
|
||||
In summary: spacing = 12px, border-width = 6px
|
||||
"""
|
||||
|
||||
#
|
||||
# ParsingWarningsDialog
|
||||
#
|
||||
class ParsingWarningsDialog (CrumbsDialog):
|
||||
|
||||
def __init__(self, title, warnings, parent, flags, buttons=None):
|
||||
super(ParsingWarningsDialog, self).__init__(title, parent, flags, buttons)
|
||||
|
||||
self.warnings = warnings
|
||||
self.warning_on = 0
|
||||
self.warn_nb = len(warnings)
|
||||
|
||||
# create visual elements on the dialog
|
||||
self.create_visual_elements()
|
||||
|
||||
def cancel_button_cb(self, button):
|
||||
self.destroy()
|
||||
|
||||
def previous_button_cb(self, button):
|
||||
self.warning_on = self.warning_on - 1
|
||||
self.refresh_components()
|
||||
|
||||
def next_button_cb(self, button):
|
||||
self.warning_on = self.warning_on + 1
|
||||
self.refresh_components()
|
||||
|
||||
def refresh_components(self):
|
||||
lbl = self.warnings[self.warning_on]
|
||||
#when the warning text has more than 400 chars, it uses a scroll bar
|
||||
if 0<= len(lbl) < 400:
|
||||
self.warning_label.set_size_request(320, 230)
|
||||
self.warning_label.set_use_markup(True)
|
||||
self.warning_label.set_line_wrap(True)
|
||||
self.warning_label.set_markup(lbl)
|
||||
self.warning_label.set_property("yalign", 0.00)
|
||||
else:
|
||||
self.textWindow.set_shadow_type(gtk.SHADOW_IN)
|
||||
self.textWindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
|
||||
self.msgView = gtk.TextView()
|
||||
self.msgView.set_editable(False)
|
||||
self.msgView.set_wrap_mode(gtk.WRAP_WORD)
|
||||
self.msgView.set_cursor_visible(False)
|
||||
self.msgView.set_size_request(320, 230)
|
||||
self.buf = gtk.TextBuffer()
|
||||
self.buf.set_text(lbl)
|
||||
self.msgView.set_buffer(self.buf)
|
||||
self.textWindow.add(self.msgView)
|
||||
self.msgView.show()
|
||||
|
||||
if self.warning_on==0:
|
||||
self.previous_button.set_sensitive(False)
|
||||
else:
|
||||
self.previous_button.set_sensitive(True)
|
||||
|
||||
if self.warning_on==self.warn_nb-1:
|
||||
self.next_button.set_sensitive(False)
|
||||
else:
|
||||
self.next_button.set_sensitive(True)
|
||||
|
||||
if self.warn_nb>1:
|
||||
self.heading = "Warning " + str(self.warning_on + 1) + " of " + str(self.warn_nb)
|
||||
self.heading_label.set_markup('<span weight="bold">%s</span>' % self.heading)
|
||||
else:
|
||||
self.heading = "Warning"
|
||||
self.heading_label.set_markup('<span weight="bold">%s</span>' % self.heading)
|
||||
|
||||
self.show_all()
|
||||
|
||||
if 0<= len(lbl) < 400:
|
||||
self.textWindow.hide()
|
||||
else:
|
||||
self.warning_label.hide()
|
||||
|
||||
def create_visual_elements(self):
|
||||
self.set_size_request(350, 350)
|
||||
self.heading_label = gtk.Label()
|
||||
self.heading_label.set_alignment(0, 0)
|
||||
self.warning_label = gtk.Label()
|
||||
self.warning_label.set_selectable(True)
|
||||
self.warning_label.set_alignment(0, 0)
|
||||
self.textWindow = gtk.ScrolledWindow()
|
||||
|
||||
table = gtk.Table(1, 10, False)
|
||||
|
||||
cancel_button = gtk.Button()
|
||||
cancel_button.set_label("Close")
|
||||
cancel_button.connect("clicked", self.cancel_button_cb)
|
||||
cancel_button.set_size_request(110, 30)
|
||||
|
||||
self.previous_button = gtk.Button()
|
||||
image1 = gtk.image_new_from_stock(gtk.STOCK_GO_BACK, gtk.ICON_SIZE_BUTTON)
|
||||
image1.show()
|
||||
box = gtk.HBox(False, 6)
|
||||
box.show()
|
||||
self.previous_button.add(box)
|
||||
lbl = gtk.Label("Previous")
|
||||
lbl.show()
|
||||
box.pack_start(image1, expand=False, fill=False, padding=3)
|
||||
box.pack_start(lbl, expand=True, fill=True, padding=3)
|
||||
self.previous_button.connect("clicked", self.previous_button_cb)
|
||||
self.previous_button.set_size_request(110, 30)
|
||||
|
||||
self.next_button = gtk.Button()
|
||||
image2 = gtk.image_new_from_stock(gtk.STOCK_GO_FORWARD, gtk.ICON_SIZE_BUTTON)
|
||||
image2.show()
|
||||
box = gtk.HBox(False, 6)
|
||||
box.show()
|
||||
self.next_button.add(box)
|
||||
lbl = gtk.Label("Next")
|
||||
lbl.show()
|
||||
box.pack_start(lbl, expand=True, fill=True, padding=3)
|
||||
box.pack_start(image2, expand=False, fill=False, padding=3)
|
||||
self.next_button.connect("clicked", self.next_button_cb)
|
||||
self.next_button.set_size_request(110, 30)
|
||||
|
||||
#when there more than one warning, we need "previous" and "next" button
|
||||
if self.warn_nb>1:
|
||||
self.vbox.pack_start(self.heading_label, expand=False, fill=False)
|
||||
self.vbox.pack_start(self.warning_label, expand=False, fill=False)
|
||||
self.vbox.pack_start(self.textWindow, expand=False, fill=False)
|
||||
table.attach(cancel_button, 6, 7, 0, 1, xoptions=gtk.SHRINK)
|
||||
table.attach(self.previous_button, 7, 8, 0, 1, xoptions=gtk.SHRINK)
|
||||
table.attach(self.next_button, 8, 9, 0, 1, xoptions=gtk.SHRINK)
|
||||
self.vbox.pack_end(table, expand=False, fill=False)
|
||||
else:
|
||||
self.vbox.pack_start(self.heading_label, expand=False, fill=False)
|
||||
self.vbox.pack_start(self.warning_label, expand=False, fill=False)
|
||||
self.vbox.pack_start(self.textWindow, expand=False, fill=False)
|
||||
cancel_button = self.add_button("Close", gtk.RESPONSE_CANCEL)
|
||||
HobAltButton.style_button(cancel_button)
|
||||
|
||||
self.refresh_components()
|
||||
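A sketch of raising the dialog above with two hypothetical warning strings; with more than one warning the Previous/Next buttons become active and Close is the dialog's own table button:

    import gtk

    # Hypothetical warning texts, already formatted as Pango markup.
    warnings = ["<b>Warning:</b> recipe foo was skipped",
                "<b>Warning:</b> bbappend found without a matching recipe"]
    dialog = ParsingWarningsDialog("Warnings", warnings, None, gtk.DIALOG_MODAL)
    dialog.run()  # returns once the user closes the dialog
    dialog.destroy()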
437  bitbake/lib/bb/ui/crumbs/hig/propertydialog.py  Normal file
@@ -0,0 +1,437 @@
|
||||
#
|
||||
# BitBake Graphical GTK User Interface
|
||||
#
|
||||
# Copyright (C) 2011-2013 Intel Corporation
|
||||
#
|
||||
# Authored by Andrei Dinu <andrei.adrianx.dinu@intel.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import string
|
||||
import gtk
|
||||
import gobject
|
||||
import os
|
||||
import tempfile
|
||||
import glib
|
||||
from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
|
||||
from bb.ui.crumbs.hig.settingsuihelper import SettingsUIHelper
|
||||
from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog
|
||||
from bb.ui.crumbs.hig.layerselectiondialog import LayerSelectionDialog
|
||||
|
||||
"""
|
||||
The following are convenience classes for implementing GNOME HIG compliant
|
||||
BitBake GUI's
|
||||
In summary: spacing = 12px, border-width = 6px
|
||||
"""
|
||||
|
||||
class PropertyDialog(CrumbsDialog):
|
||||
|
||||
def __init__(self, title, parent, information, flags, buttons=None):
|
||||
|
||||
super(PropertyDialog, self).__init__(title, parent, flags, buttons)
|
||||
|
||||
self.properties = information
|
||||
|
||||
if len(self.properties) == 10:
|
||||
self.create_recipe_visual_elements()
|
||||
elif len(self.properties) == 5:
|
||||
self.create_package_visual_elements()
|
||||
else:
|
||||
self.create_information_visual_elements()
|
||||
|
||||
|
||||
def create_information_visual_elements(self):
|
||||
|
||||
HOB_ICON_BASE_DIR = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), ("icons/"))
|
||||
ICON_PACKAGES_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('info/info_display.png'))
|
||||
|
||||
self.set_resizable(False)
|
||||
|
||||
self.table = gtk.Table(1,1,False)
|
||||
self.table.set_row_spacings(0)
|
||||
self.table.set_col_spacings(0)
|
||||
|
||||
self.image = gtk.Image()
|
||||
self.image.set_from_file(ICON_PACKAGES_DISPLAY_FILE)
|
||||
self.image.set_property("xalign",0)
|
||||
#self.vbox.add(self.image)
|
||||
|
||||
image_info = self.properties.split("*")[0]
|
||||
info = self.properties.split("*")[1]
|
||||
|
||||
vbox = gtk.VBox(True, spacing=30)
|
||||
|
||||
self.label_short = gtk.Label()
|
||||
self.label_short.set_line_wrap(False)
|
||||
self.label_short.set_markup(image_info)
|
||||
self.label_short.set_property("xalign", 0)
|
||||
|
||||
self.info_label = gtk.Label()
|
||||
self.info_label.set_line_wrap(True)
|
||||
self.info_label.set_markup(info)
|
||||
self.info_label.set_property("yalign", 0.5)
|
||||
|
||||
self.table.attach(self.image, 0,1,0,1, xoptions=gtk.FILL|gtk.EXPAND, yoptions=gtk.FILL,xpadding=5,ypadding=5)
|
||||
self.table.attach(self.label_short, 0,1,0,1, xoptions=gtk.FILL|gtk.EXPAND, yoptions=gtk.FILL,xpadding=40,ypadding=5)
|
||||
self.table.attach(self.info_label, 0,1,1,2, xoptions=gtk.FILL|gtk.EXPAND, yoptions=gtk.FILL,xpadding=40,ypadding=10)
|
||||
|
||||
self.vbox.add(self.table)
|
||||
self.connect('delete-event', lambda w, e: self.destroy() or True)
|
||||
|
||||
def treeViewTooltip( self, widget, e, tooltips, cell, emptyText="" ):
|
||||
try:
|
||||
(path,col,x,y) = widget.get_path_at_pos( int(e.x), int(e.y) )
|
||||
it = widget.get_model().get_iter(path)
|
||||
value = widget.get_model().get_value(it,cell)
|
||||
if value in self.tooltip_items:
|
||||
tooltips.set_tip(widget, self.tooltip_items[value])
|
||||
tooltips.enable()
|
||||
else:
|
||||
tooltips.set_tip(widget, emptyText)
|
||||
except:
|
||||
tooltips.set_tip(widget, emptyText)
|
||||
|
||||
|
||||
def create_package_visual_elements(self):
|
||||
|
||||
import json
|
||||
|
||||
name = self.properties['name']
|
||||
binb = self.properties['binb']
|
||||
size = self.properties['size']
|
||||
recipe = self.properties['recipe']
|
||||
file_list = json.loads(self.properties['files_list'])
|
||||
|
||||
files_temp = ''
|
||||
paths_temp = ''
|
||||
files_binb = []
|
||||
paths_binb = []
|
||||
|
||||
self.tooltip_items = {}
|
||||
|
||||
self.set_resizable(False)
|
||||
|
||||
#cleaning out the recipe variable
|
||||
recipe = recipe.split("+")[0]
|
||||
|
||||
vbox = gtk.VBox(True,spacing = 0)
|
||||
|
||||
###################################### NAME ROW + COL #################################
|
||||
|
||||
self.label_short = gtk.Label()
|
||||
self.label_short.set_size_request(300,-1)
|
||||
self.label_short.set_selectable(True)
|
||||
self.label_short.set_line_wrap(True)
|
||||
self.label_short.set_markup("<span weight=\"bold\">Name: </span>" + name)
|
||||
self.label_short.set_property("xalign", 0)
|
||||
|
||||
self.vbox.add(self.label_short)
|
||||
|
||||
###################################### SIZE ROW + COL ######################################
|
||||
|
||||
self.label_short = gtk.Label()
|
||||
self.label_short.set_size_request(300,-1)
|
||||
self.label_short.set_selectable(True)
|
||||
self.label_short.set_line_wrap(True)
|
||||
self.label_short.set_markup("<span weight=\"bold\">Size: </span>" + size)
|
||||
self.label_short.set_property("xalign", 0)
|
||||
|
||||
self.vbox.add(self.label_short)
|
||||
|
||||
##################################### RECIPE ROW + COL #########################################
|
||||
|
||||
self.label_short = gtk.Label()
|
||||
self.label_short.set_size_request(300,-1)
|
||||
self.label_short.set_selectable(True)
|
||||
self.label_short.set_line_wrap(True)
|
||||
self.label_short.set_markup("<span weight=\"bold\">Recipe: </span>" + recipe)
|
||||
self.label_short.set_property("xalign", 0)
|
||||
|
||||
self.vbox.add(self.label_short)
|
||||
|
||||
##################################### BINB ROW + COL #######################################
|
||||
|
||||
if binb != '':
|
||||
self.label_short = gtk.Label()
|
||||
self.label_short.set_selectable(True)
|
||||
self.label_short.set_line_wrap(True)
|
||||
self.label_short.set_markup("<span weight=\"bold\">Brought in by: </span>")
|
||||
self.label_short.set_property("xalign", 0)
|
||||
|
||||
self.label_info = gtk.Label()
|
||||
self.label_info.set_size_request(300,-1)
|
||||
self.label_info.set_selectable(True)
|
||||
self.label_info.set_line_wrap(True)
|
||||
self.label_info.set_markup(binb)
|
||||
self.label_info.set_property("xalign", 0)
|
||||
|
||||
self.vbox.add(self.label_short)
|
||||
self.vbox.add(self.label_info)
|
||||
|
||||
#################################### FILES BROUGHT BY PACKAGES ###################################
|
||||
|
||||
if file_list:
|
||||
|
||||
self.textWindow = gtk.ScrolledWindow()
|
||||
self.textWindow.set_shadow_type(gtk.SHADOW_IN)
|
||||
self.textWindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
|
||||
self.textWindow.set_size_request(100, 170)
|
||||
|
||||
packagefiles_store = gtk.ListStore(str)
|
||||
|
||||
self.packagefiles_tv = gtk.TreeView()
|
||||
self.packagefiles_tv.set_rules_hint(True)
|
||||
self.packagefiles_tv.set_headers_visible(True)
|
||||
self.textWindow.add(self.packagefiles_tv)
|
||||
|
||||
self.cell1 = gtk.CellRendererText()
|
||||
col1 = gtk.TreeViewColumn('Package files', self.cell1)
|
||||
col1.set_cell_data_func(self.cell1, self.regex_field)
|
||||
self.packagefiles_tv.append_column(col1)
|
||||
|
||||
items = file_list.keys()
|
||||
items.sort()
|
||||
for item in items:
|
||||
fullpath = item
|
||||
while len(item) > 35:
|
||||
item = item[:len(item)/2] + "" + item[len(item)/2+1:]
|
||||
if len(item) == 35:
|
||||
item = item[:len(item)/2] + "..." + item[len(item)/2+3:]
|
||||
self.tooltip_items[item] = fullpath
|
||||
|
||||
packagefiles_store.append([str(item)])
|
||||
|
||||
self.packagefiles_tv.set_model(packagefiles_store)
|
||||
|
||||
tips = gtk.Tooltips()
|
||||
tips.set_tip(self.packagefiles_tv, "")
|
||||
self.packagefiles_tv.connect("motion-notify-event", self.treeViewTooltip, tips, 0)
|
||||
self.packagefiles_tv.set_events(gtk.gdk.POINTER_MOTION_MASK)
|
||||
|
||||
self.vbox.add(self.textWindow)
|
||||
|
||||
self.vbox.show_all()
|
||||
|
||||
|
||||
def regex_field(self, column, cell, model, iter):
|
||||
cell.set_property('text', model.get_value(iter, 0))
|
||||
return
|
||||
|
||||
|
||||
def create_recipe_visual_elements(self):
|
||||
|
||||
summary = self.properties['summary']
|
||||
name = self.properties['name']
|
||||
version = self.properties['version']
|
||||
revision = self.properties['revision']
|
||||
binb = self.properties['binb']
|
||||
group = self.properties['group']
|
||||
license = self.properties['license']
|
||||
homepage = self.properties['homepage']
|
||||
bugtracker = self.properties['bugtracker']
|
||||
description = self.properties['description']
|
||||
|
||||
self.set_resizable(False)
|
||||
|
||||
#cleaning out the version variable and also the summary
|
||||
version = version.split(":")[1]
|
||||
if len(version) > 30:
|
||||
version = version.split("+")[0]
|
||||
else:
|
||||
version = version.split("-")[0]
|
||||
license = license.replace("&" , "and")
|
||||
if (homepage == ''):
|
||||
homepage = 'unknown'
|
||||
if (bugtracker == ''):
|
||||
bugtracker = 'unknown'
|
||||
summary = summary.split("+")[0]
|
||||
|
||||
#calculating the rows needed for the table
|
||||
binb_items_count = len(binb.split(','))
|
||||
binb_items = binb.split(',')
|
||||
|
||||
vbox = gtk.VBox(False,spacing = 0)
|
||||
|
||||
######################################## SUMMARY LABEL #########################################
|
||||
|
||||
if summary != '':
|
||||
self.label_short = gtk.Label()
|
||||
self.label_short.set_width_chars(37)
|
||||
self.label_short.set_selectable(True)
|
||||
self.label_short.set_line_wrap(True)
|
||||
self.label_short.set_markup("<b>" + summary + "</b>")
|
||||
self.label_short.set_property("xalign", 0)
|
||||
|
||||
self.vbox.add(self.label_short)
|
||||
|
||||
########################################## NAME ROW + COL #######################################
|
||||
|
||||
self.label_short = gtk.Label()
|
||||
self.label_short.set_selectable(True)
|
||||
self.label_short.set_line_wrap(True)
|
||||
self.label_short.set_markup("<span weight=\"bold\">Name: </span>" + name)
|
||||
self.label_short.set_property("xalign", 0)
|
||||
|
||||
self.vbox.add(self.label_short)
|
||||
|
||||
####################################### VERSION ROW + COL ####################################
|
||||
|
||||
self.label_short = gtk.Label()
|
||||
self.label_short.set_selectable(True)
|
||||
self.label_short.set_line_wrap(True)
|
||||
self.label_short.set_markup("<span weight=\"bold\">Version: </span>" + version)
|
||||
self.label_short.set_property("xalign", 0)
|
||||
|
||||
self.vbox.add(self.label_short)
|
||||
|
||||
##################################### REVISION ROW + COL #####################################
|
||||
|
||||
self.label_short = gtk.Label()
|
||||
self.label_short.set_line_wrap(True)
|
||||
self.label_short.set_selectable(True)
|
||||
self.label_short.set_markup("<span weight=\"bold\">Revision: </span>" + revision)
|
||||
self.label_short.set_property("xalign", 0)
|
||||
|
||||
self.vbox.add(self.label_short)
|
||||
|
||||
################################## GROUP ROW + COL ############################################
|
||||
|
||||
self.label_short = gtk.Label()
|
||||
self.label_short.set_selectable(True)
|
||||
self.label_short.set_line_wrap(True)
|
||||
self.label_short.set_markup("<span weight=\"bold\">Group: </span>" + group)
|
||||
self.label_short.set_property("xalign", 0)
|
||||
|
||||
self.vbox.add(self.label_short)
|
||||
|
||||
################################# HOMEPAGE ROW + COL ############################################
|
||||
|
||||
if homepage != 'unknown':
|
||||
self.label_info = gtk.Label()
|
||||
self.label_info.set_selectable(True)
|
||||
self.label_info.set_line_wrap(True)
|
||||
if len(homepage) > 35:
|
||||
self.label_info.set_markup("<a href=\"" + homepage + "\">" + homepage[0:35] + "..." + "</a>")
|
||||
else:
|
||||
self.label_info.set_markup("<a href=\"" + homepage + "\">" + homepage[0:60] + "</a>")
|
||||
|
||||
self.label_info.set_property("xalign", 0)
|
||||
|
||||
self.label_short = gtk.Label()
|
||||
self.label_short.set_selectable(True)
|
||||
self.label_short.set_line_wrap(True)
|
||||
self.label_short.set_markup("<b>Homepage: </b>")
|
||||
self.label_short.set_property("xalign", 0)
|
||||
|
||||
self.vbox.add(self.label_short)
|
||||
self.vbox.add(self.label_info)
|
||||
|
||||
################################# BUGTRACKER ROW + COL ###########################################
|
||||
|
||||
if bugtracker != 'unknown':
|
||||
self.label_info = gtk.Label()
|
||||
self.label_info.set_selectable(True)
|
||||
self.label_info.set_line_wrap(True)
|
||||
if len(bugtracker) > 35:
|
||||
self.label_info.set_markup("<a href=\"" + bugtracker + "\">" + bugtracker[0:35] + "..." + "</a>")
|
||||
else:
|
||||
self.label_info.set_markup("<a href=\"" + bugtracker + "\">" + bugtracker[0:60] + "</a>")
|
||||
self.label_info.set_property("xalign", 0)
|
||||
|
||||
self.label_short = gtk.Label()
|
||||
self.label_short.set_selectable(True)
|
||||
self.label_short.set_line_wrap(True)
|
||||
self.label_short.set_markup("<b>Bugtracker: </b>")
|
||||
self.label_short.set_property("xalign", 0)
|
||||
|
||||
self.vbox.add(self.label_short)
|
||||
self.vbox.add(self.label_info)
|
||||
|
||||
################################# LICENSE ROW + COL ############################################
|
||||
|
||||
self.label_info = gtk.Label()
|
||||
self.label_info.set_selectable(True)
|
||||
self.label_info.set_line_wrap(True)
|
||||
self.label_info.set_markup(license)
|
||||
self.label_info.set_property("xalign", 0)
|
||||
|
||||
self.label_short = gtk.Label()
|
||||
self.label_short.set_selectable(True)
|
||||
self.label_short.set_line_wrap(True)
|
||||
self.label_short.set_markup("<span weight=\"bold\">License: </span>")
|
||||
self.label_short.set_property("xalign", 0)
|
||||
|
||||
self.vbox.add(self.label_short)
|
||||
self.vbox.add(self.label_info)
|
||||
|
||||
################################### BINB ROW+COL #############################################
|
||||
|
||||
if binb != '':
|
||||
self.label_short = gtk.Label()
|
||||
self.label_short.set_selectable(True)
|
||||
self.label_short.set_line_wrap(True)
|
||||
self.label_short.set_markup("<span weight=\"bold\">Brought in by: </span>")
|
||||
self.label_short.set_property("xalign", 0)
|
||||
self.vbox.add(self.label_short)
|
||||
self.label_info = gtk.Label()
|
||||
self.label_info.set_selectable(True)
|
||||
self.label_info.set_width_chars(36)
|
||||
if len(binb) > 200:
|
||||
scrolled_window = gtk.ScrolledWindow()
|
||||
scrolled_window.set_policy(gtk.POLICY_NEVER,gtk.POLICY_ALWAYS)
|
||||
scrolled_window.set_size_request(100,100)
|
||||
self.label_info.set_markup(binb)
|
||||
self.label_info.set_padding(6,6)
|
||||
self.label_info.set_alignment(0,0)
|
||||
self.label_info.set_line_wrap(True)
|
||||
scrolled_window.add_with_viewport(self.label_info)
|
||||
self.vbox.add(scrolled_window)
|
||||
else:
|
||||
self.label_info.set_markup(binb)
|
||||
self.label_info.set_property("xalign", 0)
|
||||
self.label_info.set_line_wrap(True)
|
||||
self.vbox.add(self.label_info)
|
||||
|
||||
################################ DESCRIPTION TAG ROW #################################################
|
||||
|
||||
self.label_short = gtk.Label()
|
||||
self.label_short.set_line_wrap(True)
|
||||
self.label_short.set_markup("<span weight=\"bold\">Description </span>")
|
||||
self.label_short.set_property("xalign", 0)
|
||||
self.vbox.add(self.label_short)
|
||||
|
||||
################################ DESCRIPTION INFORMATION ROW ##########################################
|
||||
|
||||
hbox = gtk.HBox(True,spacing = 0)
|
||||
|
||||
self.label_short = gtk.Label()
|
||||
self.label_short.set_selectable(True)
|
||||
self.label_short.set_width_chars(36)
|
||||
if len(description) > 200:
|
||||
scrolled_window = gtk.ScrolledWindow()
|
||||
scrolled_window.set_policy(gtk.POLICY_NEVER,gtk.POLICY_ALWAYS)
|
||||
scrolled_window.set_size_request(100,100)
|
||||
self.label_short.set_markup(description)
|
||||
self.label_short.set_padding(6,6)
|
||||
self.label_short.set_alignment(0,0)
|
||||
self.label_short.set_line_wrap(True)
|
||||
scrolled_window.add_with_viewport(self.label_short)
|
||||
self.vbox.add(scrolled_window)
|
||||
else:
|
||||
self.label_short.set_markup(description)
|
||||
self.label_short.set_property("xalign", 0)
|
||||
self.label_short.set_line_wrap(True)
|
||||
self.vbox.add(self.label_short)
|
||||
|
||||
self.vbox.show_all()
|
||||
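The constructor above dispatches on the shape of information: a ten-key dict selects the recipe view, a five-key dict the package view, and anything else is treated as a "heading*body" information string. A sketch of the last case, with a hypothetical info string:

    import gtk

    # Hypothetical information string; the '*' separates heading from body.
    info = "<b>Hob</b>*Select a machine and an image recipe to begin."
    dialog = PropertyDialog("Information", None, info, gtk.DIALOG_MODAL)
    dialog.add_button("Close", gtk.RESPONSE_OK)
    dialog.run()
    dialog.destroy()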
90  bitbake/lib/bb/ui/crumbs/hig/proxydetailsdialog.py  Normal file
@@ -0,0 +1,90 @@
|
||||
#
|
||||
# BitBake Graphical GTK User Interface
|
||||
#
|
||||
# Copyright (C) 2011-2012 Intel Corporation
|
||||
#
|
||||
# Authored by Joshua Lock <josh@linux.intel.com>
|
||||
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
|
||||
# Authored by Shane Wang <shane.wang@intel.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import gtk
|
||||
from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
|
||||
|
||||
"""
|
||||
The following are convenience classes for implementing GNOME HIG compliant
|
||||
BitBake GUI's
|
||||
In summary: spacing = 12px, border-width = 6px
|
||||
"""
|
||||
|
||||
class ProxyDetailsDialog (CrumbsDialog):
|
||||
|
||||
def __init__(self, title, user, passwd, parent, flags, buttons=None):
|
||||
super(ProxyDetailsDialog, self).__init__(title, parent, flags, buttons)
|
||||
self.connect("response", self.response_cb)
|
||||
|
||||
self.auth = not (user == None or passwd == None or user == "")
|
||||
self.user = user or ""
|
||||
self.passwd = passwd or ""
|
||||
|
||||
# create visual elements on the dialog
|
||||
self.create_visual_elements()
|
||||
|
||||
def create_visual_elements(self):
|
||||
self.auth_checkbox = gtk.CheckButton("Use authentication")
|
||||
self.auth_checkbox.set_tooltip_text("Check this box to set the username and the password")
|
||||
self.auth_checkbox.set_active(self.auth)
|
||||
self.auth_checkbox.connect("toggled", self.auth_checkbox_toggled_cb)
|
||||
self.vbox.pack_start(self.auth_checkbox, expand=False, fill=False)
|
||||
|
||||
hbox = gtk.HBox(False, 6)
|
||||
self.user_label = gtk.Label("Username:")
|
||||
self.user_text = gtk.Entry()
|
||||
self.user_text.set_text(self.user)
|
||||
hbox.pack_start(self.user_label, expand=False, fill=False)
|
||||
hbox.pack_end(self.user_text, expand=False, fill=False)
|
||||
self.vbox.pack_start(hbox, expand=False, fill=False)
|
||||
|
||||
hbox = gtk.HBox(False, 6)
|
||||
self.passwd_label = gtk.Label("Password:")
|
||||
self.passwd_text = gtk.Entry()
|
||||
self.passwd_text.set_text(self.passwd)
|
||||
hbox.pack_start(self.passwd_label, expand=False, fill=False)
|
||||
hbox.pack_end(self.passwd_text, expand=False, fill=False)
|
||||
self.vbox.pack_start(hbox, expand=False, fill=False)
|
||||
|
||||
self.refresh_auth_components()
|
||||
self.show_all()
|
||||
|
||||
def refresh_auth_components(self):
|
||||
self.user_label.set_sensitive(self.auth)
|
||||
self.user_text.set_editable(self.auth)
|
||||
self.user_text.set_sensitive(self.auth)
|
||||
self.passwd_label.set_sensitive(self.auth)
|
||||
self.passwd_text.set_editable(self.auth)
|
||||
self.passwd_text.set_sensitive(self.auth)
|
||||
|
||||
def auth_checkbox_toggled_cb(self, button):
|
||||
self.auth = self.auth_checkbox.get_active()
|
||||
self.refresh_auth_components()
|
||||
|
||||
def response_cb(self, dialog, response_id):
|
||||
if response_id == gtk.RESPONSE_OK:
|
||||
if self.auth:
|
||||
self.user = self.user_text.get_text()
|
||||
self.passwd = self.passwd_text.get_text()
|
||||
else:
|
||||
self.user = None
|
||||
self.passwd = None
|
||||
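A sketch of collecting proxy credentials with the dialog above; the username and password are placeholders:

    import gtk

    dialog = ProxyDetailsDialog("Proxy details", "user", "secret",
                                None, gtk.DIALOG_MODAL)
    dialog.add_button(gtk.STOCK_CANCEL, gtk.RESPONSE_NO)
    dialog.add_button(gtk.STOCK_OK, gtk.RESPONSE_OK)
    if dialog.run() == gtk.RESPONSE_OK:
        # response_cb copied the entry contents back, or reset both fields
        # to None when "Use authentication" was unchecked.
        print dialog.user, dialog.passwd
    dialog.destroy()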
Some files were not shown because too many files have changed in this diff.