Mirror of https://git.yoctoproject.org/poky, synced 2026-02-03 23:38:44 +01:00.

Compare commits: 383 commits, uninative-... to daisy.
Commit SHA1s in this range:

6717d19848 aeb31e09f0 26f0c306cb d65bcef0ae f1729cdbfa 925e46623a b8603a494d dc2ee2feb7
9901828b0d ad89b1fe80 ff505d781d 905938d0a1 1b58412449 2cd70c9150 9e9bb64fb5 39ec0738d4
3d7df7b5b5 0b1be9dc67 b9ec9f7425 9e4aad97c3 240da75616 b2ba41b575 56bd68e82c 0fb598c6b9
b0c1820261 ccd470ba5f 90a33dde44 b9da1f441b ccbb7ef72f cafdccb29c 13eda67126 91c507ce1c
97e9be8130 523aaea8e2 e625a82af2 efde5a1303 f1bb6acacc 7effa6edd0 02e603e48c 2d80a6bc8a
159f66aea7 6b8f7999c3 827dc7f12c 7c0d759c55 9ca89fe495 c82164fd0a 586a3d5ff5 7849633469
367b862d59 c4e9d9d9ae 7a43fb95d1 46e8377c42 148b7d20d4 d759301a34 134246d3d4 c088bac2f0
9766c76268 78b1cbcc72 3a4ee6bfd9 9bb6f7f3f0 15919f7e76 1e216c8087 a67b95ade2 1e668ccf1a
e3dd621197 30b8d9378b 895c86d71d 07a7905689 e76d790bbf b60383c1b9 19dc8bf950 6c576a4ac8
9f5ea81070 51a5a5df84 6c9eb8a67a b8e521809b 994b637d58 a85328245d 61da1b1197 3428e70035
dbf99ab134 18d859a8c9 313864bf52 e93f9a8382 df4a397df9 9b3389e023 4b22a21b4e 27a877becf
2d5bd89565 bda51ee782 5dd12beccd 1d21eaf4e0 95821e8566 57138de0fc 57a806cc32 452619ba41
8d28013312 570345adfd 9062377624 0143097095 4b1b580749 65ed47e597 608ac7794f 4dada3c092
43903a5bfd e464615684 7a12eda785 d88fa68141 3a93bfe1c7 9fee4d138b 87e924e377 8de7e102cf
6780f20525 1d04721fe8 a4d8015687 7b57145498 38b1b68923 ec3c8fcf81 b2f045c400 87671f72e7
b028947d67 f5847d4f24 59198004c0 984be33145 96ee64c96b 7a8f9114bc 21ac977e2a 33a4425d6d
8abf510a13 3ff30c0bfb 7e68f57dec 3b998b3f4c d8155f1f95 e378410fb2 3a1d9e9e11 26dcc54c60
8e05d5e3fe 5c1f10f56e 0843e07873 793afb3e81 aebbf8c8f3 c4f1f0f491 810dd79720 b0ce70ffa8
ac2d94b684 ce2336ddc7 5b8c5ea151 7a42bfecc2 26db62e359 18224a4a46 6d0ae0ef44 153787d4df
c55dea6a82 4ab29fc58f 0bc0ee66a8 db6819b0c3 bc3484e76c 9d84b2440d f8e61ed564 1db22d39b5
b7bf8bb051 839892ed27 232af2ec04 6a6bd2e96b 3103f04a30 bb27ca7562 7dcd9a6b72 a43dba8c29
d52b91316e efbf15ce20 3caae900f3 3428e6e0e4 96ca984621 8386f4203d b4b50e52d2 6101dd2b4c
ebf62ba85d 9deb3333b0 5bb9a05e0f 9ee3f77ed9 a714cf8700 d376e31c92 333e5f7076 a2fa51bdde
cb468dfaf0 d4c5f12601 7e6902963f c166a5add3 303d17ac3c 28938930ba bff6db6712 62b1fef787
fc9229e4ba 8509c1a7e5 05d751c23a b8f6c7c794 474ea6b826 21d15fac0e e98512e1e3 1f80e7f675
6a4a66aabb c03bb4d0c7 f91b780b1a 938e925356 c899777010 afb6a3688f 4209379cc8 6add5ac648
af515ca686 f9f97a1fed 48169ac9bc f091b8a3cf 38083d01e7 278c551168 83d1ce9e27 d44881fecc
948b8461e8 7bcc609bf0 f372806546 2361a8171b ee4d106987 896511d564 01c613e4bc 295dd76931
cd7e7addd7 ac9725acc5 b6124bdbfb 989013222e 87eaf4cf4a 8e22337e22 e130d2c8eb a692a9182a
44fddc9ba1 8bbd5958b0 6d898aef4c 412cb58083 57ccbc4c15 5d3c54a318 e5727ad31a 0cafa0eafe
bea6067392 c056b5e9a2 7bb4692ead dea4a69cfc 53d2def225 94e2a1793e b20ba9c4e5 7f4ff1a5c5
8fd7098318 9662a47204 78366c7e2c 287c3bec51 80f625a364 978c6c00d6 090cb60d49 2784c08229
b1ab59a8d0 3141bc16a5 b057375f77 4f0c5e5b32 9fb409bcc5 3d95a1cce5 361ddb10de 133472e7aa
e4b9dabfbb 45dbb4a080 517c2cc88d a1958d47c6 6547137fa3 651f3dc078 8333887235 33a8687635
7fee883b8b e6ea60b131 342eff6b38 8e5103a026 52fa8b8582 7892063223 5f4a75f904 b4e7ebe227
9ac13c344b 9b6c56a07d 02faddb5ca 77439dafd0 5709daae36 f0a153a7f6 f2103de785 ec984f1697
619c449b68 8bd20eb128 2645411074 d8b564530e af91e98e32 46c39b60c5 9153d11e6c de20bf01e4
56aaa6450b 5ca9285434 7631f6bbfc 08e2f06d36 424643f463 84396ed610 b28a902253 b94ebc582f
07600df4cb 00d8024741 aa39d9a2df 98ad3cb2c0 88b7b1a88a ec1f93c50c 6157ab451b 5306aaab07
31ab5dafa8 84d524c938 7bbc4b8a77 33dfe60c35 78217d37d2 69d4c63428 1eb75407ae 2c79d57ded
de87ba4b37 14a666b094 84bcf66436 cba4a8b80d c23e7052fb 7253253972 cddb415f72 6aed9f819d
30ac79c16d 3f00873a8a 4f6fb8c362 a3dcfa6a6a e0999660a8 897b87195c 1dfcb8968c 3f7bfb38a2
5b09536d38 21cd3d6212 4dc19ba0a9 bbaf0c65f1 0cb01121eb f9c2b9083e 3c8da7d5bc 3e49cee7e8
0ba2239abb 87a71c5017 761c6172f6 b958f2e6dc 4123b4e575 9301072deb 6d3e061287 3ff180c173
19b9fde3b2 bdc27cc405 7e30874db2 f9d0fd9bb1 3353d6bcce 904c35e049 9ff3a1de42 9ab4d1f5e6
a0f9efe7d6 a6193f3822 f0cbff052e 1929766ed5 bd1e9a6a3a 409d3cb7a2 49efe23169 46c0518279
40396bee2b cdbe3b5cee aba074edbf f11e51056d 9a178b6016 d8ee1658de b5c29e15f4 21da2dbb78
520b36fe41 82733c9f71 32857c5596 1d6146e0b1 a095826126 fd435cbfc5 6ca67b3288
.gitignore (vendored): 8 changed lines
@@ -1,20 +1,19 @@
*.pyc
*.pyo
/*.patch
/build*/
build*/
pyshtables.py
pstage/
scripts/oe-git-proxy-socks
sources/
meta-*/
!meta-skeleton
!meta-selftest
!meta-hob
hob-image-*.bb
*.swp
*.orig
*.rej
*~
!meta-poky
!meta-yocto
!meta-yocto-bsp
!meta-yocto-imported
@@ -22,6 +21,3 @@ documentation/user-manual/user-manual.html
documentation/user-manual/user-manual.pdf
documentation/user-manual/user-manual.tgz
pull-*/
bitbake/lib/toaster/contrib/tts/backlog.txt
bitbake/lib/toaster/contrib/tts/log/*
bitbake/lib/toaster/contrib/tts/.cache/*
@@ -1,2 +1,2 @@
# Template settings
TEMPLATECONF=${TEMPLATECONF:-meta-poky/conf}
TEMPLATECONF=${TEMPLATECONF:-meta-yocto/conf}
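The hunk above only swaps the default template directory: the build setup scripts read `.templateconf` to decide where the initial `local.conf` and `bblayers.conf` templates come from. A minimal sketch of overriding that default when creating a fresh build directory; the `meta-custom/conf` path is a hypothetical placeholder for your own layer:

```sh
# Hypothetical layer path; any directory holding conf templates works
export TEMPLATECONF=meta-custom/conf
source oe-init-build-env build-custom
```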
README: 29 changed lines
@@ -30,29 +30,20 @@ For information about OpenEmbedded, see the OpenEmbedded website:

Where to Send Patches
=====================

As Poky is an integration repository (built using a tool called combo-layer),
patches against the various components should be sent to their respective
upstreams:
As Poky is an integration repository, patches against the various components
should be sent to their respective upstreams.

bitbake:
    Git repository: http://git.openembedded.org/bitbake/
    Mailing list: bitbake-devel@lists.openembedded.org
    bitbake-devel@lists.openembedded.org

documentation:
    Git repository: http://git.yoctoproject.org/cgit/cgit.cgi/yocto-docs/
    Mailing list: yocto@yoctoproject.org
meta-yocto:
    poky@yoctoproject.org

meta-poky, meta-yocto-bsp:
    Git repository: http://git.yoctoproject.org/cgit/cgit.cgi/meta-yocto(-bsp)
    Mailing list: poky@yoctoproject.org

Everything else should be sent to the OpenEmbedded Core mailing list. If in
doubt, check the oe-core git repository for the content you intend to modify.
Almost everything else should be sent to the OpenEmbedded Core mailing list. If
in doubt, check the oe-core git repository for the content you intend to modify.
Before sending, be sure the patches apply cleanly to the current oe-core git
repository.
    openembedded-core@lists.openembedded.org

    Git repository: http://git.openembedded.org/openembedded-core/
    Mailing list: openembedded-core@lists.openembedded.org

Note: The scripts directory should be treated with extra care as it is a mix of
oe-core and poky-specific files.
Note: The scripts directory should be treated with extra care as it is a mix
of oe-core and poky-specific files.

@@ -52,13 +52,6 @@ The following boards are supported by the meta-yocto-bsp layer:

For more information see the board's section below. The appropriate MACHINE
variable value corresponding to the board is given in brackets.

Reference Board Maintenance
===========================

Send pull requests, patches, comments or questions about meta-yocto-bsps to poky@yoctoproject.org

Maintainers: Kevin Hao <kexin.hao@windriver.com>
             Bruce Ashfield <bruce.ashfield@windriver.com>

Consumer Devices
================
@@ -105,7 +98,9 @@ Intel Atom platforms:

and is likely to work on many unlisted Atom/Core/Xeon based devices. The MACHINE
type supports ethernet, wifi, sound, and Intel/vesa graphics by default in
addition to common PC input devices, busses, and so on.
addition to common PC input devices, busses, and so on. Note that it does not
include the binary-only graphics drivers used on some Atom platforms; for
accelerated graphics on these machines please refer to meta-intel.

Depending on the device, it can boot from a traditional hard-disk, a USB device,
or over the network. Writing generated images to physical media is
@@ -249,14 +244,14 @@ if used via a usb card reader):

5. If using core-image-minimal rootfs, install the modules
   # tar x -C /media/root -f modules-beaglebone.tgz

6. If using core-image-minimal rootfs, install the kernel zImage into /boot
6. If using core-image-minimal rootfs, install the kernel uImage into /boot
   directory of rootfs
   # cp zImage-beaglebone.bin /media/root/boot/zImage
   # cp uImage-beaglebone.bin /media/root/boot/uImage

7. If using core-image-minimal rootfs, also install device tree (DTB) files
   into /boot directory of rootfs
   # cp zImage-am335x-bone.dtb /media/root/boot/am335x-bone.dtb
   # cp zImage-am335x-boneblack.dtb /media/root/boot/am335x-boneblack.dtb
   # cp uImage-am335x-bone.dtb /media/root/boot/am335x-bone.dtb
   # cp uImage-am335x-boneblack.dtb /media/root/boot/am335x-boneblack.dtb

8. Unmount the SD partitions, insert the SD card into the Beaglebone, and
   boot the Beaglebone
@@ -322,22 +317,6 @@ Load the kernel and dtb (device tree blob), and boot the system as follows:

   => tftp 2000000 uImage-mpc8315e-rdb.dtb
   => bootm 1000000 - 2000000

--- Booting from JFFS2 root ---

1. First boot the board with NFS root.

2. Erase the MTD partition which will be used as root:

   $ flash_eraseall /dev/mtd3

3. Copy the JFFS2 image to the MTD partition:

   $ flashcp core-image-minimal-mpc8315e-rdb.jffs2 /dev/mtd3

4. Then reboot the board and set up the environment in U-Boot:

   => setenv bootargs root=/dev/mtdblock3 rootfstype=jffs2 console=ttyS0,115200


Ubiquiti Networks EdgeRouter Lite (edgerouter)
==============================================
@@ -350,14 +329,11 @@ Setup instructions
------------------

You will need the following:
* RJ45 -> serial ("rollover") cable connected from your PC to the CONSOLE
  port on the device
* Ethernet connected to the first ethernet port on the board

If using NFS as part of the setup process, you will also need:
* NFS root setup on your workstation
* TFTP server installed on your workstation (if fetching the kernel from
  TFTP, see below).
* TFTP server installed on your workstation
* RJ45 -> serial ("rollover") cable connected from your PC to the CONSOLE
  port on the board
* Ethernet connected to the first ethernet port on the board

--- Preparation ---

@@ -365,7 +341,7 @@ Build an image (e.g. core-image-minimal) using "edgerouter" as the MACHINE.

The following instructions are based on core-image-minimal. Other targets
should be similar.

--- Booting from NFS root / kernel via TFTP ---
--- Booting from NFS root ---

Load the kernel, and boot the system as follows:

@@ -8,5 +8,3 @@ Foundation and individual contributors.

* Twitter Bootstrap (including Glyphicons), redistributed under the Apache License 2.0.

* jQuery is redistributed under the MIT license.

* QUnit is redistributed under the MIT license.
@@ -23,31 +23,340 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os
import sys

import sys, logging
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)),
                                'lib'))

import optparse
import warnings
from traceback import format_exception
try:
    import bb
except RuntimeError as exc:
    sys.exit(str(exc))

from bb import event
import bb.msg
from bb import cooker
from bb import ui
from bb import server
from bb import cookerdata
from bb.main import bitbake_main, BitBakeConfigParameters, BBMainException

__version__ = "1.31.0"
__version__ = "1.22.0"
logger = logging.getLogger("BitBake")

# Python multiprocessing requires /dev/shm
if not os.access('/dev/shm', os.W_OK | os.X_OK):
    sys.exit("FATAL: /dev/shm does not exist or is not writable")

# Unbuffer stdout to avoid log truncation in the event
# of an unorderly exit as well as to provide timely
# updates to log files for use with tail
try:
    if sys.stdout.name == '<stdout>':
        sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
except:
    pass


def get_ui(config):
    if not config.ui:
        # modify 'ui' attribute because it is also read by cooker
        config.ui = os.environ.get('BITBAKE_UI', 'knotty')

    interface = config.ui

    try:
        # Dynamically load the UI based on the ui name. Although we
        # suggest a fixed set this allows you to have flexibility in which
        # ones are available.
        module = __import__("bb.ui", fromlist = [interface])
        return getattr(module, interface)
    except AttributeError:
        sys.exit("FATAL: Invalid user interface '%s' specified.\n"
                 "Valid interfaces: depexp, goggle, ncurses, hob, knotty [default]." % interface)


# Display bitbake/OE warnings via the BitBake.Warnings logger, ignoring others"""
warnlog = logging.getLogger("BitBake.Warnings")
_warnings_showwarning = warnings.showwarning
def _showwarning(message, category, filename, lineno, file=None, line=None):
    if file is not None:
        if _warnings_showwarning is not None:
            _warnings_showwarning(message, category, filename, lineno, file, line)
    else:
        s = warnings.formatwarning(message, category, filename, lineno)
        warnlog.warn(s)

warnings.showwarning = _showwarning
warnings.filterwarnings("ignore")
warnings.filterwarnings("default", module="(<string>$|(oe|bb)\.)")
warnings.filterwarnings("ignore", category=PendingDeprecationWarning)
warnings.filterwarnings("ignore", category=ImportWarning)
warnings.filterwarnings("ignore", category=DeprecationWarning, module="<string>$")
warnings.filterwarnings("ignore", message="With-statements now directly support multiple context managers")

class BitBakeConfigParameters(cookerdata.ConfigParameters):

    def parseCommandLine(self):
        parser = optparse.OptionParser(
            version = "BitBake Build Tool Core version %s, %%prog version %s" % (bb.__version__, __version__),
            usage = """%prog [options] [recipename/target ...]

Executes the specified task (default is 'build') for a given set of target recipes (.bb files).
It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which
will provide the layer, BBFILES and other configuration information.""")

        parser.add_option("-b", "--buildfile", help = "Execute tasks from a specific .bb recipe directly. WARNING: Does not handle any dependencies from other recipes.",
                          action = "store", dest = "buildfile", default = None)

        parser.add_option("-k", "--continue", help = "Continue as much as possible after an error. While the target that failed and anything depending on it cannot be built, as much as possible will be built before stopping.",
                          action = "store_false", dest = "abort", default = True)

        parser.add_option("-a", "--tryaltconfigs", help = "Continue with builds by trying to use alternative providers where possible.",
                          action = "store_true", dest = "tryaltconfigs", default = False)

        parser.add_option("-f", "--force", help = "Force the specified targets/task to run (invalidating any existing stamp file).",
                          action = "store_true", dest = "force", default = False)

        parser.add_option("-c", "--cmd", help = "Specify the task to execute. The exact options available depend on the metadata. Some examples might be 'compile' or 'populate_sysroot' or 'listtasks' may give a list of the tasks available.",
                          action = "store", dest = "cmd")

        parser.add_option("-C", "--clear-stamp", help = "Invalidate the stamp for the specified task such as 'compile' and then run the default task for the specified target(s).",
                          action = "store", dest = "invalidate_stamp")

        parser.add_option("-r", "--read", help = "Read the specified file before bitbake.conf.",
                          action = "append", dest = "prefile", default = [])

        parser.add_option("-R", "--postread", help = "Read the specified file after bitbake.conf.",
                          action = "append", dest = "postfile", default = [])

        parser.add_option("-v", "--verbose", help = "Output more log message data to the terminal.",
                          action = "store_true", dest = "verbose", default = False)

        parser.add_option("-D", "--debug", help = "Increase the debug level. You can specify this more than once.",
                          action = "count", dest="debug", default = 0)

        parser.add_option("-n", "--dry-run", help = "Don't execute, just go through the motions.",
                          action = "store_true", dest = "dry_run", default = False)

        parser.add_option("-S", "--dump-signatures", help = "Dump out the signature construction information, with no task execution. Parameters are passed to the signature handling code, use 'none' if no specific handler is required.",
                          action = "append", dest = "dump_signatures", default = [])

        parser.add_option("-p", "--parse-only", help = "Quit after parsing the BB recipes.",
                          action = "store_true", dest = "parse_only", default = False)

        parser.add_option("-s", "--show-versions", help = "Show current and preferred versions of all recipes.",
                          action = "store_true", dest = "show_versions", default = False)

        parser.add_option("-e", "--environment", help = "Show the global or per-package environment complete with information about where variables were set/changed.",
                          action = "store_true", dest = "show_environment", default = False)

        parser.add_option("-g", "--graphviz", help = "Save dependency tree information for the specified targets in the dot syntax.",
                          action = "store_true", dest = "dot_graph", default = False)

        parser.add_option("-I", "--ignore-deps", help = """Assume these dependencies don't exist and are already provided (equivalent to ASSUME_PROVIDED). Useful to make dependency graphs more appealing""",
                          action = "append", dest = "extra_assume_provided", default = [])

        parser.add_option("-l", "--log-domains", help = """Show debug logging for the specified logging domains""",
                          action = "append", dest = "debug_domains", default = [])

        parser.add_option("-P", "--profile", help = "Profile the command and save reports.",
                          action = "store_true", dest = "profile", default = False)

        parser.add_option("-u", "--ui", help = "The user interface to use (e.g. knotty, hob, depexp).",
                          action = "store", dest = "ui")

        parser.add_option("-t", "--servertype", help = "Choose which server to use, process or xmlrpc.",
                          action = "store", dest = "servertype")

        parser.add_option("", "--revisions-changed", help = "Set the exit code depending on whether upstream floating revisions have changed or not.",
                          action = "store_true", dest = "revisions_changed", default = False)

        parser.add_option("", "--server-only", help = "Run bitbake without a UI, only starting a server (cooker) process.",
                          action = "store_true", dest = "server_only", default = False)

        parser.add_option("-B", "--bind", help = "The name/address for the bitbake server to bind to.",
                          action = "store", dest = "bind", default = False)

        parser.add_option("", "--no-setscene", help = "Do not run any setscene tasks. sstate will be ignored and everything needed, built.",
                          action = "store_true", dest = "nosetscene", default = False)

        parser.add_option("", "--remote-server", help = "Connect to the specified server.",
                          action = "store", dest = "remote_server", default = False)

        parser.add_option("-m", "--kill-server", help = "Terminate the remote server.",
                          action = "store_true", dest = "kill_server", default = False)

        parser.add_option("", "--observe-only", help = "Connect to a server as an observing-only client.",
                          action = "store_true", dest = "observe_only", default = False)

        parser.add_option("", "--status-only", help = "Check the status of the remote bitbake server.",
                          action = "store_true", dest = "status_only", default = False)

        options, targets = parser.parse_args(sys.argv)

        # some environmental variables set also configuration options
        if "BBSERVER" in os.environ:
            options.servertype = "xmlrpc"
            options.remote_server = os.environ["BBSERVER"]

        return options, targets[1:]


def start_server(servermodule, configParams, configuration, features):
    server = servermodule.BitBakeServer()
    if configParams.bind:
        (host, port) = configParams.bind.split(':')
        server.initServer((host, int(port)))
        configuration.interface = [ server.serverImpl.host, server.serverImpl.port ]
    else:
        server.initServer()
        configuration.interface = []

    try:
        configuration.setServerRegIdleCallback(server.getServerIdleCB())

        cooker = bb.cooker.BBCooker(configuration, features)

        server.addcooker(cooker)
        server.saveConnectionDetails()
    except Exception as e:
        exc_info = sys.exc_info()
        while hasattr(server, "event_queue"):
            try:
                import queue
            except ImportError:
                import Queue as queue
            try:
                event = server.event_queue.get(block=False)
            except (queue.Empty, IOError):
                break
            if isinstance(event, logging.LogRecord):
                logger.handle(event)
        raise exc_info[1], None, exc_info[2]
    server.detach()
    return server



def main():

    configParams = BitBakeConfigParameters()
    configuration = cookerdata.CookerConfiguration()
    configuration.setConfigParameters(configParams)

    ui_module = get_ui(configParams)

    # Server type can be xmlrpc or process currently, if nothing is specified,
    # the default server is process
    if configParams.servertype:
        server_type = configParams.servertype
    else:
        server_type = 'process'

    try:
        module = __import__("bb.server", fromlist = [server_type])
        servermodule = getattr(module, server_type)
    except AttributeError:
        sys.exit("FATAL: Invalid server type '%s' specified.\n"
                 "Valid interfaces: xmlrpc, process [default]." % server_type)

    if configParams.server_only:
        if configParams.servertype != "xmlrpc":
            sys.exit("FATAL: If '--server-only' is defined, we must set the servertype as 'xmlrpc'.\n")
        if not configParams.bind:
            sys.exit("FATAL: The '--server-only' option requires a name/address to bind to with the -B option.\n")
        if configParams.remote_server:
            sys.exit("FATAL: The '--server-only' option conflicts with %s.\n" %
                     ("the BBSERVER environment variable" if "BBSERVER" in os.environ else "the '--remote-server' option" ))

    if configParams.bind and configParams.servertype != "xmlrpc":
        sys.exit("FATAL: If '-B' or '--bind' is defined, we must set the servertype as 'xmlrpc'.\n")

    if configParams.remote_server and configParams.servertype != "xmlrpc":
        sys.exit("FATAL: If '--remote-server' is defined, we must set the servertype as 'xmlrpc'.\n")

    if configParams.observe_only and (not configParams.remote_server or configParams.bind):
        sys.exit("FATAL: '--observe-only' can only be used by UI clients connecting to a server.\n")

    if "BBDEBUG" in os.environ:
        level = int(os.environ["BBDEBUG"])
        if level > configuration.debug:
            configuration.debug = level

    bb.msg.init_msgconfig(configParams.verbose, configuration.debug,
                          configuration.debug_domains)

    # Ensure logging messages get sent to the UI as events
    handler = bb.event.LogHandler()
    if not configParams.status_only:
        # In status only mode there are no logs and no UI
        logger.addHandler(handler)

    # Clear away any spurious environment variables while we stoke up the cooker
    cleanedvars = bb.utils.clean_environment()

    featureset = []
    if not configParams.server_only:
        # Collect the feature set for the UI
        featureset = getattr(ui_module, "featureSet", [])

    if not configParams.remote_server:
        # we start a server with a given configuration
        server = start_server(servermodule, configParams, configuration, featureset)
        bb.event.ui_queue = []
    else:
        # we start a stub server that is actually a XMLRPClient that connects to a real server
        server = servermodule.BitBakeXMLRPCClient(configParams.observe_only)
        server.saveConnectionDetails(configParams.remote_server)
        server.saveConnectionConfigParams(configParams)

    if not configParams.server_only:
        if configParams.status_only:
            try:
                server_connection = server.establishConnection(featureset)
            except:
                sys.exit(1)
            if not server_connection:
                sys.exit(1)
            server_connection.terminate()
            sys.exit(0)

        # Setup a connection to the server (cooker)
        server_connection = server.establishConnection(featureset)
        if not server_connection:
            if configParams.kill_server:
                bb.fatal("Server already killed")
            configParams.bind = configParams.remote_server
            start_server(servermodule, configParams, configuration, featureset)
            bb.event.ui_queue = []
            server_connection = server.establishConnection(featureset)

        # Restore the environment in case the UI needs it
        for k in cleanedvars:
            os.environ[k] = cleanedvars[k]

        logger.removeHandler(handler)

        try:
            return ui_module.main(server_connection.connection, server_connection.events, configParams)
        finally:
            bb.event.ui_queue = []
            server_connection.terminate()
    else:
        print("server address: %s, server port: %s" % (server.serverImpl.host, server.serverImpl.port))
        return 0

    return 1

if __name__ == "__main__":
    if __version__ != bb.__version__:
        sys.exit("Bitbake core version and program version mismatch!")
    try:
        sys.exit(bitbake_main(BitBakeConfigParameters(sys.argv),
                              cookerdata.CookerConfiguration()))
    except BBMainException as err:
        sys.exit(err)
        ret = main()
    except bb.BBHandledException:
        sys.exit(1)
        ret = 1
    except Exception:
        ret = 1
        import traceback
        traceback.print_exc()
        sys.exit(1)
    sys.exit(ret)

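The option checks in `main()` above enforce that `--server-only`, `--bind` and `--remote-server` all require the xmlrpc server type. A sketch of the resulting split workflow; the port number and target name are arbitrary examples:

```sh
# Start a UI-less cooker process bound to a local port
bitbake --server-only -t xmlrpc -B localhost:8200

# Attach a UI client to it from another shell
BBSERVER=localhost:8200 bitbake -u knotty core-image-minimal
```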
@@ -46,12 +46,6 @@ logger = logger_create('bitbake-diffsigs')
def find_compare_task(bbhandler, pn, taskname):
    """ Find the most recent signature files for the specified PN/task and compare them """

    def get_hashval(siginfo):
        if siginfo.endswith('.siginfo'):
            return siginfo.rpartition(':')[2].partition('_')[0]
        else:
            return siginfo.rpartition('.')[2]

    if not hasattr(bb.siggen, 'find_siginfo'):
        logger.error('Metadata does not support finding signature data files')
        sys.exit(1)
@@ -60,7 +54,7 @@ def find_compare_task(bbhandler, pn, taskname):
        taskname = 'do_%s' % taskname

    filedates = bb.siggen.find_siginfo(pn, taskname, None, bbhandler.config_data)
    latestfiles = sorted(filedates.keys(), key=lambda f: filedates[f])[-3:]
    latestfiles = sorted(filedates.keys(), key=lambda f: filedates[f])[-2:]
    if not latestfiles:
        logger.error('No sigdata files found matching %s %s' % (pn, taskname))
        sys.exit(1)
@@ -68,16 +62,6 @@ def find_compare_task(bbhandler, pn, taskname):
        logger.error('Only one matching sigdata file found for the specified task (%s %s)' % (pn, taskname))
        sys.exit(1)
    else:
        # It's possible that latestfiles contain 3 elements and the first two have the same hash value.
        # In this case, we delete the second element.
        # The above case is actually the most common one. Because we may have sigdata file and siginfo
        # file having the same hash value. Comparing such two files makes no sense.
        if len(latestfiles) == 3:
            hash0 = get_hashval(latestfiles[0])
            hash1 = get_hashval(latestfiles[1])
            if hash0 == hash1:
                latestfiles.pop(1)

        # Define recursion callback
        def recursecb(key, hash1, hash2):
            hashes = [hash1, hash2]
@@ -95,7 +79,7 @@ def find_compare_task(bbhandler, pn, taskname):
        # Recurse into signature comparison
        output = bb.siggen.compare_sigfiles(latestfiles[0], latestfiles[1], recursecb)
        if output:
            print('\n'.join(output))
            print '\n'.join(output)
        sys.exit(0)


@@ -130,9 +114,9 @@ else:
    except IOError as e:
        logger.error(str(e))
        sys.exit(1)
    except (pickle.UnpicklingError, EOFError):
    except cPickle.UnpicklingError, EOFError:
        logger.error('Invalid signature data - ensure you are specifying sigdata/siginfo files')
        sys.exit(1)

    if output:
        print('\n'.join(output))
        print '\n'.join(output)

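For reference, `find_compare_task()` above drives the mode where bitbake-diffsigs locates the latest signature files itself, while the `pickle`/`cPickle` branch handles explicitly named dump files. A usage sketch; the `-t` task mode and the stamp paths are assumptions based on this era of the script, and the hash placeholders are illustrative:

```sh
# Compare the two most recent signature files bitbake wrote for a task
bitbake-diffsigs -t busybox do_compile

# Or pass two sigdata/siginfo files directly (paths are illustrative)
bitbake-diffsigs tmp/stamps/.../do_compile.sigdata.<hash1> \
                 tmp/stamps/.../do_compile.sigdata.<hash2>
```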
@@ -57,9 +57,9 @@ else:
    except IOError as e:
        logger.error(str(e))
        sys.exit(1)
    except (pickle.UnpicklingError, EOFError):
    except cPickle.UnpicklingError, EOFError:
        logger.error('Invalid signature data - ensure you are specifying a sigdata/siginfo file')
        sys.exit(1)

    if output:
        print('\n'.join(output))
        print '\n'.join(output)

File diff suppressed because it is too large.
@@ -50,6 +50,6 @@ if __name__ == "__main__":
    except Exception:
        ret = 1
        import traceback
        traceback.print_exc()
        traceback.print_exc(5)
    sys.exit(ret)

@@ -26,30 +26,24 @@ except RuntimeError as exc:
    sys.exit(str(exc))

def usage():
    print('usage: [BB_SKIP_NETTESTS=yes] %s [-v] [testname1 [testname2]...]' % os.path.basename(sys.argv[0]))
    print('usage: %s [testname1 [testname2]...]' % os.path.basename(sys.argv[0]))

verbosity = 1

tests = sys.argv[1:]
if '-v' in sys.argv:
    tests.remove('-v')
    verbosity = 2

if tests:
if len(sys.argv) > 1:
    if '--help' in sys.argv[1:]:
        usage()
        sys.exit(0)

    tests = sys.argv[1:]
else:
    tests = ["bb.tests.codeparser",
             "bb.tests.cow",
             "bb.tests.data",
             "bb.tests.fetch",
             "bb.tests.parse",
             "bb.tests.utils"]

for t in tests:
    t = '.'.join(t.split('.')[:3])
    __import__(t)

unittest.main(argv=["bitbake-selftest"] + tests, verbosity=verbosity)
unittest.main(argv=["bitbake-selftest"] + tests)

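The newer usage string above documents both additions to bitbake-selftest: a `-v` verbosity flag and the `BB_SKIP_NETTESTS` environment knob. For example, to run just the fetcher tests verbosely while skipping network-dependent cases:

```sh
BB_SKIP_NETTESTS=yes ./bitbake/bin/bitbake-selftest -v bb.tests.fetch
```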
@@ -10,30 +10,12 @@ import bb
import select
import errno
import signal
from multiprocessing import Lock

# Users shouldn't be running this code directly
if len(sys.argv) != 2 or not sys.argv[1].startswith("decafbad"):
if len(sys.argv) != 2 or sys.argv[1] != "decafbad":
    print("bitbake-worker is meant for internal execution by bitbake itself, please don't use it standalone.")
    sys.exit(1)

profiling = False
if sys.argv[1].startswith("decafbadbad"):
    profiling = True
    try:
        import cProfile as profile
    except:
        import profile

# Unbuffer stdout to avoid log truncation in the event
# of an unorderly exit as well as to provide timely
# updates to log files for use with tail
try:
    if sys.stdout.name == '<stdout>':
        sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
except:
    pass

logger = logging.getLogger("BitBake")

try:
@@ -45,9 +27,6 @@ except ImportError:

worker_pipe = sys.stdout.fileno()
bb.utils.nonblockingfd(worker_pipe)
# Need to guard against multiprocessing being used in child processes
# and multiple processes trying to write to the parent at the same time
worker_pipe_lock = None

handler = bb.event.LogHandler()
logger.addHandler(handler)
@@ -84,21 +63,14 @@ def worker_flush():
        written = os.write(worker_pipe, worker_queue)
        worker_queue = worker_queue[written:]
    except (IOError, OSError) as e:
        if e.errno != errno.EAGAIN and e.errno != errno.EPIPE:
        if e.errno != errno.EAGAIN:
            raise

def worker_child_fire(event, d):
    global worker_pipe
    global worker_pipe_lock

    data = "<event>" + pickle.dumps(event) + "</event>"
    try:
        worker_pipe_lock.acquire()
        worker_pipe.write(data)
        worker_pipe_lock.release()
    except IOError:
        sigterm_handler(None, None)
        raise
    worker_pipe.write(data)

bb.event.worker_fire = worker_fire

@@ -109,11 +81,6 @@ def workerlog_write(msg):
        lf.write(msg)
        lf.flush()

def sigterm_handler(signum, frame):
    signal.signal(signal.SIGTERM, signal.SIG_DFL)
    os.killpg(0, signal.SIGTERM)
    sys.exit()

def fork_off_task(cfg, data, workerdata, fn, task, taskname, appends, taskdepdata, quieterrors=False):
    # We need to setup the environment BEFORE the fork, since
    # a fork() or exec*() activates PSEUDO...
@@ -159,32 +126,22 @@ def fork_off_task(cfg, data, workerdata, fn, task, taskname, appends, taskdepdat
        pipeout = os.fdopen(pipeout, 'wb', 0)
        pid = os.fork()
    except OSError as e:
        logger.critical("fork failed: %d (%s)" % (e.errno, e.strerror))
        sys.exit(1)
        bb.msg.fatal("RunQueue", "fork failed: %d (%s)" % (e.errno, e.strerror))

    if pid == 0:
    def child():
        global worker_pipe
        global worker_pipe_lock
        pipein.close()

        signal.signal(signal.SIGTERM, sigterm_handler)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, sigterm_handler)
        bb.utils.signal_on_parent_exit("SIGTERM")
        signal.signal(signal.SIGTERM, signal.SIG_DFL)

        # Save out the PID so that the events can include it
        bb.event.worker_pid = os.getpid()
        bb.event.worker_fire = worker_child_fire
        worker_pipe = pipeout
        worker_pipe_lock = Lock()

        # Make the child the process group leader and ensure no
        # child process will be controlled by the current terminal
        # This ensures signals sent to the controlling terminal like Ctrl+C
        # don't stop the child processes.
        os.setsid()
        # Make the child the process group leader
        os.setpgid(0, 0)
        # No stdin
        newsi = os.open(os.devnull, os.O_RDWR)
        os.dup2(newsi, sys.stdin.fileno())
@@ -197,13 +154,15 @@ def fork_off_task(cfg, data, workerdata, fn, task, taskname, appends, taskdepdat
        data.setVar("BUILDNAME", workerdata["buildname"])
        data.setVar("DATE", workerdata["date"])
        data.setVar("TIME", workerdata["time"])
        bb.parse.siggen.set_taskdata(workerdata["sigdata"])
        bb.parse.siggen.set_taskdata(workerdata["hashes"], workerdata["hash_deps"], workerdata["sigchecksums"])
        ret = 0
        try:
            the_data = bb.cache.Cache.loadDataFull(fn, appends, data)
            the_data.setVar('BB_TASKHASH', workerdata["runq_hash"][task])

            bb.utils.set_process_name("%s:%s" % (the_data.getVar("PN", True), taskname.replace("do_", "")))
            for h in workerdata["hashes"]:
                the_data.setVar("BBHASH_%s" % h, workerdata["hashes"][h])
            for h in workerdata["hash_deps"]:
                the_data.setVar("BBHASHDEPS_%s" % h, workerdata["hash_deps"][h])

            # exported_vars() returns a generator which *cannot* be passed to os.environ.update()
            # successfully. We also need to unset anything from the environment which shouldn't be there
@@ -224,22 +183,11 @@ def fork_off_task(cfg, data, workerdata, fn, task, taskname, appends, taskdepdat
            logger.critical(str(exc))
            os._exit(1)
        try:
            if cfg.dry_run:
                return 0
            return bb.build.exec_task(fn, taskname, the_data, cfg.profile)
            if not cfg.dry_run:
                ret = bb.build.exec_task(fn, taskname, the_data, cfg.profile)
            os._exit(ret)
        except:
            os._exit(1)
    if not profiling:
        os._exit(child())
    else:
        profname = "profile-%s.log" % (fn.replace("/", "-") + "-" + taskname)
        prof = profile.Profile()
        try:
            ret = profile.Profile.runcall(prof, child)
        finally:
            prof.dump_stats(profname)
            bb.utils.process_profilelog(profname)
            os._exit(ret)
    else:
        for key, value in envbackup.iteritems():
            if value is None:
@@ -297,18 +245,9 @@ class BitbakeWorker(object):
        self.build_pipes = {}

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)
        if "beef" in sys.argv[1]:
            bb.utils.set_process_name("Worker (Fakeroot)")
        else:
            bb.utils.set_process_name("Worker")

    def sigterm_exception(self, signum, stackframe):
        if signum == signal.SIGTERM:
            bb.warn("Worker received SIGTERM, shutting down...")
        elif signum == signal.SIGHUP:
            bb.warn("Worker received SIGHUP, shutting down...")
        bb.warn("Worker recieved SIGTERM, shutting down...")
        self.handle_finishnow(None)
        signal.signal(signal.SIGTERM, signal.SIG_DFL)
        os.kill(os.getpid(), signal.SIGTERM)
@@ -316,16 +255,13 @@ class BitbakeWorker(object):
    def serve(self):
        while True:
            (ready, _, _) = select.select([self.input] + [i.input for i in self.build_pipes.values()], [] , [], 1)
            if self.input in ready:
            if self.input in ready or len(self.queue):
                start = len(self.queue)
                try:
                    r = self.input.read()
                    if len(r) == 0:
                        # EOF on pipe, server must have terminated
                        self.sigterm_exception(signal.SIGTERM, None)
                    self.queue = self.queue + r
                    self.queue = self.queue + self.input.read()
                except (OSError, IOError):
                    pass
            if len(self.queue):
                end = len(self.queue)
                self.handle_item("cookerconfig", self.handle_cookercfg)
                self.handle_item("workerdata", self.handle_workerdata)
                self.handle_item("runtask", self.handle_runtask)
@@ -365,7 +301,7 @@ class BitbakeWorker(object):
    def handle_ping(self, _):
        workerlog_write("Handling ping\n")

        logger.warning("Pong from bitbake-worker!")
        logger.warn("Pong from bitbake-worker!")

    def handle_quit(self, data):
        workerlog_write("Handling quit\n")
@@ -426,16 +362,7 @@ class BitbakeWorker(object):

try:
    worker = BitbakeWorker(sys.stdin)
    if not profiling:
        worker.serve()
    else:
        profname = "profile-worker.log"
        prof = profile.Profile()
        try:
            profile.Profile.runcall(prof, worker.serve)
        finally:
            prof.dump_stats(profname)
            bb.utils.process_profilelog(profname)
    worker.serve()
except BaseException as e:
    if not normalexit:
        import traceback

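The `decafbad` argument check near the top of bitbake-worker is a guard, not a user interface: bitbake itself spawns the worker with that magic word, and the newer variant also accepts `decafbadbad` to enable profiling plus a word containing `beef` for the fakeroot worker. Invoked by hand it refuses to run:

```sh
# Standalone invocation trips the guard and exits with status 1
./bitbake/bin/bitbake-worker
# bitbake-worker is meant for internal execution by bitbake itself, please don't use it standalone.
```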
@@ -462,7 +462,7 @@ def main():
            state_group = 2

    for key in bb.data.keys(documentation):
        data = documentation.getVarFlag(key, "doc", False)
        data = documentation.getVarFlag(key, "doc")
        if not data:
            continue


@@ -34,7 +34,7 @@ from bb.ui.crumbs.hig.deployimagedialog import DeployImageDialog
from bb.ui.crumbs.hig.imageselectiondialog import ImageSelectionDialog

# I put all the fs bitbake supported here. Need more test.
DEPLOYABLE_IMAGE_TYPES = ["jffs2", "cramfs", "ext2", "ext3", "ext4", "btrfs", "squashfs", "ubi", "vmdk"]
DEPLOYABLE_IMAGE_TYPES = ["jffs2", "cramfs", "ext2", "ext3", "btrfs", "squashfs", "ubi", "vmdk"]
Title = "USB Image Writer"

class DeployWindow(gtk.Window):
@@ -119,4 +119,4 @@ if __name__ == '__main__':
        gtk.main()
    except Exception:
        import traceback
        traceback.print_exc()
        traceback.print_exc(3)

@@ -1,8 +1,5 @@
|
||||
#!/bin/echo ERROR: This script needs to be sourced. Please run as .
|
||||
|
||||
# toaster - shell script to start Toaster
|
||||
|
||||
# Copyright (C) 2013-2015 Intel Corp.
|
||||
#!/bin/bash
|
||||
# (c) 2013 Intel Corp.
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -15,278 +12,203 @@
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see http://www.gnu.org/licenses/.
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
|
||||
HELP="
|
||||
Usage: source toaster start|stop [webport=<address:port>] [noweb]
|
||||
Optional arguments:
|
||||
[noweb] Setup the environment for building with toaster but don't start the development server
|
||||
[webport] Set the development server (default: localhost:8000)
|
||||
"
|
||||
|
||||
webserverKillAll()
|
||||
# This script enables toaster event logging and
|
||||
# starts bitbake resident server
|
||||
# use as: source toaster [start|stop]
|
||||
|
||||
# Helper function to kill a background toaster development server
|
||||
|
||||
function webserverKillAll()
|
||||
{
|
||||
local pidfile
|
||||
for pidfile in ${BUILDDIR}/.toastermain.pid ${BUILDDIR}/.runbuilds.pid; do
|
||||
if [ -f ${pidfile} ]; then
|
||||
pid=`cat ${pidfile}`
|
||||
while kill -0 $pid 2>/dev/null; do
|
||||
kill -SIGTERM -$pid 2>/dev/null
|
||||
sleep 1
|
||||
# Kill processes if they are still running - may happen
|
||||
# in interactive shells
|
||||
ps fux | grep "python.*manage.py runserver" | awk '{print $2}' | xargs kill
|
||||
done
|
||||
rm ${pidfile}
|
||||
fi
|
||||
done
|
||||
local pidfile
|
||||
for pidfile in ${BUILDDIR}/.toastermain.pid; do
|
||||
if [ -f ${pidfile} ]; then
|
||||
while kill -0 $(< ${pidfile}) 2>/dev/null; do
|
||||
kill -SIGTERM -$(< ${pidfile}) 2>/dev/null
|
||||
sleep 1;
|
||||
done;
|
||||
rm ${pidfile}
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
webserverStartAll()
|
||||
function webserverStartAll()
|
||||
{
|
||||
# do not start if toastermain points to a valid process
|
||||
if ! cat "${BUILDDIR}/.toastermain.pid" 2>/dev/null | xargs -I{} kill -0 {} ; then
|
||||
retval=1
|
||||
rm "${BUILDDIR}/.toastermain.pid"
|
||||
fi
|
||||
|
||||
retval=0
|
||||
# you can always add a superuser later via
|
||||
# ../bitbake/lib/toaster/manage.py createsuperuser --username=<ME>
|
||||
$MANAGE migrate --noinput || retval=1
|
||||
|
||||
if [ $retval -eq 1 ]; then
|
||||
echo "Failed migrations, aborting system start" 1>&2
|
||||
retval=0
|
||||
python $BBBASEDIR/lib/toaster/manage.py syncdb || retval=1
|
||||
python $BBBASEDIR/lib/toaster/manage.py migrate orm || retval=2
|
||||
if [ $retval -eq 1 ]; then
|
||||
echo "Failed db sync, stopping system start" 1>&2
|
||||
elif [ $retval -eq 2 ]; then
|
||||
echo -e "\nError on migration, trying to recover... \n"
|
||||
python $BBBASEDIR/lib/toaster/manage.py migrate orm 0001_initial --fake
|
||||
retval=0
|
||||
python $BBBASEDIR/lib/toaster/manage.py migrate orm || retval=1
|
||||
fi
|
||||
if [ $retval -eq 0 ]; then
|
||||
python $BBBASEDIR/lib/toaster/manage.py runserver 0.0.0.0:8000 </dev/null >${BUILDDIR}/toaster_web.log 2>&1 & echo $! >${BUILDDIR}/.toastermain.pid
|
||||
sleep 1
|
||||
if ! cat "${BUILDDIR}/.toastermain.pid" | xargs -I{} kill -0 {} ; then
|
||||
retval=1
|
||||
rm "${BUILDDIR}/.toastermain.pid"
|
||||
fi
|
||||
fi
|
||||
return $retval
|
||||
fi
|
||||
}
|
||||
|
||||
$MANAGE checksettings --traceback || retval=1
|
||||
# Helper functions to add a special configuration file
|
||||
|
||||
if [ $retval -eq 1 ]; then
|
||||
printf "\nError while checking settings; aborting\n"
|
||||
return $retval
|
||||
fi
|
||||
|
||||
echo "Starting webserver..."
|
||||
|
||||
$MANAGE runserver "$ADDR_PORT" \
|
||||
</dev/null >>${BUILDDIR}/toaster_web.log 2>&1 \
|
||||
& echo $! >${BUILDDIR}/.toastermain.pid
|
||||
|
||||
sleep 1
|
||||
|
||||
if ! cat "${BUILDDIR}/.toastermain.pid" | xargs -I{} kill -0 {} ; then
|
||||
retval=1
|
||||
rm "${BUILDDIR}/.toastermain.pid"
|
||||
else
|
||||
echo "Toaster development webserver started at http://$ADDR_PORT"
|
||||
echo -e "\nYou can now run 'bitbake <target>' on the command line and monitor your build in Toaster.\nYou can also use a Toaster project to configure and run a build.\n"
|
||||
fi
|
||||
|
||||
return $retval
|
||||
function addtoConfiguration()
|
||||
{
|
||||
echo "#Created by toaster start script" > ${BUILDDIR}/conf/$2
|
||||
echo $1 >> ${BUILDDIR}/conf/$2
|
||||
}
|
||||
|
||||
INSTOPSYSTEM=0
|
||||
|
||||
# define the stop command
|
||||
stop_system()
|
||||
function stop_system()
|
||||
{
|
||||
# prevent reentry
|
||||
if [ $INSTOPSYSTEM -eq 1 ]; then return; fi
|
||||
if [ $INSTOPSYSTEM == 1 ]; then return; fi
|
||||
INSTOPSYSTEM=1
|
||||
if [ -f ${BUILDDIR}/.toasterui.pid ]; then
|
||||
kill `cat ${BUILDDIR}/.toasterui.pid` 2>/dev/null
|
||||
kill $(< ${BUILDDIR}/.toasterui.pid ) 2>/dev/null
|
||||
rm ${BUILDDIR}/.toasterui.pid
|
||||
fi
|
||||
BBSERVER=localhost:8200 bitbake -m
|
||||
unset BBSERVER
|
||||
webserverKillAll
|
||||
# unset exported variables
|
||||
unset DATABASE_URL
|
||||
unset TOASTER_CONF
|
||||
unset TOASTER_DIR
|
||||
unset BITBAKE_UI
|
||||
unset BBBASEDIR
|
||||
# force stop any misbehaving bitbake server
|
||||
lsof bitbake.lock | awk '{print $2}' | grep "[0-9]\+" | xargs -n1 -r kill
|
||||
trap - SIGHUP
|
||||
#trap - SIGCHLD
|
||||
INSTOPSYSTEM=0
|
||||
}
|
||||
|
||||
verify_prereq() {
|
||||
# Verify Django version
|
||||
reqfile=$(python -c "import os; print(os.path.realpath('$BBBASEDIR/toaster-requirements.txt'))")
|
||||
exp='s/Django\([><=]\+\)\([^,]\+\),\([><=]\+\)\(.\+\)/'
|
||||
exp=$exp'import sys,django;version=django.get_version().split(".");'
|
||||
exp=$exp'sys.exit(not (version \1 "\2".split(".") and version \3 "\4".split(".")))/p'
|
||||
if ! sed -n "$exp" $reqfile | python - ; then
|
||||
req=`grep ^Django $reqfile`
|
||||
echo "This program needs $req"
|
||||
echo "Please install with pip install -r $reqfile"
|
||||
return 2
|
||||
fi
|
||||
|
||||
return 0
|
||||
function check_pidbyfile() {
|
||||
[ -e $1 ] && kill -0 $(< $1) 2>/dev/null
|
||||
}
|
||||
|
||||
# read command line parameters
|
||||
if [ -n "$BASH_SOURCE" ] ; then
|
||||
TOASTER=${BASH_SOURCE}
|
||||
elif [ -n "$ZSH_NAME" ] ; then
|
||||
TOASTER=${(%):-%x}
|
||||
else
|
||||
TOASTER=$0
|
||||
fi
|
||||
|
||||
export BBBASEDIR=`dirname $TOASTER`/..
|
||||
MANAGE=$BBBASEDIR/lib/toaster/manage.py
|
||||
OEROOT=`dirname $TOASTER`/../..
|
||||
|
||||
# this is the configuraton file we are using for toaster
|
||||
# we are using the same logic that oe-setup-builddir uses
|
||||
# (based on TEMPLATECONF and .templateconf) to determine
|
||||
# which toasterconf.json to use.
|
||||
# note: There are a number of relative path assumptions
|
||||
# in the local layers that currently make using an arbitrary
|
||||
# toasterconf.json difficult.
|
||||
|
||||
. $OEROOT/.templateconf
|
||||
if [ -n "$TEMPLATECONF" ]; then
|
||||
if [ ! -d "$TEMPLATECONF" ]; then
|
||||
# Allow TEMPLATECONF=meta-xyz/conf as a shortcut
|
||||
if [ -d "$OEROOT/$TEMPLATECONF" ]; then
|
||||
TEMPLATECONF="$OEROOT/$TEMPLATECONF"
|
||||
fi
|
||||
if [ ! -d "$TEMPLATECONF" ]; then
|
||||
echo >&2 "Error: '$TEMPLATECONF' must be a directory containing toasterconf.json"
|
||||
return 1
|
||||
fi
|
||||
function notify_chldexit() {
|
||||
if [ $NOTOASTERUI == 0 ]; then
|
||||
check_pidbyfile ${BUILDDIR}/.toasterui.pid && return
|
||||
stop_system
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
if [ "$TOASTER_CONF" = "" ]; then
|
||||
TOASTER_CONF="$TEMPLATECONF/toasterconf.json"
|
    export TOASTER_CONF=$(python -c "import os; print(os.path.realpath('$TOASTER_CONF'))")
fi

if [ ! -f $TOASTER_CONF ]; then
    echo "$TOASTER_CONF configuration file not found. Set TOASTER_CONF to specify file or fix .templateconf"
    return 1
fi

# this defines the dir toaster will use for
# 1) clones of layers (in _toaster_clones )
# 2) the build dir (in build)
# 3) the sqlite db if that is being used.
# 4) pid's we need to clean up on exit/shutdown
# note: for future. in order to make this an arbitrary directory, we need to
# make sure that the toaster.sqlite file doesn't default to `pwd` like it currently does.
export TOASTER_DIR=`pwd`

WEBSERVER=1
ADDR_PORT="localhost:8000"
unset CMD
for param in $*; do
    case $param in
    noweb )
        WEBSERVER=0
        ;;
    start )
        CMD=$param
        ;;
    stop )
        CMD=$param
        ;;
    webport=*)
        ADDR_PORT="${param#*=}"
        # Split the addr:port string
        ADDR=`echo $ADDR_PORT | cut -f 1 -d ':'`
        PORT=`echo $ADDR_PORT | cut -f 2 -d ':'`
        # If only a port has been specified then set address to localhost.
        if [ $ADDR = $PORT ] ; then
            ADDR_PORT="localhost:$PORT"
        fi
        ;;
    *)
        echo "$HELP"
        return 1
        ;;
    esac
done

if [ `basename \"$0\"` = `basename \"${TOASTER}\"` ]; then
    echo "Error: This script needs to be sourced. Please run as . $TOASTER"
    return 1
fi

verify_prereq || return 1

# We make sure we're running in the current shell and in a good environment
if [ -z "$BUILDDIR" ] || ! which bitbake >/dev/null 2>&1 ; then
    echo "Error: Build environment is not setup or bitbake is not in path." 1>&2

if [ -z "$ZSH_NAME" ] && [ `basename \"$0\"` = `basename \"$BASH_SOURCE\"` ]; then
    echo "Error: This script needs to be sourced. Please run as 'source toaster [start|stop]'" 1>&2;
    exit 1
fi

if [ -z "$BUILDDIR" ] || [ -z `which bitbake` ]; then
    echo "Error: Build environment is not setup or bitbake is not in path." 1>&2;
    return 2
fi

# this defines the dir toaster will use for
# 1) clones of layers (in _toaster_clones )
# 2) the build dir (in build)
# 3) the sqlite db if that is being used.
# 4) pid's we need to clean up on exit/shutdown
# note: for future. in order to make this an arbitrary directory, we need to
# make sure that the toaster.sqlite file doesn't default to `pwd`
# like it currently does.
export TOASTER_DIR=`dirname $BUILDDIR`
BBBASEDIR=`dirname ${BASH_SOURCE}`/..

# Verify prerequisites

if ! echo "import django; print (1,5) == django.VERSION[0:2]" | python 2>/dev/null | grep True >/dev/null; then
    echo -e "This program needs Django 1.5. Please install with\n\nsudo pip install django==1.5"
    return 2
fi

if ! echo "import south; print [0,8,4] == map(int,south.__version__.split(\".\"))" | python 2>/dev/null | grep True >/dev/null; then
    echo -e "This program needs South 0.8.4. Please install with\n\nsudo pip install south==0.8.4"
    return 2
fi

# Determine the action. If specified by arguments, fine, if not, toggle it
if [ "$CMD" = "start" ] ; then
    if [ -n "$BBSERVER" ]; then
        echo " Toaster is already running. Exiting..."
        return 1
    fi
elif [ "$CMD" = "" ]; then
    echo "No command specified"
    echo "$HELP"
    return 1
if [ "x$1" == "xstart" ] || [ "x$1" == "xstop" ]; then
    CMD="$1"
else
    if [ -z "$BBSERVER" ]; then
        CMD="start"
    else
        CMD="stop"
    fi;
fi

NOTOASTERUI=0
for param in $*; do
    case $param in
    noui )
        NOTOASTERUI=1
        ;;
    esac
done

echo "The system will $CMD."

# Make sure it's safe to run by checking bitbake lock

lock=1
if [ -e $BUILDDIR/bitbake.lock ]; then
    (flock -n 200 ) 200<$BUILDDIR/bitbake.lock || lock=0
fi
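# Note: the subshell above takes a non-blocking flock on file descriptor 200,
# which is redirected from bitbake.lock; if a running bitbake server already
# holds that lock, flock fails and lock is set to 0.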
if [ ${CMD} == "start" ] && ( [ $lock -eq 0 ] || [ -e $BUILDDIR/.toastermain.pid ] ); then
    echo "Error: bitbake lock state error. File locks show that the system is on." 2>&1
    echo "If you see problems, stop and then start the system again." 2>&1
    return 3
fi

# Execute the commands

case $CMD in
    start )
        # check if addr:port is not in use
        if [ "$CMD" == 'start' ]; then
            if [ $WEBSERVER -gt 0 ]; then
                $MANAGE checksocket "$ADDR_PORT" || return 1
            fi
        fi

        # kill Toaster web server if it's alive
        if [ -e $BUILDDIR/.toastermain.pid ] && kill -0 `cat $BUILDDIR/.toastermain.pid`; then
            echo "Warning: bitbake appears to be dead, but the Toaster web server is running." 1>&2
            echo "         Something fishy is going on." 1>&2
            echo "Cleaning up the web server to start from a clean slate."
            webserverKillAll
        fi

        # Create configuration file
        conf=${BUILDDIR}/conf/local.conf
        line='INHERIT+="toaster buildhistory"'
        grep -q "$line" $conf || echo $line >> $conf

        if [ $WEBSERVER -gt 0 ] && ! webserverStartAll; then
        start_success=1
        addtoConfiguration "INHERIT+=\"toaster buildhistory\"" toaster.conf
        if ! webserverStartAll; then
            echo "Failed ${CMD}."
            return 4
        fi
        export BITBAKE_UI='toasterui'
        export DATABASE_URL=`$MANAGE get-dburl`
        $MANAGE runbuilds & echo $! >${BUILDDIR}/.runbuilds.pid
        # set fail safe stop system on terminal exit
        unset BBSERVER
        bitbake --postread conf/toaster.conf --server-only -t xmlrpc -B localhost:8200
        if [ $? -ne 0 ]; then
            start_success=0
            echo "Bitbake server start failed"
        else
            export BBSERVER=localhost:8200
            if [ $NOTOASTERUI == 0 ]; then # we start the TOASTERUI only if not inhibited
                bitbake --observe-only -u toasterui >${BUILDDIR}/toaster_ui.log 2>&1 & echo $! >${BUILDDIR}/.toasterui.pid
            fi
        fi
        if [ $start_success -eq 1 ]; then
            # set fail safe stop system on terminal exit
            trap stop_system SIGHUP
            echo "Successful ${CMD}."
        else
            # failed start, do stop
            stop_system
            echo "Failed ${CMD}."
        fi
        # stop system on terminal exit
        set -o monitor
        trap stop_system SIGHUP
        echo "Successful ${CMD}."
        return 0
        #trap notify_chldexit SIGCHLD
        ;;
    stop )
        stop_system
        echo "Successful ${CMD}."
        ;;
esac
@@ -1,174 +0,0 @@
#!/usr/bin/env python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (C) 2014        Alex Damian
#
# This file re-uses code spread throughout other Bitbake source files.
# As such, all other copyrights belong to their own right holders.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

# This command takes a filename as a single parameter. The filename is read
# as a build eventlog, and the ToasterUI is used to process events in the file
# and log data in the database.

from __future__ import print_function
import os
import sys, logging

# mangle syspath to allow easy import of modules
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
                                'lib'))

import bb.cooker
from bb.ui import toasterui
import json, pickle


class FileReadEventsServerConnection():
    """ Emulates a connection to a bitbake server that feeds
        events coming actually read from a saved log file.
    """

    class MockConnection():
        """ fill-in for the proxy to the server. we just return generic data
        """
        def __init__(self, sc):
            self._sc = sc

        def runCommand(self, commandArray):
            """ emulates running a command on the server; only read-only commands are accepted """
            command_name = commandArray[0]

            if command_name == "getVariable":
                if commandArray[1] in self._sc._variables:
                    return (self._sc._variables[commandArray[1]]['v'], None)
                return (None, "Missing variable")

            elif command_name == "getAllKeysWithFlags":
                dump = {}
                flaglist = commandArray[1]
                for k in self._sc._variables.keys():
                    try:
                        if not k.startswith("__"):
                            v = self._sc._variables[k]['v']
                            dump[k] = {
                                'v' : v,
                                'history' : self._sc._variables[k]['history'],
                            }
                            for d in flaglist:
                                dump[k][d] = self._sc._variables[k][d]
                    except Exception as e:
                        print(e)
                return (dump, None)
            else:
                raise Exception("Command %s not implemented" % commandArray[0])

        def terminateServer(self):
            """ do not do anything """
            pass


    class EventReader():
        def __init__(self, sc):
            self._sc = sc
            self.firstraise = 0

        def _create_event(self, line):
            def _import_class(name):
                assert len(name) > 0
                assert "." in name, name

                components = name.strip().split(".")
                modulename = ".".join(components[:-1])
                moduleklass = components[-1]

                module = __import__(modulename, fromlist=[str(moduleklass)])
                return getattr(module, moduleklass)

            # we build a toaster event out of current event log line
            try:
                event_data = json.loads(line.strip())
                event_class = _import_class(event_data['class'])
                event_object = pickle.loads(json.loads(event_data['vars']))
            except ValueError as e:
                print("Failed loading ", line)
                raise e

            if not isinstance(event_object, event_class):
                raise Exception("Error loading objects %s class %s ", event_object, event_class)

            return event_object

        def waitEvent(self, timeout):
            nextline = self._sc._eventfile.readline()
            if len(nextline) == 0:
                # the build data ended, while toasterui still waits for events.
                # this happens when the server was abruptly stopped, so we simulate this
                self.firstraise += 1
                if self.firstraise == 1:
                    raise KeyboardInterrupt()
                else:
                    return None
            else:
                self._sc.lineno += 1
                return self._create_event(nextline)


    def _readVariables(self, variableline):
        self._variables = json.loads(variableline.strip())['allvariables']


    def __init__(self, file_name):
        self.connection = FileReadEventsServerConnection.MockConnection(self)
        self._eventfile = open(file_name, "r")

        # we expect to have the variable dump at the start of the file
        self.lineno = 1
        self._readVariables(self._eventfile.readline())

        self.events = FileReadEventsServerConnection.EventReader(self)


class MockConfigParameters():
    """ stand-in for cookerdata.ConfigParameters; as we don't really config a cooker, this
        serves just to supply needed interfaces for the toaster ui to work """
    def __init__(self):
        self.observe_only = True  # we can only read files


# run toaster ui on our mock bitbake class
if __name__ == "__main__":
    if len(sys.argv) < 2:
        print("Usage: %s event.log " % sys.argv[0])
        sys.exit(1)

    file_name = sys.argv[-1]
    mock_connection = FileReadEventsServerConnection(file_name)
    configParams = MockConfigParameters()

    # run the main program and set exit code to the returned value
    sys.exit(toasterui.main(mock_connection.connection, mock_connection.events, configParams))
@@ -36,7 +36,7 @@ def main(argv=None):
    Get the mapping for the target recipe.
    """
    if len(argv) != 1:
        print("Error, need one argument!", file=sys.stderr)
        print >>sys.stderr, "Error, need one argument!"
        return 2

    cachefile = argv[0]

@@ -56,7 +56,7 @@ def main(argv=None):
            continue

        # 1.0 is the default version for a no PV recipe.
        if "pv" in val.__dict__:
        if val.__dict__.has_key("pv"):
            pv = val.pv
        else:
            pv = "1.0"
@@ -53,6 +53,7 @@ fun! NewBBTemplate()
    put ='LICENSE = \"\"'
    put ='SECTION = \"\"'
    put ='DEPENDS = \"\"'
    put ='PR = \"r0\"'
    put =''
    put ='SRC_URI = \"\"'
@@ -11,7 +11,7 @@
#  validate: validates
#  clean: removes files
#
# The Makefile generates an HTML version of every document. The
# The Makefile generates an HTML and PDF version of every document. The
# variable DOC indicates the folder name for a given manual.
#
# To build a manual, you must invoke 'make' with the DOC argument.

@@ -21,8 +21,8 @@
#     make DOC=bitbake-user-manual
#     make pdf DOC=bitbake-user-manual
#
# The first example generates the HTML version of the User Manual.
# The second example generates the PDF version of the User Manual.
# The first example generates the HTML and PDF versions of the User Manual.
# The second example generates the HTML version only of the User Manual.
#

ifeq ($(DOC),bitbake-user-manual)
@@ -31,9 +31,9 @@ XSLTOPTS = --stringparam html.stylesheet bitbake-user-manual-style.css \
           --stringparam section.autolabel 1 \
           --stringparam section.label.includes.component.label 1 \
           --xinclude
ALLPREQ = html tarball
TARFILES = bitbake-user-manual-style.css bitbake-user-manual.html figures/bitbake-title.png
MANUALS = $(DOC)/$(DOC).html
ALLPREQ = html pdf tarball
TARFILES = bitbake-user-manual-style.css bitbake-user-manual.html bitbake-user-manual.pdf figures/bitbake-title.png
MANUALS = $(DOC)/$(DOC).html $(DOC)/$(DOC).pdf
FIGURES = figures
STYLESHEET = $(DOC)/*.css
@@ -1,25 +1,11 @@
<?xml version='1.0'?>
<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns="http://www.w3.org/1999/xhtml" xmlns:fo="http://www.w3.org/1999/XSL/Format" version="1.0">

<xsl:import href="http://downloads.yoctoproject.org/mirror/docbook-mirror/docbook-xsl-1.76.1/xhtml/docbook.xsl" />

<!--

<xsl:import href="../template/1.76.1/docbook-xsl-1.76.1/xhtml/docbook.xsl" />

<xsl:import href="http://docbook.sourceforge.net/release/xsl/1.76.1/xhtml/docbook.xsl" />

-->

<xsl:include href="../template/permalinks.xsl"/>
<xsl:include href="../template/section.title.xsl"/>
<xsl:include href="../template/component.title.xsl"/>
<xsl:include href="../template/division.title.xsl"/>
<xsl:include href="../template/formal.object.heading.xsl"/>
<xsl:include href="../template/gloss-permalinks.xsl"/>
<xsl:import href="http://docbook.sourceforge.net/release/xsl/current/xhtml/docbook.xsl" />

<xsl:param name="html.stylesheet" select="'user-manual-style.css'" />
<xsl:param name="chapter.autolabel" select="1" />
<!-- <xsl:param name="appendix.autolabel" select="A" /> -->
<xsl:param name="section.autolabel" select="1" />
<xsl:param name="section.label.includes.component.label" select="1" />
<xsl:param name="appendix.autolabel">A</xsl:param>
@@ -6,9 +6,8 @@

<para>
The primary purpose for running BitBake is to produce some kind
of output such as a single installable package, a kernel, a software
development kit, or even a full, board-specific bootable Linux image,
complete with bootloader, kernel, and root filesystem.
of output such as an image, a kernel, or a software development
kit.
Of course, you can execute the <filename>bitbake</filename>
command with options that cause it to execute single tasks,
compile single recipe files, capture or clear data, or simply
@@ -21,48 +20,29 @@
The execution process is launched using the following command
form:
<literallayout class='monospaced'>
     $ bitbake <replaceable>target</replaceable>
     $ bitbake <target>
</literallayout>
For information on the BitBake command and its options,
see
"<link linkend='bitbake-user-manual-command'>The BitBake Command</link>"
section.
<note>
<para>
Prior to executing BitBake, you should take advantage of available
parallel thread execution on your build host by setting the
<link linkend='var-BB_NUMBER_THREADS'><filename>BB_NUMBER_THREADS</filename></link>
variable in your project's <filename>local.conf</filename>
configuration file.
</para>

<para>
A common method to determine this value for your build host is to run
the following:
<literallayout class='monospaced'>
     $ grep processor /proc/cpuinfo
</literallayout>
This command returns the number of processors, which takes into
account hyper-threading.
Thus, a quad-core build host with hyper-threading most likely
shows eight processors, which is the value you would then assign to
<filename>BB_NUMBER_THREADS</filename>.
</para>

<para>
A possibly simpler solution is that some Linux distributions
(e.g. Debian and Ubuntu) provide the <filename>ncpus</filename> command.
</para>
</note>
</para>

<note>
Prior to executing BitBake, you should take advantage of parallel
thread execution by setting the
<link linkend='var-BB_NUMBER_THREADS'><filename>BB_NUMBER_THREADS</filename></link>
variable in your <filename>local.conf</filename>
configuration file.
</note>
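<para>
As a minimal sketch of that setting (the value "8" is illustrative
and depends on your build host), the line in
<filename>local.conf</filename> might look like the following:
<literallayout class='monospaced'>
     BB_NUMBER_THREADS = "8"
</literallayout>
</para>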
<section id='parsing-the-base-configuration-metadata'>
|
||||
<title>Parsing the Base Configuration Metadata</title>
|
||||
|
||||
<para>
|
||||
The first thing BitBake does is parse base configuration
|
||||
metadata.
|
||||
Base configuration metadata consists of your project's
|
||||
Base configuration metadata consists of the
|
||||
<filename>bblayers.conf</filename> file to determine what
|
||||
layers BitBake needs to recognize, all necessary
|
||||
<filename>layer.conf</filename> files (one from each layer),
|
||||
@@ -91,11 +71,10 @@
|
||||
and
|
||||
<link linkend='var-BBFILES'><filename>BBFILES</filename></link>.
|
||||
<filename>BBPATH</filename> is used to search for
|
||||
configuration and class files under the
|
||||
<filename>conf</filename> and <filename>classes</filename>
|
||||
configuration and class files under
|
||||
<filename>conf/</filename> and <filename>class/</filename>
|
||||
directories, respectively.
|
||||
<filename>BBFILES</filename> is used to locate both recipe
|
||||
and recipe append files
|
||||
<filename>BBFILES</filename> is used to find recipe files
|
||||
(<filename>.bb</filename> and <filename>.bbappend</filename>).
|
||||
If there is no <filename>bblayers.conf</filename> file,
|
||||
it is assumed the user has set the <filename>BBPATH</filename>
|
||||
@@ -103,7 +82,7 @@
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Next, the <filename>bitbake.conf</filename> file is located
|
||||
Next, the <filename>bitbake.conf</filename> file is searched
|
||||
using the <filename>BBPATH</filename> variable that was
|
||||
just constructed.
|
||||
The <filename>bitbake.conf</filename> file may also include other
|
||||
@@ -116,35 +95,16 @@
|
||||
Prior to parsing configuration files, Bitbake looks
|
||||
at certain variables, including:
|
||||
<itemizedlist>
|
||||
<listitem><para>
|
||||
<link linkend='var-BB_ENV_WHITELIST'><filename>BB_ENV_WHITELIST</filename></link>
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
<link linkend='var-BB_ENV_EXTRAWHITE'><filename>BB_ENV_EXTRAWHITE</filename></link>
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
<link linkend='var-BB_PRESERVE_ENV'><filename>BB_PRESERVE_ENV</filename></link>
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
<link linkend='var-BB_ORIGENV'><filename>BB_ORIGENV</filename></link>
|
||||
</para></listitem>
|
||||
<listitem><para><link linkend='var-BB_ENV_WHITELIST'><filename>BB_ENV_WHITELIST</filename></link></para></listitem>
|
||||
<listitem><para><link linkend='var-BB_PRESERVE_ENV'><filename>BB_PRESERVE_ENV</filename></link></para></listitem>
|
||||
<listitem><para><link linkend='var-BB_ENV_EXTRAWHITE'><filename>BB_ENV_EXTRAWHITE</filename></link></para></listitem>
|
||||
<listitem><para>
|
||||
<link linkend='var-BITBAKE_UI'><filename>BITBAKE_UI</filename></link>
|
||||
</para></listitem>
|
||||
</itemizedlist>
|
||||
The first four variables in this list relate to how BitBake treats shell
|
||||
environment variables during task execution.
|
||||
By default, BitBake cleans the environment variables and provides tight
|
||||
control over the shell execution environment.
|
||||
However, through the use of these first four variables, you can
|
||||
apply your control regarding the
|
||||
environment variables allowed to be used by BitBake in the shell
|
||||
during execution of tasks.
|
||||
See the
|
||||
"<link linkend='passing-information-into-the-build-task-environment'>Passing Information Into the Build Task Environment</link>"
|
||||
section and the information about these variables in the
|
||||
variable glossary for more information on how they work and
|
||||
on how to use them.
|
||||
You can find information on how to pass environment variables into the BitBake
|
||||
execution environment in the
|
||||
"<link linkend='passing-information-into-the-build-task-environment'>Passing Information Into the Build Task Environment</link>" section.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
@@ -157,18 +117,18 @@
|
||||
optional <filename>conf/bblayers.conf</filename> configuration file.
|
||||
This file is expected to contain a
|
||||
<link linkend='var-BBLAYERS'><filename>BBLAYERS</filename></link>
|
||||
variable that is a space-delimited list of 'layer' directories.
|
||||
variable that is a space delimited list of 'layer' directories.
|
||||
Recall that if BitBake cannot find a <filename>bblayers.conf</filename>
|
||||
file, then it is assumed the user has set the <filename>BBPATH</filename>
|
||||
and <filename>BBFILES</filename> variables directly in the environment.
|
||||
file then it is assumed the user has set the <filename>BBPATH</filename>
|
||||
and <filename>BBFILES</filename> directly in the environment.
|
||||
</para>
|
||||
|
||||
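<para>
As a hedged sketch (the layer paths below are placeholders, not
values taken from this manual), a
<filename>conf/bblayers.conf</filename> file might contain:
<literallayout class='monospaced'>
     BBLAYERS = " \
       /home/user/poky/meta \
       /home/user/poky/meta-yocto \
       "
</literallayout>
</para>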
<para>
|
||||
For each directory (layer) in this list, a <filename>conf/layer.conf</filename>
|
||||
file is located and parsed with the
|
||||
file is searched for and parsed with the
|
||||
<link linkend='var-LAYERDIR'><filename>LAYERDIR</filename></link>
|
||||
variable being set to the directory where the layer was found.
|
||||
The idea is these files automatically set up
|
||||
The idea is these files automatically setup
|
||||
<link linkend='var-BBPATH'><filename>BBPATH</filename></link>
|
||||
and other variables correctly for a given build directory.
|
||||
</para>
|
||||
@@ -183,7 +143,7 @@
|
||||
|
||||
<para>
|
||||
Only variable definitions and include directives are allowed
|
||||
in BitBake <filename>.conf</filename> files.
|
||||
in <filename>.conf</filename> files.
|
||||
Some variables directly influence BitBake's behavior.
|
||||
These variables might have been set from the environment
|
||||
depending on the environment variables previously
|
||||
@@ -206,8 +166,7 @@
|
||||
Other classes that are specified in the configuration using the
|
||||
<link linkend='var-INHERIT'><filename>INHERIT</filename></link>
|
||||
variable are also included.
|
||||
BitBake searches for class files in a
|
||||
<filename>classes</filename> subdirectory under
|
||||
BitBake searches for class files in a "classes" subdirectory under
|
||||
the paths in <filename>BBPATH</filename> in the same way as
|
||||
configuration files.
|
||||
</para>
|
||||
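<para>
For illustration only (the class name here is an assumption, not a
value from this manual), a configuration file might pull in an
extra class like this:
<literallayout class='monospaced'>
     INHERIT += "rm_work"
</literallayout>
</para>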
@@ -230,7 +189,7 @@
If a recipe uses a closing curly brace within the function and
the character has no leading spaces, BitBake produces a parsing
error.
If you use a pair of curly braces in a shell function, the
If you use a pair of curly brace in a shell function, the
closing curly brace must not be located at the start of the line
without leading spaces.
</para>
@@ -302,14 +261,14 @@
One common convention is to use the recipe filename to define
pieces of metadata.
For example, in <filename>bitbake.conf</filename> the recipe
name and version are used to set the variables
name and version set
<link linkend='var-PN'><filename>PN</filename></link> and
<link linkend='var-PV'><filename>PV</filename></link>:
<literallayout class='monospaced'>
     PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}"
     PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[1] or '1.0'}"
     PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[1] or '1.0'}"
     PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[0] or 'defaultpkgname'}"
</literallayout>
In this example, a recipe called "something_1.2.3.bb" would set
In this example, a recipe called "something_1.2.3.bb" sets
<filename>PN</filename> to "something" and
<filename>PV</filename> to "1.2.3".
</para>
@@ -372,55 +331,38 @@
</section>

<section id='bb-bitbake-providers'>
<title>Providers</title>
<title>Preferences and Providers</title>

<para>
Assuming BitBake has been instructed to execute a target
and that all the recipe files have been parsed, BitBake
starts to figure out how to build the target.
BitBake looks through the <filename>PROVIDES</filename> list
for each of the recipes.
A <filename>PROVIDES</filename> list is the list of names by which
the recipe can be known.
Each recipe's <filename>PROVIDES</filename> list is created
implicitly through the recipe's
<link linkend='var-PN'><filename>PN</filename></link> variable
and explicitly through the recipe's
BitBake starts by looking through the
<link linkend='var-PROVIDES'><filename>PROVIDES</filename></link>
variable, which is optional.
set in recipe files.
The default <filename>PROVIDES</filename> for a recipe is its name
(<link linkend='var-PN'><filename>PN</filename></link>),
however, a recipe can provide multiple things.
</para>

<para>
When a recipe uses <filename>PROVIDES</filename>, that recipe's
functionality can be found under an alternative name or names other
than the implicit <filename>PN</filename> name.
As an example, suppose a recipe named <filename>keyboard_1.0.bb</filename>
contained the following:
As an example of adding an extra provider, suppose a recipe named
<filename>foo_1.0.bb</filename> contained the following:
<literallayout class='monospaced'>
     PROVIDES += "fullkeyboard"
     PROVIDES += "virtual/bar_1.0"
</literallayout>
The <filename>PROVIDES</filename> list for this recipe becomes
"keyboard", which is implicit, and "fullkeyboard", which is explicit.
Consequently, the functionality found in
<filename>keyboard_1.0.bb</filename> can be found under two
different names.
</para>
</section>

<section id='bb-bitbake-preferences'>
<title>Preferences</title>

<para>
The <filename>PROVIDES</filename> list is only part of the solution
for figuring out a target's recipes.
Because targets might have multiple providers, BitBake needs
to prioritize providers by determining provider preferences.
The recipe now provides both "foo_1.0" and "virtual/bar_1.0".
The "virtual/" namespace is often used to denote cases where
multiple providers are expected with the user choosing between
them.
Kernels and toolchain components are common cases of this in
OpenEmbedded.
</para>

<para>
A common example in which a target has multiple providers
is "virtual/kernel", which is on the
<filename>PROVIDES</filename> list for each kernel recipe.
Sometimes a target might have multiple providers.
A common example is "virtual/kernel", which is provided by each
kernel recipe.
Each machine often selects the best kernel provider by using a
line similar to the following in the machine configuration file:
<literallayout class='monospaced'>
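     # a hedged, illustrative sketch -- the provider name is a placeholder,
     # not a value taken from this document
     PREFERRED_PROVIDER_virtual/kernel = "linux-yocto"
</literallayout>
</para>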
@@ -435,7 +377,7 @@

<para>
Understanding how providers are chosen is made complicated by the fact
that multiple versions might exist for a given provider.
that multiple versions might exist.
BitBake defaults to the highest version of a provider.
Version comparisons are made using the same method as Debian.
You can use the
@@ -444,19 +386,13 @@
You can influence the order by using the
<link linkend='var-DEFAULT_PREFERENCE'><filename>DEFAULT_PREFERENCE</filename></link>
variable.
</para>

<para>
By default, files have a preference of "0".
Setting <filename>DEFAULT_PREFERENCE</filename> to "-1" makes the
Setting the <filename>DEFAULT_PREFERENCE</filename> to "-1" makes the
recipe unlikely to be used unless it is explicitly referenced.
Setting <filename>DEFAULT_PREFERENCE</filename> to "1" makes it
likely the recipe is used.
<filename>PREFERRED_VERSION</filename> overrides any
<filename>DEFAULT_PREFERENCE</filename> setting.
<filename>DEFAULT_PREFERENCE</filename> is often used to mark newer
and more experimental recipe versions until they have undergone
sufficient testing to be considered stable.
Setting the <filename>DEFAULT_PREFERENCE</filename> to "1" makes it likely the recipe is used.
<filename>PREFERRED_VERSION</filename> overrides any <filename>DEFAULT_PREFERENCE</filename> setting.
<filename>DEFAULT_PREFERENCE</filename> is often used to mark newer and more experimental recipe
versions until they have undergone sufficient testing to be considered stable.
</para>

<para>
@@ -465,16 +401,18 @@
version, unless otherwise specified.
If the recipe in question has a
<link linkend='var-DEFAULT_PREFERENCE'><filename>DEFAULT_PREFERENCE</filename></link>
set lower than the other recipes (default is 0), then
it will not be selected.
set lower than
the other recipes (default is 0), then it will not be
selected.
This allows the person or persons maintaining
the repository of recipe files to specify
their preference for the default selected version.
Additionally, the user can specify their preferred version.
In addition, the user can specify their preferred version.
</para>

<para>
If the first recipe is named <filename>a_1.1.bb</filename>, then the
If the first recipe is named <filename>a_1.1.bb</filename>,
then the
<link linkend='var-PN'><filename>PN</filename></link> variable
will be set to “a”, and the
<link linkend='var-PV'><filename>PV</filename></link>
@@ -482,38 +420,19 @@
</para>

<para>
Thus, if a recipe named <filename>a_1.2.bb</filename> exists, BitBake
If we then have a recipe named <filename>a_1.2.bb</filename>, BitBake
will choose 1.2 by default.
However, if you define the following variable in a
<filename>.conf</filename> file that BitBake parses, you
can change that preference:
However, if we define the following variable in a
<filename>.conf</filename> file that BitBake parses, we
can change that.
<literallayout class='monospaced'>
     PREFERRED_VERSION_a = "1.1"
</literallayout>
</para>

<note>
<para>
It is common for a recipe to provide two versions -- a stable,
numbered (and preferred) version, and a version that is
automatically checked out from a source code repository that
is considered more "bleeding edge" but can be selected only
explicitly.
</para>

<para>
For example, in the OpenEmbedded codebase, there is a standard,
versioned recipe file for BusyBox,
<filename>busybox_1.22.1.bb</filename>,
but there is also a Git-based version,
<filename>busybox_git.bb</filename>, which explicitly contains the line
<literallayout class='monospaced'>
     DEFAULT_PREFERENCE = "-1"
</literallayout>
to ensure that the numbered, stable version is always preferred
unless the developer selects otherwise.
</para>
</note>
<para>
In summary, BitBake has created a list of providers, which is prioritized, for each target.
</para>
</section>

<section id='bb-bitbake-dependencies'>
@@ -576,7 +495,7 @@
As each task completes, a timestamp is written to the directory specified by the
<link linkend='var-STAMP'><filename>STAMP</filename></link> variable.
On subsequent runs, BitBake looks in the build directory within
<filename>tmp/stamps</filename> and does not rerun
<filename>tmp/stamps</filename>and does not rerun
tasks that are already completed unless a timestamp is found to be invalid.
Currently, invalid timestamps are only considered on a per
recipe file basis.
@@ -616,7 +535,7 @@
<title>Executing Tasks</title>

<para>
Tasks can be either a shell task or a Python task.
Tasks can either be a shell task or a Python task.
For shell tasks, BitBake writes a shell script to
<filename>${</filename><link linkend='var-T'><filename>T</filename></link><filename>}/run.do_taskname.pid</filename>
and then executes the script.
@@ -803,13 +722,13 @@
make some dependency and hash information available to the build.
This information includes:
<itemizedlist>
<listitem><para><filename>BB_BASEHASH_task-</filename><replaceable>taskname</replaceable>:
<listitem><para><filename>BB_BASEHASH_task-<taskname></filename>:
The base hashes for each task in the recipe.
</para></listitem>
<listitem><para><filename>BB_BASEHASH_</filename><replaceable>filename</replaceable><filename>:</filename><replaceable>taskname</replaceable>:
<listitem><para><filename>BB_BASEHASH_<filename:taskname></filename>:
The base hashes for each dependent task.
</para></listitem>
<listitem><para><filename>BBHASHDEPS_</filename><replaceable>filename</replaceable><filename>:</filename><replaceable>taskname</replaceable>:
<listitem><para><filename>BBHASHDEPS_<filename:taskname></filename>:
The task dependencies for each task.
</para></listitem>
<listitem><para><filename>BB_TASKHASH</filename>:
@@ -836,9 +755,9 @@
to determine the stamps and delta where these two
stamp trees diverge.
<note>
It is likely that future versions of BitBake will
It is likely that future versions of BitBake with
provide other signature handlers triggered through
additional "-S" parameters.
additional "-S" paramters.
</note>
</para>
@@ -8,14 +8,14 @@
BitBake's fetch module is a standalone piece of library code
that deals with the intricacies of downloading source code
and files from remote systems.
Fetching source code is one of the cornerstones of building software.
Fetching source code is one of the corner stones of building software.
As such, this module forms an important part of BitBake.
</para>

<para>
The current fetch module is called "fetch2" and refers to the
fact that it is the second major version of the API.
The original version is obsolete and has been removed from the codebase.
The original version is obsolete and removed from the codebase.
Thus, in all cases, "fetch" refers to "fetch2" in this
manual.
</para>
@@ -60,19 +60,17 @@
<note>
For convenience, the naming in these examples matches
the variables used by OpenEmbedded.
If you want to see the above code in action, examine
the OpenEmbedded class file <filename>base.bbclass</filename>.
</note>
The <filename>SRC_URI</filename> and <filename>WORKDIR</filename>
variables are not hardcoded into the fetcher, since those fetcher
methods can be (and are) called with different variable names.
variables are not coded into the fetcher.
They variables can (and are) called with different variable names.
In OpenEmbedded for example, the shared state (sstate) code uses
the fetch module to fetch the sstate files.
</para>

<para>
When the <filename>download()</filename> method is called,
BitBake tries to resolve the URLs by looking for source files
BitBake tries to fulfill the URLs by looking for source files
in a specific search order:
<itemizedlist>
<listitem><para><emphasis>Pre-mirror Sites:</emphasis>
@@ -86,7 +84,7 @@
<filename>SRC_URI</filename>).
</para></listitem>
<listitem><para><emphasis>Mirror Sites:</emphasis>
If fetch failures occur, BitBake next uses mirror locations as
If fetch failures occur, BitBake next uses mirror location as
defined by the
<link linkend='var-MIRRORS'><filename>MIRRORS</filename></link>
variable.
@@ -151,14 +149,14 @@
<para>
File integrity is of key importance for reproducing builds.
For non-local archive downloads, the fetcher code can verify
SHA-256 and MD5 checksums to ensure the archives have been
sha256 and md5 checksums to ensure the archives have been
downloaded correctly.
You can specify these checksums by using the
<filename>SRC_URI</filename> variable with the appropriate
varflags as follows:
<literallayout class='monospaced'>
     SRC_URI[md5sum] = "<replaceable>value</replaceable>"
     SRC_URI[sha256sum] = "<replaceable>value</replaceable>"
     SRC_URI[md5sum] = "value"
     SRC_URI[sha256sum] = "value"
</literallayout>
You can also specify the checksums as parameters on the
<filename>SRC_URI</filename> as shown below:
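<literallayout class='monospaced'>
     # a sketch only -- the URL and checksum value are placeholders
     SRC_URI = "http://example.com/foobar.tar.bz2;md5sum=<replaceable>value</replaceable>"
</literallayout>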
@@ -262,8 +260,8 @@
<para>
This submodule handles URLs that begin with
<filename>file://</filename>.
The filename you specify within the URL can be
either an absolute or relative path to a file.
The filename you specify with in the URL can
either be an absolute or relative path to a file.
If the filename is relative, the contents of the
<link linkend='var-FILESPATH'><filename>FILESPATH</filename></link>
variable is used in the same way
@@ -288,64 +286,15 @@
</para>

<para>
Here are a couple of example URLs, the first relative and
the second absolute:
Here are some example URLs:
<literallayout class='monospaced'>
     SRC_URI = "file://relativefile.patch"
     SRC_URI = "file://relativefile.patch;this=ignored"
     SRC_URI = "file:///Users/ich/very_important_software"
</literallayout>
</para>
</section>

<section id='http-ftp-fetcher'>
<title>HTTP/FTP wget fetcher (<filename>http://</filename>, <filename>ftp://</filename>, <filename>https://</filename>)</title>

<para>
This fetcher obtains files from web and FTP servers.
Internally, the fetcher uses the wget utility.
</para>

<para>
The executable and parameters used are specified by the
<filename>FETCHCMD_wget</filename> variable, which defaults
to sensible values.
The fetcher supports a parameter "downloadfilename" that
allows the name of the downloaded file to be specified.
Specifying the name of the downloaded file is useful
for avoiding collisions in
<link linkend='var-DL_DIR'><filename>DL_DIR</filename></link>
when dealing with multiple files that have the same name.
</para>

<para>
Some example URLs are as follows:
<literallayout class='monospaced'>
     SRC_URI = "http://oe.handhelds.org/not_there.aac"
     SRC_URI = "ftp://oe.handhelds.org/not_there_as_well.aac"
     SRC_URI = "ftp://you@oe.handhelds.org/home/you/secret.plan"
</literallayout>
</para>
<note>
Because URL parameters are delimited by semi-colons, this can
introduce ambiguity when parsing URLs that also contain semi-colons,
for example:
<literallayout class='monospaced'>
     SRC_URI = "http://abc123.org/git/?p=gcc/gcc.git;a=snapshot;h=a5dd47"
</literallayout>
Such URLs should be modified by replacing semi-colons with '&' characters:
<literallayout class='monospaced'>
     SRC_URI = "http://abc123.org/git/?p=gcc/gcc.git&a=snapshot&h=a5dd47"
</literallayout>
In most cases this should work. Treating semi-colons and '&' in queries
identically is recommended by the World Wide Web Consortium (W3C).
Note that due to the nature of the URL, you may have to specify the name
of the downloaded file as well:
<literallayout class='monospaced'>
     SRC_URI = "http://abc123.org/git/?p=gcc/gcc.git&a=snapshot&h=a5dd47;downloadfilename=myfile.bz2"
</literallayout>
</note>
</section>

<section id='cvs-fetcher'>
<title>CVS fetcher (<filename>cvs://</filename>)</title>

@@ -364,7 +313,7 @@
A special value of "now" causes the checkout to
be updated on every build.
</para></listitem>
<listitem><para><emphasis><link linkend='var-CVSDIR'><filename>CVSDIR</filename></link>:</emphasis>
<listitem><para><emphasis><filename>CVSDIR</filename>:</emphasis>
Specifies where a temporary checkout is saved.
The location is often <filename>DL_DIR/cvs</filename>.
</para></listitem>
@@ -385,7 +334,7 @@
The supported parameters are as follows:
<itemizedlist>
<listitem><para><emphasis>"method":</emphasis>
The protocol over which to communicate with the CVS server.
The protocol over which to communicate with the cvs server.
By default, this protocol is "pserver".
If "method" is set to "ext", BitBake examines the
"rsh" parameter and sets <filename>CVS_RSH</filename>.
@@ -413,8 +362,7 @@
Effectively, you are renaming the output directory
to which the module is unpacked.
You are forcing the module into a special
directory relative to
<link linkend='var-CVSDIR'><filename>CVSDIR</filename></link>.
directory relative to <filename>CVSDIR</filename>.
</para></listitem>
<listitem><para><emphasis>"rsh"</emphasis>
Used in conjunction with the "method" parameter.
@@ -446,6 +394,36 @@
</para>
</section>
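<para>
As a hedged illustration only (the server, module, and tag names
below are placeholders, not values from this manual), a CVS URL
might look like the following:
<literallayout class='monospaced'>
     SRC_URI = "cvs://anonymous@cvs.example.org/cvsroot;module=mymodule;tag=V0-99-81;method=pserver"
</literallayout>
</para>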
<section id='http-ftp-fetcher'>
<title>HTTP/FTP wget fetcher (<filename>http://</filename>, <filename>ftp://</filename>, <filename>https://</filename>)</title>

<para>
This fetcher obtains files from web and FTP servers.
Internally, the fetcher uses the wget utility.
</para>

<para>
The executable and parameters used are specified by the
<filename>FETCHCMD_wget</filename> variable, which defaults
to a sensible values.
The fetcher supports a parameter "downloadfilename" that
allows the name of the downloaded file to be specified.
Specifying the name of the downloaded file is useful
for avoiding collisions in
<link linkend='var-DL_DIR'><filename>DL_DIR</filename></link>
when dealing with multiple files that have the same name.
</para>

<para>
Some example URLs are as follows:
<literallayout class='monospaced'>
     SRC_URI = "http://oe.handhelds.org/not_there.aac"
     SRC_URI = "ftp://oe.handhelds.org/not_there_as_well.aac"
     SRC_URI = "ftp://you@oe.handheld.sorg/home/you/secret.plan"
</literallayout>
</para>
</section>

<section id='svn-fetcher'>
<title>Subversion (SVN) Fetcher (<filename>svn://</filename>)</title>

@@ -455,9 +433,9 @@
The executable used is specified by
<filename>FETCHCMD_svn</filename>, which defaults
to "svn".
The fetcher's temporary working directory is set by
<link linkend='var-SVNDIR'><filename>SVNDIR</filename></link>,
which is usually <filename>DL_DIR/svn</filename>.
The fetcher's temporary working directory is set
by <filename>SVNDIR</filename>, which is usually
<filename>DL_DIR/svn</filename>.
</para>

<para>
@@ -488,13 +466,6 @@
compile-time when set to "keep".
By default, these directories are removed.
</para></listitem>
<listitem><para><emphasis>"transportuser":</emphasis>
When required, sets the username for the transport.
By default, this parameter is empty.
The transport username is different than the username
used in the main URL, which is passed to the subversion
command.
</para></listitem>
</itemizedlist>
Following are two examples using svn:
<literallayout class='monospaced'>
@@ -505,15 +476,14 @@
</section>

<section id='git-fetcher'>
<title>Git Fetcher (<filename>git://</filename>)</title>
<title>GIT Fetcher (<filename>git://</filename>)</title>

<para>
This fetcher submodule fetches code from the Git
source control system.
The fetcher works by creating a bare clone of the
remote into
<link linkend='var-GITDIR'><filename>GITDIR</filename></link>,
which is usually <filename>DL_DIR/git2</filename>.
remote into <filename>GITDIR</filename>, which is
usually <filename>DL_DIR/git</filename>.
This bare clone is then cloned into the work directory during the
unpack stage when a specific tree is checked out.
This is done using alternates and by reference to
@@ -581,7 +551,7 @@
network.
For that reason, tags are often not used.
As far as Git is concerned, the "tag" parameter behaves
effectively the same as the "rev" parameter.
effectively the same as the "revision" parameter.
</para></listitem>
<listitem><para><emphasis>"subpath":</emphasis>
Limits the checkout to a specific subpath of the tree.
@@ -600,122 +570,6 @@
</para>
</section>
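<para>
As a hedged illustration only (the host, repository path, and
branch below are placeholders), a Git URL might look like the
following:
<literallayout class='monospaced'>
     SRC_URI = "git://git.example.org/myproject.git;protocol=git;branch=master"
</literallayout>
</para>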
<section id='gitsm-fetcher'>
<title>Git Submodule Fetcher (<filename>gitsm://</filename>)</title>

<para>
This fetcher submodule inherits from the
<link linkend='git-fetcher'>Git fetcher</link> and extends
that fetcher's behavior by fetching a repository's submodules.
<link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>
is passed to the Git fetcher as described in the
"<link linkend='git-fetcher'>Git Fetcher (<filename>git://</filename>)</link>"
section.
<note>
<title>Notes and Warnings</title>
<para>
You must clean a recipe when switching between
'<filename>git://</filename>' and
'<filename>gitsm://</filename>' URLs.
</para>

<para>
The Git Submodules fetcher is not a complete fetcher
implementation.
The fetcher has known issues where it does not use the
normal source mirroring infrastructure properly.
</para>
</note>
</para>
</section>

<section id='clearcase-fetcher'>
<title>ClearCase Fetcher (<filename>ccrc://</filename>)</title>

<para>
This fetcher submodule fetches code from a
<ulink url='http://en.wikipedia.org/wiki/Rational_ClearCase'>ClearCase</ulink>
repository.
</para>

<para>
To use this fetcher, make sure your recipe has proper
<link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>,
<link linkend='var-SRCREV'><filename>SRCREV</filename></link>, and
<link linkend='var-PV'><filename>PV</filename></link> settings.
Here is an example:
<literallayout class='monospaced'>
     SRC_URI = "ccrc://cc.example.org/ccrc;vob=/example_vob;module=/example_module"
     SRCREV = "EXAMPLE_CLEARCASE_TAG"
     PV = "${@d.getVar("SRCREV", False).replace("/", "+")}"
</literallayout>
The fetcher uses the <filename>rcleartool</filename> or
<filename>cleartool</filename> remote client, depending on
which one is available.
</para>

<para>
Following are options for the <filename>SRC_URI</filename>
statement:
<itemizedlist>
<listitem><para><emphasis><filename>vob</filename></emphasis>:
The name, which must include the
prepending "/" character, of the ClearCase VOB.
This option is required.
</para></listitem>
<listitem><para><emphasis><filename>module</filename></emphasis>:
The module, which must include the
prepending "/" character, in the selected VOB.
<note>
The <filename>module</filename> and <filename>vob</filename>
options are combined to create the <filename>load</filename> rule in
the view config spec.
As an example, consider the <filename>vob</filename> and
<filename>module</filename> values from the
<filename>SRC_URI</filename> statement at the start of this section.
Combining those values results in the following:
<literallayout class='monospaced'>
     load /example_vob/example_module
</literallayout>
</note>
</para></listitem>
<listitem><para><emphasis><filename>proto</filename></emphasis>:
The protocol, which can be either <filename>http</filename> or
<filename>https</filename>.
</para></listitem>
</itemizedlist>
</para>

<para>
By default, the fetcher creates a configuration specification.
If you want this specification written to an area other than the default,
use the <filename>CCASE_CUSTOM_CONFIG_SPEC</filename> variable
in your recipe to define where the specification is written.
<note>
The <filename>SRCREV</filename> loses its functionality if you
specify this variable.
However, <filename>SRCREV</filename> is still used to label the
archive after a fetch even though it does not define what is
fetched.
</note>
</para>

<para>
Here are a couple of other behaviors worth mentioning:
<itemizedlist>
<listitem><para>
When using <filename>cleartool</filename>, the login of
<filename>cleartool</filename> is handled by the system.
The login requires no special steps.
</para></listitem>
<listitem><para>
In order to use <filename>rcleartool</filename> with authenticated
users, an "rcleartool login" is necessary before using the fetcher.
</para></listitem>
</itemizedlist>
</para>
</section>

<section id='other-fetchers'>
<title>Other Fetchers</title>

@@ -728,6 +582,9 @@
<listitem><para>
Perforce (<filename>p4://</filename>)
</para></listitem>
<listitem><para>
Git Submodules (<filename>gitsm://</filename>)
</para></listitem>
<listitem><para>
Trees using Git Annex (<filename>gitannex://</filename>)
</para></listitem>
@@ -47,6 +47,7 @@
     -rw-rw-r--. 1 wmat wmat   849 Nov 26 04:55 HEADER
     drwxrwxr-x. 5 wmat wmat  4096 Jan 31 13:44 lib
     -rw-rw-r--. 1 wmat wmat   195 Nov 26 04:55 MANIFEST.in
     -rwxrwxr-x. 1 wmat wmat  3195 Jan 31 11:57 setup.py
     -rw-rw-r--. 1 wmat wmat  2887 Nov 26 04:55 TODO
</literallayout>
</para>
@@ -220,7 +221,7 @@
<para>From your shell, enter the following commands to set and
export the <filename>BBPATH</filename> variable:
<literallayout class='monospaced'>
     $ BBPATH="<replaceable>projectdirectory</replaceable>"
     $ BBPATH="<projectdirectory>"
     $ export BBPATH
</literallayout>
Use your actual project directory in the command.
@@ -470,7 +471,7 @@ ERROR: Unable to parse base: ParseError in configuration INHERITs: Could not inh
Time: 00:00:00
Parsing of 1 .bb files complete (0 cached, 1 parsed). 1 targets, 0 skipped, 0 masked, 0 errors.
NOTE: Resolving any missing task queue dependencies
NOTE: Preparing RunQueue
NOTE: Preparing runqueue
NOTE: Executing RunQueue Tasks
********************
*                  *
@@ -8,9 +8,9 @@
|
||||
Welcome to the BitBake User Manual.
|
||||
This manual provides information on the BitBake tool.
|
||||
The information attempts to be as independent as possible regarding
|
||||
systems that use BitBake, such as OpenEmbedded and the
|
||||
Yocto Project.
|
||||
In some cases, scenarios or examples within the context of
|
||||
systems that use BitBake, such as the Yocto Project and
|
||||
OpenEmbedded.
|
||||
In some cases, scenarios or examples that within the context of
|
||||
a build system are used in the manual to help with understanding.
|
||||
For these cases, the manual clearly states the context.
|
||||
</para>
|
||||
@@ -35,31 +35,28 @@
|
||||
<listitem><para>
|
||||
BitBake executes tasks according to provided
|
||||
metadata that builds up the tasks.
|
||||
Metadata is stored in recipe (<filename>.bb</filename>)
|
||||
and related recipe "append" (<filename>.bbappend</filename>)
|
||||
files, configuration (<filename>.conf</filename>) and
|
||||
underlying include (<filename>.inc</filename>) files, and
|
||||
in class (<filename>.bbclass</filename>) files.
|
||||
The metadata provides
|
||||
Metadata is stored in recipe (<filename>.bb</filename>),
|
||||
configuration (<filename>.conf</filename>), and class
|
||||
(<filename>.bbclass</filename>) files and provides
|
||||
BitBake with instructions on what tasks to run and
|
||||
the dependencies between those tasks.
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
BitBake includes a fetcher library for obtaining source
|
||||
code from various places such as local files, source control
systems, or websites.
code from various places such as source control
systems or websites.
</para></listitem>
<listitem><para>
The instructions for each unit to be built (e.g. a piece
of software) are known as "recipe" files and
of software) are known as recipe files and
contain all the information about the unit
(dependencies, source file locations, checksums, description
and so on).
</para></listitem>
<listitem><para>
BitBake includes a client/server abstraction and can
be used from a command line or used as a service over
XML-RPC and has several different user interfaces.
be used from a command line or used as a service over XMLRPC and
has several different user interfaces.
</para></listitem>
</itemizedlist>
</para>
@@ -72,7 +69,7 @@
BitBake was originally a part of the OpenEmbedded project.
It was inspired by the Portage package management system
used by the Gentoo Linux distribution.
On December 7, 2004, OpenEmbedded project team member
On December 7, 2004, OpenEmbedded project team member,
Chris Larson split the project into two distinct pieces:
<itemizedlist>
<listitem><para>BitBake, a generic task executor</para></listitem>
@@ -82,11 +79,8 @@
Today, BitBake is the primary basis of the
<ulink url="http://www.openembedded.org/">OpenEmbedded</ulink>
project, which is being used to build and maintain Linux
distributions such as the
<ulink url='http://www.angstrom-distribution.org/'>Angstrom Distribution</ulink>,
and which is also being used as the build tool for Linux projects
such as the
<ulink url='http://www.yoctoproject.org'>Yocto Project</ulink>.
distributions such as the Angstrom Distribution and which is used
as the build tool for Linux projects such as the Yocto Project.
</para>

<para>
@@ -94,7 +88,7 @@
an aspiring embedded Linux distribution.
All of the build systems used by traditional desktop Linux
distributions lacked important functionality, and none of the
ad hoc Buildroot-based systems, prevalent in the
ad-hoc Buildroot-based systems, prevalent in the
embedded space, were scalable or maintainable.
</para>

@@ -144,7 +138,7 @@
projects for their builds.
</para></listitem>
<listitem><para>
Provide an inheritance mechanism to share
Provide an inheritance mechanism that share
common metadata between many packages.
</para></listitem>
</itemizedlist>
@@ -157,7 +151,7 @@
</para></listitem>
<listitem><para>
Split metadata into layers and allow layers
to enhance or override other layers.
to override each other.
</para></listitem>
<listitem><para>
Allow representation of a given set of input variables
@@ -184,14 +178,14 @@
what tasks are required to run, and executes those tasks.
Similar to GNU Make, BitBake controls how software is
built.
GNU Make achieves its control through "makefiles", while
GNU Make achieves its control through "makefiles".
BitBake uses "recipes".
</para>

<para>
BitBake extends the capabilities of a simple
tool like GNU Make by allowing for the definition of much more
complex tasks, such as assembling entire embedded Linux
tool like GNU Make by allowing for much more complex tasks
to be completed, such as assembling entire embedded Linux
distributions.
</para>

@@ -209,20 +203,14 @@
<filename>.bb</filename>, are the most basic metadata files.
These recipe files provide BitBake with the following:
<itemizedlist>
<listitem><para>Descriptive information about the
package (author, homepage, license, and so on)</para></listitem>
<listitem><para>Descriptive information about the package</para></listitem>
<listitem><para>The version of the recipe</para></listitem>
<listitem><para>Existing dependencies (both build
and runtime dependencies)</para></listitem>
<listitem><para>Where the source code resides and
how to fetch it</para></listitem>
<listitem><para>Whether the source code requires
any patches, where to find them, and how to apply
them</para></listitem>
<listitem><para>How to configure and compile the
source code</para></listitem>
<listitem><para>Existing Dependencies</para></listitem>
<listitem><para>Where the source code resides</para></listitem>
<listitem><para>Whether the source code requires any patches</para></listitem>
<listitem><para>How to compile the source code</para></listitem>
<listitem><para>Where on the target machine to install the
package or packages created</para></listitem>
package being compiled</para></listitem>
</itemizedlist>
</para>
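
The recipe contents listed in the hunk above map directly onto variables in a .bb file; as a minimal sketch (hypothetical names and URL, not taken from the manual), e.g. a file named hello_1.0.bb:

    SUMMARY = "Hello world program"
    HOMEPAGE = "http://example.net/hello"
    LICENSE = "MIT"
    DEPENDS = "zlib"
    RDEPENDS_${PN} = "ncurses"
    SRC_URI = "http://example.net/hello/hello-${PV}.tar.gz \
               file://fix-makefile.patch"

The description lives in SUMMARY, HOMEPAGE, and LICENSE; the version comes from the file name (PV would be "1.0" here); DEPENDS and RDEPENDS express build-time and runtime dependencies; and SRC_URI covers both where the source resides and which patches apply.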

@@ -234,11 +222,7 @@
The term "package" is also commonly used to describe recipes.
However, since the same word is used to describe packaged
output from a project, it is best to maintain a single
descriptive term - "recipes".
Put another way, a single "recipe" file is quite capable
of generating a number of related but separately installable
"packages".
In fact, that ability is fairly common.
descriptive term, "recipes".
</note>
</para>
</section>
@@ -273,7 +257,7 @@
called <filename>base.bbclass</filename>.
You can find this file in the
<filename>classes</filename> directory.
The <filename>base.bbclass</filename> class files is special since it
The <filename>base.bbclass</filename> is special since it
is always included automatically for all recipes
and classes.
This class contains definitions for standard basic tasks such
@@ -300,8 +284,7 @@
To illustrate how you can use layers to keep things modular,
consider customizations you might make to support a specific target machine.
These types of customizations typically reside in a special layer,
rather than a general layer, called a Board Support Package (BSP)
Layer.
rather than a general layer, called a Board Specific Package (BSP) Layer.
Furthermore, the machine customizations should be isolated from
recipes and metadata that support a new GUI environment, for
example.
@@ -321,8 +304,9 @@

<para>
Append files, which are files that have the
<filename>.bbappend</filename> file extension, extend or
override information in an existing recipe file.
<filename>.bbappend</filename> file extension, add or
extend build information to an existing
recipe file.
</para>

<para>
@@ -335,9 +319,8 @@
</para>

<para>
Information in append files extends or
overrides the information in the underlying,
similarly-named recipe files.
Information in append files overrides the information in the
similarly-named recipe file.
</para>

<para>
@@ -362,12 +345,6 @@
However, if you named the append file
<filename>busybox_1.%.bbappend</filename>, then you would have a match.
</para>

<para>
In the most general case, you could name the append file something as
simple as <filename>busybox_%.bbappend</filename> to be entirely
version independent.
</para>
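
To make the "%" matching rules above concrete, here is a hypothetical set of append-file names for a recipe named busybox_1.21.1.bb:

    busybox_1.21.1.bbappend    # matches this exact version only
    busybox_1.%.bbappend       # matches any 1.x version of busybox
    busybox_%.bbappend         # matches any version of busybox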
</section>
</section>

@@ -436,20 +413,6 @@
you have a directory entitled
<filename>bitbake-1.17.0</filename>.
</para></listitem>
<listitem><para><emphasis>Using the BitBake that Comes With Your
Build Checkout:</emphasis>
A final possibility for getting a copy of BitBake is that it
already comes with your checkout of a larger Bitbake-based build
system, such as Poky or Yocto Project.
Rather than manually checking out individual layers and
gluing them together yourself, you can check
out an entire build system.
The checkout will already include a version of BitBake that
has been thoroughly tested for compatibility with the other
components.
For information on how to check out a particular BitBake-based
build system, consult that build system's supporting documentation.
</para></listitem>
</itemizedlist>
</para>
</section>
@@ -471,7 +434,7 @@
Following is the usage and syntax for BitBake:
<literallayout class='monospaced'>
$ bitbake -h
Usage: bitbake [options] [recipename/target recipe:do_task ...]
Usage: bitbake [options] [recipename/target ...]

Executes the specified task (default is 'build') for a given set of target recipes (.bb files).
It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which
@@ -508,16 +471,14 @@
-D, --debug Increase the debug level. You can specify this more
than once.
-n, --dry-run Don't execute, just go through the motions.
-S SIGNATURE_HANDLER, --dump-signatures=SIGNATURE_HANDLER
-S DUMP_SIGNATURES, --dump-signatures=DUMP_SIGNATURES
Dump out the signature construction information, with
no task execution. The SIGNATURE_HANDLER parameter is
passed to the handler. Two common values are none and
printdiff but the handler may define more/less. none
means only dump the signature, printdiff means compare
the dumped signature with the cached one.
no task execution. Parameters are passed to the
signature handling code, use 'none' if no specific
handler is required.
-p, --parse-only Quit after parsing the BB recipes.
-s, --show-versions Show current and preferred versions of all recipes.
-e, --environment Show the global or per-recipe environment complete
-e, --environment Show the global or per-package environment complete
with information about where variables were
set/changed.
-g, --graphviz Save dependency tree information for the specified
@@ -529,13 +490,9 @@
-l DEBUG_DOMAINS, --log-domains=DEBUG_DOMAINS
Show debug logging for the specified logging domains
-P, --profile Profile the command and save reports.
-u UI, --ui=UI The user interface to use (depexp, goggle, hob, knotty
or ncurses - default knotty).
-u UI, --ui=UI The user interface to use (e.g. knotty, hob, depexp).
-t SERVERTYPE, --servertype=SERVERTYPE
Choose which server type to use (process or xmlrpc -
default process).
--token=XMLRPCTOKEN Specify the connection token to be used when
connecting to a remote server.
Choose which server to use, process or xmlrpc.
--revisions-changed Set the exit code depending on whether upstream
floating revisions have changed or not.
--server-only Run bitbake without a UI, only starting a server
@@ -548,10 +505,6 @@
-m, --kill-server Terminate the remote server.
--observe-only Connect to a server as an observing-only client.
--status-only Check the status of the remote bitbake server.
-w WRITEEVENTLOG, --write-log=WRITEEVENTLOG
Writes the event log of the build to a bitbake event
json file. Use '' (empty string) to assign the name
automatically.
</literallayout>
</para>
</section>
@@ -606,14 +559,14 @@
when one wants to manage multiple <filename>.bb</filename>
files.
Clearly there needs to be a way to tell BitBake what
files are available and, of those, which you
files are available, and of those, which you
want to execute.
There also needs to be a way for each recipe
to express its dependencies, both for build-time and
runtime.
There must be a way for you to express recipe preferences
when multiple recipes provide the same functionality, or when
there are multiple versions of a recipe.
there are multiple versions of a recipe.
</para>

<para>

@@ -159,10 +159,6 @@
using the "+=" and "=+" operators.
These operators insert a space between the current
value and prepended or appended value.
</para>

<para>
These operators take immediate effect during parsing.
Here are some examples:
<literallayout class='monospaced'>
B = "bval"
@@ -182,10 +178,6 @@
<para>
If you want to append or prepend values without an
inserted space, use the ".=" and "=." operators.
</para>

<para>
These operators take immediate effect during parsing.
Here are some examples:
<literallayout class='monospaced'>
B = "bval"
@@ -206,13 +198,6 @@
You can also append and prepend a variable's value
using an override style syntax.
When you use this syntax, no spaces are inserted.
</para>

<para>
These operators differ from the ":=", ".=", "=.", "+=", and "=+"
operators in that their effects are deferred
until after parsing completes rather than being immediately
applied.
Here are some examples:
<literallayout class='monospaced'>
B = "bval"
@@ -232,6 +217,13 @@
override syntax.
</note>
</para>

<para>
The operators "_append" and "_prepend" differ from
the operators ".=" and "=." in that they are deferred
until after parsing completes rather than being immediately
applied.
</para>
</section>
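
A short sketch contrasting the two behaviours described in the hunks above (variable name is hypothetical):

    A = "a"
    A .= "b"          # immediate: A is "ab" as soon as this line is parsed
    A_append = "c"    # deferred: applied only after parsing completes
    A_append = "d"
    # Final value of A: "abcd" (neither ".=" nor "_append" inserts a space)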

<section id='removing-override-style-syntax'>
@@ -291,18 +283,7 @@
The variable <filename>FOO</filename> has two flags:
<filename>a</filename> and <filename>b</filename>.
The flags are immediately set to "abc" and "123", respectively.
The <filename>a</filename> flag becomes "abc 456".
</para>

<para>
No need exists to pre-define variable flags.
You can simply start using them.
One extremely common application
is to attach some brief documentation to a BitBake variable as
follows:
<literallayout class='monospaced'>
CACHE[doc] = "The directory holding the cache of the metadata."
</literallayout>
The <filename>a</filename> flag becomes "abc456".
</para>
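
Assuming the "+=" flag syntax implied by the first variant above, the flag example would read roughly as follows (a sketch, not quoted from the manual):

    FOO[a] = "abc"
    FOO[b] = "123"
    FOO[a] += "456"
    # FOO itself stays unset; flag "a" is "abc 456", flag "b" is "123"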
</section>

@@ -317,19 +298,7 @@
DATE = "${@time.strftime('%Y%m%d',time.gmtime())}"
</literallayout>
This example results in the <filename>DATE</filename>
variable being set to the current date.
</para>

<para>
Probably the most common use of this feature is to extract
the value of variables from BitBake's internal data dictionary,
<filename>d</filename>.
The following lines select the values of a package name
and its version number, respectively:
<literallayout class='monospaced'>
PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}"
PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[1] or '1.0'}"
</literallayout>
variable becoming the current date.
</para>
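
Any Python expression can be used this way, including ones that read the datastore through d; a small hypothetical example:

    BPN = "hello"
    UPPER = "${@d.getVar('BPN', True).upper()}"    # expands to "HELLO"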
</section>

@@ -379,12 +348,6 @@
You can use <filename>OVERRIDES</filename> to conditionally select
a specific version of a variable and to conditionally
append or prepend the value of a variable.
<note>
Overrides can only use lower-case characters.
Additionally, underscores are not permitted in override names
as they are used to separate overrides from each other and
from the variable name.
</note>
<itemizedlist>
<listitem><para><emphasis>Selecting a Variable:</emphasis>
The <filename>OVERRIDES</filename> variable is
@@ -409,25 +372,6 @@
You select the os-specific version of the <filename>TEST</filename>
variable by appending the "os" override to the variable
(i.e.<filename>TEST_os</filename>).
</para>

<para>
To better understand this, consider a practical example
that assumes an OpenEmbedded metadata-based Linux
kernel recipe file.
The following lines from the recipe file first set
the kernel branch variable <filename>KBRANCH</filename>
to a default value, then conditionally override that
value based on the architecture of the build:
<literallayout class='monospaced'>
KBRANCH = "standard/base"
KBRANCH_qemuarm = "standard/arm-versatile-926ejs"
KBRANCH_qemumips = "standard/mti-malta32"
KBRANCH_qemuppc = "standard/qemuppc"
KBRANCH_qemux86 = "standard/common-pc/base"
KBRANCH_qemux86-64 = "standard/common-pc-64/base"
KBRANCH_qemumips64 = "standard/mti-malta64"
</literallayout>
</para></listitem>
<listitem><para><emphasis>Appending and Prepending:</emphasis>
BitBake also supports append and prepend operations to
@@ -441,19 +385,6 @@
</literallayout>
In this example, <filename>DEPENDS</filename> becomes
"glibc ncurses libmad".
</para>

<para>
Again, using an OpenEmbedded metadata-based
kernel recipe file as an example, the
following lines will conditionally append to the
<filename>KERNEL_FEATURES</filename> variable based
on the architecture:
<literallayout class='monospaced'>
KERNEL_FEATURES_append = " ${KERNEL_EXTRA_FEATURES}"
KERNEL_FEATURES_append_qemux86=" cfg/sound.scc cfg/paravirt_kvm.scc"
KERNEL_FEATURES_append_qemux86-64=" cfg/sound.scc cfg/paravirt_kvm.scc"
</literallayout>
</para></listitem>
</itemizedlist>
</para>
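
Putting both override mechanisms from the hunks above together, a compact sketch (values are illustrative):

    OVERRIDES = "architecture:os:machine"
    TEST = "default"
    TEST_os = "osspecific"
    # TEST expands to "osspecific" because "os" appears in OVERRIDES

    DEPENDS = "glibc ncurses"
    DEPENDS_append_machine = " libmad"
    # "machine" is in OVERRIDES, so DEPENDS becomes "glibc ncurses libmad"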
@@ -609,13 +540,8 @@
BitBake uses the
<link linkend='var-BBPATH'><filename>BBPATH</filename></link>
variable to locate needed include and class files.
Additionally, BitBake searches the current directory for
<filename>include</filename> and <filename>require</filename>
directives.
<note>
The <filename>BBPATH</filename> variable is analogous to
the environment variable <filename>PATH</filename>.
</note>
The <filename>BBPATH</filename> variable is analogous to
the environment variable <filename>PATH</filename>.
</para>

<para>
@@ -663,51 +589,6 @@
after the "inherit" statement.
</note>
</para>

<para>
If necessary, it is possible to inherit a class
conditionally by using
a variable expression after the <filename>inherit</filename>
statement.
Here is an example:
<literallayout class='monospaced'>
inherit ${VARNAME}
</literallayout>
If <filename>VARNAME</filename> is going to be set, it needs
to be set before the <filename>inherit</filename> statement
is parsed.
One way to achieve a conditional inherit in this case is to use
overrides:
<literallayout class='monospaced'>
VARIABLE = ""
VARIABLE_someoverride = "myclass"
</literallayout>
</para>

<para>
Another method is by using anonymous Python.
Here is an example:
<literallayout class='monospaced'>
python () {
    if condition == value:
        d.setVar('VARIABLE', 'myclass')
    else:
        d.setVar('VARIABLE', '')
}
</literallayout>
</para>

<para>
Alternatively, you could use an in-line Python expression
in the following form:
<literallayout class='monospaced'>
inherit ${@'classname' if condition else ''}
inherit ${@functionname(params)}
</literallayout>
In all cases, if the expression evaluates to an empty
string, the statement does not trigger a syntax error
because it becomes a no-op.
</para>
</section>

<section id='include-directive'>
@@ -815,16 +696,6 @@
<filename>INHERIT</filename> to inherit a class effectively
inherits the class globally (i.e. for all recipes).
</note>
If you want to use the directive to inherit
multiple classes, you can provide them on the same line in the
<filename>local.conf</filename> file.
Use spaces to separate the classes.
The following example shows how to inherit both the
<filename>autotools</filename> and <filename>pkgconfig</filename>
classes:
<literallayout class='monospaced'>
inherit autotools pkgconfig
</literallayout>
</para>
</section>
</section>
@@ -906,18 +777,6 @@
is a global variable and is always automatically
available.
</para>

<note>
Variable expressions (e.g. <filename>${X}</filename>) are no
longer expanded within Python functions.
This behavior is intentional in order to allow you to freely
set variable values to expandable expressions without having
them expanded prematurely.
If you do wish to expand a variable within a Python function,
use <filename>d.getVar("X", True)</filename>.
Or, for more complicated expressions, use
<filename>d.expand()</filename>.
</note>
</section>
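
In practice the note above means a Python function fetches and expands variables explicitly instead of relying on in-place ${X} expansion; a minimal sketch (function name is hypothetical):

    python do_reportdir() {
        workdir = d.getVar('WORKDIR', True)     # expanded value
        logdir = d.expand('${WORKDIR}/logs')    # expand a larger expression
        bb.note("workdir=%s logdir=%s" % (workdir, logdir))
    }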

<section id='python-functions'>
@@ -1030,7 +889,7 @@
<listitem><para>
The class needs to define the function as follows:
<literallayout class='monospaced'>
<replaceable>classname</replaceable><filename>_</filename><replaceable>functionname</replaceable>
<classname>_<functionname>
</literallayout>
For example, if you have a class file
<filename>bar.bbclass</filename> and a function named
@@ -1044,7 +903,7 @@
The class needs to contain the <filename>EXPORT_FUNCTIONS</filename>
statement as follows:
<literallayout class='monospaced'>
EXPORT_FUNCTIONS <replaceable>functionname</replaceable>
EXPORT_FUNCTIONS <functionname>
</literallayout>
For example, continuing with the same example, the
statement in the <filename>bar.bbclass</filename> would be
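
Since the hunk is truncated here, the bar.bbclass example presumably fits together roughly like this (an assumed completion, not quoted from the manual):

    # In bar.bbclass:
    bar_do_foo() {
        echo "bar's default do_foo"
    }
    EXPORT_FUNCTIONS do_foo

    # A recipe that inherits bar now has a do_foo task that runs
    # bar_do_foo, and may still define its own do_foo to override it.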
@@ -1143,41 +1002,13 @@
<title>Deleting a Task</title>

<para>
As well as being able to add tasks, you can delete them.
Simply use the <filename>deltask</filename> command to
delete a task.
As well as being able to add tasks, tasks can also be deleted.
This is done simply with <filename>deltask</filename> command.
For example, to delete the example task used in the previous
sections, you would use:
<literallayout class='monospaced'>
deltask printdate
</literallayout>
If you delete a task using the <filename>deltask</filename>
command and the task has dependencies, the dependencies are
not reconnected.
For example, suppose you have three tasks named
<filename>do_a</filename>, <filename>do_b</filename>, and
<filename>do_c</filename>.
Furthermore, <filename>do_c</filename> is dependent on
<filename>do_b</filename>, which in turn is dependent on
<filename>do_a</filename>.
Given this scenario, if you use <filename>deltask</filename>
to delete <filename>do_b</filename>, the implicit dependency
relationship between <filename>do_c</filename> and
<filename>do_a</filename> through <filename>do_b</filename>
no longer exists, and <filename>do_c</filename> dependencies
are not updated to include <filename>do_a</filename>.
Thus, <filename>do_c</filename> is free to run before
<filename>do_a</filename>.
</para>

<para>
If you want dependencies such as these to remain intact, use
the <filename>noexec</filename> varflag to disable the task
instead of using the <filename>deltask</filename> command to
delete it:
<literallayout class='monospaced'>
do_b[noexec] = "1"
</literallayout>
</para>
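
The do_a/do_b/do_c scenario above, written out as a sketch (hypothetical tasks, showing the two alternatives):

    addtask a
    addtask b after do_a
    addtask c after do_b

    deltask do_b        # do_c may now run before do_a
    # ...or, to keep the chain intact while skipping do_b's work:
    do_b[noexec] = "1"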
</section>

@@ -1185,18 +1016,10 @@
<title>Passing Information Into the Build Task Environment</title>

<para>
When running a task, BitBake tightly controls the shell execution
When running a task, BitBake tightly controls the execution
environment of the build tasks to make
sure unwanted contamination from the build machine cannot
influence the build.
<note>
By default, BitBake cleans the environment to include only those
things exported or listed in its whitelist to ensure that the build
environment is reproducible and consistent.
You can prevent this "cleaning" by setting the
<link linkend='var-BB_PRESERVE_ENV'><filename>BB_PRESERVE_ENV</filename></link>
variable.
</note>
Consequently, if you do want something to get passed into the
build task environment, you must take these two steps:
<orderedlist>
@@ -1204,16 +1027,14 @@
Tell BitBake to load what you want from the environment
into the datastore.
You can do so through the
<link linkend='var-BB_ENV_WHITELIST'><filename>BB_ENV_WHITELIST</filename></link>
and
<link linkend='var-BB_ENV_EXTRAWHITE'><filename>BB_ENV_EXTRAWHITE</filename></link>
variables.
variable.
For example, assume you want to prevent the build system from
accessing your <filename>$HOME/.ccache</filename>
directory.
The following command "whitelists" the environment variable
<filename>CCACHE_DIR</filename> causing BitBack to allow that
variable into the datastore:
The following command tells BitBake to load
<filename>CCACHE_DIR</filename> from the environment into
the datastore:
<literallayout class='monospaced'>
export BB_ENV_EXTRAWHITE="$BB_ENV_EXTRAWHITE CCACHE_DIR"
</literallayout></para></listitem>
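
The second step is truncated from this hunk; it consists of exporting the variable from the metadata so it reaches the task environment. Together the two steps look roughly like this:

    $ export BB_ENV_EXTRAWHITE="$BB_ENV_EXTRAWHITE CCACHE_DIR"   # step 1, in the shell
    # step 2, in a configuration file:
    export CCACHE_DIR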
@@ -1251,17 +1072,25 @@
<para>
The <filename>BB_ORIGENV</filename> variable returns a datastore
object that can be queried using the standard datastore operators
such as <filename>getVar(, False)</filename>.
such as <filename>getVar()</filename>.
The datastore object is useful, for example, to find the original
<filename>DISPLAY</filename> variable.
Here is an example:
<literallayout class='monospaced'>
BB_ORIGENV - add example?

origenv = d.getVar("BB_ORIGENV", False)
bar = origenv.getVar("BAR", False)
</literallayout>
The previous example returns <filename>BAR</filename> from the original
execution environment.
</para>

<para>
By default, BitBake cleans the environment to include only those
things exported or listed in its whitelist to ensure that the build
environment is reproducible and consistent.
</para>
</section>
</section>

@@ -1274,7 +1103,7 @@
BitBake reads and writes varflags to the datastore using the following
command forms:
<literallayout class='monospaced'>
<replaceable>variable</replaceable> = d.getVarFlags("<replaceable>variable</replaceable>")
<variable> = d.getVarFlags("<variable>")
self.d.setVarFlags("FOO", {"func": True})
</literallayout>
</para>
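
A small sketch of writing a flag and reading it back in anonymous Python (variable name is hypothetical):

    python () {
        d.setVarFlag('FOO', 'func', True)
        flags = d.getVarFlags('FOO')       # {'func': True}
        bb.note("FOO flags: %s" % flags)
    }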
@@ -1295,38 +1124,11 @@
Tasks support a number of these flags which control various
functionality of the task:
<itemizedlist>
<listitem><para><emphasis>cleandirs:</emphasis>
Empty directories that should created before the task runs.
</para></listitem>
<listitem><para><emphasis>depends:</emphasis>
Controls inter-task dependencies.
See the
<link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
variable and the
"<link linkend='inter-task-dependencies'>Inter-Task Dependencies</link>"
section for more information.
</para></listitem>
<listitem><para><emphasis>deptask:</emphasis>
Controls task build-time dependencies.
See the
<link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
variable and the
"<link linkend='build-dependencies'>Build Dependencies</link>"
section for more information.
</para></listitem>
<listitem><para><emphasis>dirs:</emphasis>
Directories that should be created before the task runs.
The last directory listed will be used as the work directory
for the task.
</para></listitem>
<listitem><para><emphasis>lockfiles:</emphasis>
Specifies one or more lockfiles to lock while the task
executes.
Only one task may hold a lockfile, and any task that
attempts to lock an already locked file will block until
the lock is released.
You can use this variable flag to accomplish mutual
exclusion.
<listitem><para><emphasis>cleandirs:</emphasis>
Empty directories that should created before the task runs.
</para></listitem>
<listitem><para><emphasis>noexec:</emphasis>
Marks the tasks as being empty and no execution required.
@@ -1338,20 +1140,21 @@
Tells BitBake to not generate a stamp file for a task,
which implies the task should always be executed.
</para></listitem>
<listitem><para><emphasis>postfuncs:</emphasis>
List of functions to call after the completion of the task.
<listitem><para><emphasis>fakeroot:</emphasis>
Causes a task to be run in a fakeroot environment,
obtained by adding the variables in
<link linkend='var-FAKEROOTENV'><filename>FAKEROOTENV</filename></link>
to the environment.
</para></listitem>
<listitem><para><emphasis>prefuncs:</emphasis>
List of functions to call before the task executes.
<listitem><para><emphasis>umask:</emphasis>
The umask to run the task under.
</para></listitem>
<listitem><para><emphasis>rdepends:</emphasis>
Controls inter-task runtime dependencies.
<listitem><para><emphasis>deptask:</emphasis>
Controls task build-time dependencies.
See the
<link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link>
variable, the
<link linkend='var-RRECOMMENDS'><filename>RRECOMMENDS</filename></link>
variable, and the
"<link linkend='inter-task-dependencies'>Inter-Task Dependencies</link>"
<link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
variable and the
"<link linkend='build-dependencies'>Build Dependencies</link>"
section for more information.
</para></listitem>
<listitem><para><emphasis>rdeptask:</emphasis>
@@ -1364,11 +1167,6 @@
"<link linkend='runtime-dependencies'>Runtime Dependencies</link>"
section for more information.
</para></listitem>
<listitem><para><emphasis>recideptask:</emphasis>
When set in conjunction with
<filename>recrdeptask</filename>, specifies a task that
should be inspected for additional dependencies.
</para></listitem>
<listitem><para><emphasis>recrdeptask:</emphasis>
Controls task recursive runtime dependencies.
See the
@@ -1379,14 +1177,35 @@
"<link linkend='recursive-dependencies'>Recursive Dependencies</link>"
section for more information.
</para></listitem>
<listitem><para><emphasis>depends:</emphasis>
Controls inter-task dependencies.
See the
<link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
variable and the
"<link linkend='inter-task-dependencies'>Inter-Task Dependencies</link>"
section for more information.
</para></listitem>
<listitem><para><emphasis>rdepends:</emphasis>
Controls inter-task runtime dependencies.
See the
<link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link>
variable, the
<link linkend='var-RRECOMMENDS'><filename>RRECOMMENDS</filename></link>
variable, and the
"<link linkend='inter-task-dependencies'>Inter-Task Dependencies</link>"
section for more information.
</para></listitem>
<listitem><para><emphasis>postfuncs:</emphasis>
List of functions to call after the completion of the task.
</para></listitem>
<listitem><para><emphasis>prefuncs:</emphasis>
List of functions to call before the task executes.
</para></listitem>
<listitem><para><emphasis>stamp-extra-info:</emphasis>
Extra stamp information to append to the task's stamp.
As an example, OpenEmbedded uses this flag to allow
machine-specific tasks.
</para></listitem>
<listitem><para><emphasis>umask:</emphasis>
The umask to run the task under.
</para></listitem>
</itemizedlist>
</para>
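
A few of the task flags above in use, as they might appear in a recipe (the directory variables are assumed to be defined elsewhere):

    do_compile[dirs] = "${B}"                        # cd into ${B} before running
    do_install[cleandirs] = "${D}"                   # start from an empty ${D}
    do_deploy[lockfiles] = "${TMPDIR}/deploy.lock"   # mutual exclusion
    do_listtasks[nostamp] = "1"                      # always re-execute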

@@ -1406,16 +1225,16 @@
does not allow BitBake to automatically determine
that the variable is referred to.
</para></listitem>
<listitem><para><emphasis>vardepsexclude:</emphasis>
Specifies a space-separated list of variables
that should be excluded from a variable's dependencies
for the purposes of calculating its signature.
</para></listitem>
<listitem><para><emphasis>vardepvalue:</emphasis>
If set, instructs BitBake to ignore the actual
value of the variable and instead use the specified
value when calculating the variable's signature.
</para></listitem>
<listitem><para><emphasis>vardepsexclude:</emphasis>
Specifies a space-separated list of variables
that should be excluded from a variable's dependencies
for the purposes of calculating its signature.
</para></listitem>
<listitem><para><emphasis>vardepvalueexclude:</emphasis>
Specifies a pipe-separated list of strings to exclude
from the variable's value when calculating the
@@ -1432,8 +1251,8 @@
BitBake allows installation of event handlers within
recipe and class files.
Events are triggered at certain points during operation,
such as the beginning of an operation against a given recipe
(<filename>*.bb</filename> file), the start of a given task,
such as the beginning of operation against a given
<filename>.bb</filename>, the start of a given task,
task failure, task success, and so forth.
The intent is to make it easy to do things like email
notification on build failure.
@@ -1461,27 +1280,6 @@
the name of the triggered event.
</para>

<para>
Because you probably are only interested in a subset of events,
you would likely use the <filename>[eventmask]</filename> flag
for your event handler to be sure that only certain events
trigger the handler.
Given the previous example, suppose you only wanted the
<filename>bb.build.TaskFailed</filename> event to trigger that
event handler.
Use the flag as follows:
<literallayout class='monospaced'>
addhandler myclass_eventhandler
myclass_eventhandler[eventmask] = "bb.build.TaskFailed"
python myclass_eventhandler() {
    from bb.event import getName
    from bb import data
    print("The name of the Event is %s" % getName(e))
    print("The file we run for is %s" % data.getVar('FILE', e.data, True))
}
</literallayout>
</para>

<para>
During a standard build, the following common events might occur:
<itemizedlist>
@@ -1668,9 +1466,9 @@
complete before that task can be executed.
Here is an example:
<literallayout class='monospaced'>
do_configure[deptask] = "do_populate_sysroot"
do_configure[deptask] = "do_populate_staging"
</literallayout>
In this example, the <filename>do_populate_sysroot</filename>
In this example, the <filename>do_populate_staging</filename>
task of each item in <filename>DEPENDS</filename> must complete before
<filename>do_configure</filename> can execute.
</para>
@@ -1696,11 +1494,11 @@
item runtime dependency which must have completed before that
task can be executed.
<literallayout class='monospaced'>
do_package_qa[rdeptask] = "do_packagedata"
do_package_write[rdeptask] = "do_package"
</literallayout>
In the previous example, the <filename>do_packagedata</filename>
In the previous example, the <filename>do_package</filename>
task of each item in <filename>RDEPENDS</filename> must have
completed before <filename>do_package_qa</filename> can execute.
completed before <filename>do_package_write</filename> can execute.
</para>
</section>

@@ -1745,9 +1543,9 @@
the data in <filename>DEPENDS</filename>.
Here is an example:
<literallayout class='monospaced'>
do_patch[depends] = "quilt-native:do_populate_sysroot"
do_patch[depends] = "quilt-native:do_populate_staging"
</literallayout>
In this example, the <filename>do_populate_sysroot</filename>
In this example, the <filename>do_populate_staging</filename>
task of the target <filename>quilt-native</filename>
must have completed before the
<filename>do_patch</filename> task can execute.
@@ -1849,10 +1647,6 @@
<entry align="left"><filename>d.delVarFlags("X")</filename></entry>
<entry align="left">Deletes all the flags for the variable "X".</entry>
</row>
<row>
<entry align="left"><filename>d.expand(expression)</filename></entry>
<entry align="left">Expands variable references in the specified string expression.</entry>
</row>
</tbody>
</tgroup>
</informaltable>

@@ -43,8 +43,8 @@
<link linkend='var-DEFAULT_PREFERENCE'>D</link>
<link linkend='var-EXCLUDE_FROM_WORLD'>E</link>
<link linkend='var-FAKEROOT'>F</link>
<link linkend='var-GITDIR'>G</link>
<link linkend='var-HGDIR'>H</link>
<!-- <link linkend='var-GROUPADD_PARAM'>G</link> -->
<link linkend='var-HOMEPAGE'>H</link>
<!-- <link linkend='var-ICECC_DISABLED'>I</link> -->
<!-- <link linkend='var-glossary-j'>J</link> -->
<!-- <link linkend='var-KARCH'>K</link> -->
@@ -102,56 +102,6 @@
</glossdef>
</glossentry>

<glossentry id='var-BB_ALLOWED_NETWORKS'><glossterm>BB_ALLOWED_NETWORKS</glossterm>
<glossdef>
<para>
Specifies a space-delimited list of hosts that the fetcher
is allowed to use to obtain the required source code.
Following are considerations surrounding this variable:
<itemizedlist>
<listitem><para>
This host list is only used if
<link linkend='var-BB_NO_NETWORK'><filename>BB_NO_NETWORK</filename></link>
is either not set or set to "0".
</para></listitem>
<listitem><para>
Limited support for wildcard matching against the
beginning of host names exists.
For example, the following setting matches
<filename>git.gnu.org</filename>,
<filename>ftp.gnu.org</filename>, and
<filename>foo.git.gnu.org</filename>.
<literallayout class='monospaced'>
BB_ALLOWED_NETWORKS = "*.gnu.org"
</literallayout>
</para></listitem>
<listitem><para>
Mirrors not in the host list are skipped and
logged in debug.
</para></listitem>
<listitem><para>
Attempts to access networks not in the host list
cause a failure.
</para></listitem>
</itemizedlist>
Using <filename>BB_ALLOWED_NETWORKS</filename> in
conjunction with
<link linkend='var-PREMIRRORS'><filename>PREMIRRORS</filename></link>
is very useful.
Adding the host you want to use to
<filename>PREMIRRORS</filename> results in the source code
being fetched from an allowed location and avoids raising
an error when a host that is not allowed is in a
<link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>
statement.
This is because the fetcher does not attempt to use the
host listed in <filename>SRC_URI</filename> after a
successful fetch from the
<filename>PREMIRRORS</filename> occurs.
</para>
</glossdef>
</glossentry>
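
The PREMIRRORS interplay described in the entry above might be configured like this (mirror URL is hypothetical):

    BB_ALLOWED_NETWORKS = "*.gnu.org"
    PREMIRRORS_prepend = "git://.*/.* http://git.gnu.org/mirror/ \n"
    # Sources named by disallowed SRC_URI hosts are fetched from the
    # allowed premirror instead, so no error is raised.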

<glossentry id='var-BB_CONSOLELOG'><glossterm>BB_CONSOLELOG</glossterm>
<glossdef>
<para>
@@ -856,56 +806,6 @@
</glossdef>
</glossentry>

<glossentry id='var-BB_TASK_IONICE_LEVEL'><glossterm>BB_TASK_IONICE_LEVEL</glossterm>
<glossdef>
<para>
Allows adjustment of a task's Input/Output priority.
During Autobuilder testing, random failures can occur
for tasks due to I/O starvation.
These failures occur during various QEMU runtime timeouts.
You can use the <filename>BB_TASK_IONICE_LEVEL</filename>
variable to adjust the I/O priority of these tasks.
<note>
This variable works similarly to the
<link linkend='var-BB_TASK_NICE_LEVEL'><filename>BB_TASK_NICE_LEVEL</filename></link>
variable except with a task's I/O priorities.
</note>
</para>

<para>
Set the variable as follows:
<literallayout class='monospaced'>
BB_TASK_IONICE_LEVEL = "<replaceable>class</replaceable>.<replaceable>prio</replaceable>"
</literallayout>
For <replaceable>class</replaceable>, the default value is
"2", which is a best effort.
You can use "1" for realtime and "3" for idle.
If you want to use realtime, you must have superuser
privileges.
</para>

<para>
For <replaceable>prio</replaceable>, you can use any
value from "0", which is the highest priority, to "7",
which is the lowest.
The default value is "4".
You do not need any special privileges to use this range
of priority values.
<note>
In order for your I/O priority settings to take effect,
you need the Completely Fair Queuing (CFQ) Scheduler
selected for the backing block device.
To select the scheduler, use the following command form
where <replaceable>device</replaceable> is the device
(e.g. sda, sdb, and so forth):
<literallayout class='monospaced'>
$ sudo sh -c “echo cfq > /sys/block/<replaceable>device</replaceable>/queu/scheduler
</literallayout>
</note>
</para>
</glossdef>
</glossentry>
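
For example, following the class.prio form described above, keeping tasks in the best-effort class (2) at a lower-than-default priority (6):

    BB_TASK_IONICE_LEVEL = "2.6"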

<glossentry id='var-BB_TASK_NICE_LEVEL'><glossterm>BB_TASK_NICE_LEVEL</glossterm>
<glossdef>
<para>
@@ -972,7 +872,7 @@
that run on the target <filename>MACHINE</filename>;
"nativesdk", which targets the SDK machine instead of
<filename>MACHINE</filename>; and "mulitlibs" in the form
"<filename>multilib:</filename><replaceable>multilib_name</replaceable>".
"<filename>multilib:<multilib_name></filename>".
</para>

<para>
@@ -984,7 +884,7 @@
metadata:
<literallayout class='monospaced'>
BBCLASSEXTEND =+ "native nativesdk"
BBCLASSEXTEND =+ "multilib:<replaceable>multilib_name</replaceable>"
BBCLASSEXTEND =+ "multilib:<multilib_name>"
</literallayout>
</para>
</glossdef>
@@ -1116,20 +1016,6 @@
</glossdef>
</glossentry>

<glossentry id='var-BBLAYERS_FETCH_DIR'><glossterm>BBLAYERS_FETCH_DIR</glossterm>
<glossdef>
<para>
Sets the base location where layers are stored.
By default, this location is set to
<filename>${COREBASE}</filename>.
This setting is used in conjunction with
<filename>bitbake-layers layerindex-fetch</filename> and
tells <filename>bitbake-layers</filename> where to place
the fetched layers.
</para>
</glossdef>
</glossentry>

<glossentry id='var-BBMASK'><glossterm>BBMASK</glossterm>
<glossdef>
<para>
@@ -1142,14 +1028,14 @@
to "hide" these <filename>.bb</filename> and
<filename>.bbappend</filename> files.
BitBake ignores any recipe or recipe append files that
match any of the expressions.
match the expression.
It is as if BitBake does not see them at all.
Consequently, matching files are not parsed or otherwise
used by BitBake.</para>
<para>
The values you provide are passed to Python's regular
The value you provide is passed to Python's regular
expression compiler.
The expressions are compared against the full paths to
The expression is compared against the full paths to
the files.
For complete syntax information, see Python's
documentation at
@@ -1165,16 +1051,18 @@
BBMASK = "meta-ti/recipes-misc/"
</literallayout>
If you want to mask out multiple directories or recipes,
you can specify multiple regular expression fragments.
use the vertical bar to separate the regular expression
fragments.
This next example masks out multiple directories and
individual recipes:
<literallayout class='monospaced'>
BBMASK += "/meta-ti/recipes-misc/ meta-ti/recipes-ti/packagegroup/"
BBMASK += "/meta-oe/recipes-support/"
BBMASK += "/meta-foo/.*/openldap"
BBMASK += "opencv.*\.bbappend"
BBMASK += "lzma"
BBMASK = "meta-ti/recipes-misc/|meta-ti/recipes-ti/packagegroup/"
BBMASK .= "|.*meta-oe/recipes-support/"
BBMASK .= "|.*openldap"
BBMASK .= "|.*opencv"
BBMASK .= "|.*lzma"
</literallayout>
Notice how the vertical bar is used to append the fragments.
<note>
When specifying a directory name, use the trailing
slash character to ensure you match just that directory
@@ -1203,9 +1091,9 @@
Set the variable as you would any environment variable
and then run BitBake:
<literallayout class='monospaced'>
$ BBPATH="<replaceable>build_directory</replaceable>"
$ BBPATH="<build_directory>"
$ export BBPATH
$ bitbake <replaceable>target</replaceable>
$ bitbake <target>
</literallayout>
</para>
</glossdef>
@@ -1221,15 +1109,6 @@
</glossdef>
</glossentry>

<glossentry id='var-BBTARGETS'><glossterm>BBTARGETS</glossterm>
<glossdef>
<para>
Allows you to use a configuration file to add to the list
of command-line target recipes you want to build.
</para>
</glossdef>
</glossentry>

<glossentry id='var-BBVERSIONS'><glossterm>BBVERSIONS</glossterm>
<glossdef>
<para>
@@ -1275,15 +1154,6 @@
</glossdef>
</glossentry>

<glossentry id='var-BZRDIR'><glossterm>BZRDIR</glossterm>
<glossdef>
<para>
The directory in which files checked out of a Bazaar
system are stored.
</para>
</glossdef>
</glossentry>

</glossdiv>

<glossdiv id='var-glossary-c'><title>C</title>
@@ -1298,15 +1168,6 @@
</glossdef>
</glossentry>

<glossentry id='var-CVSDIR'><glossterm>CVSDIR</glossterm>
<glossdef>
<para>
The directory in which files checked out under the
CVS system are stored.
</para>
</glossdef>
</glossentry>

</glossdiv>

<glossdiv id='var-glossary-d'><title>D</title>
@@ -1551,32 +1412,13 @@

</glossdiv>

<!--
<glossdiv id='var-glossary-g'><title>G</title>

<glossentry id='var-GITDIR'><glossterm>GITDIR</glossterm>
<glossdef>
<para>
The directory in which a local copy of a Git repository
is stored when it is cloned.
</para>
</glossdef>
</glossentry>

</glossdiv>

-->

<glossdiv id='var-glossary-h'><title>H</title>

<glossentry id='var-HGDIR'><glossterm>HGDIR</glossterm>
<glossdef>
<para>
The directory in which files checked out of a Mercurial
system are stored.
</para>
</glossdef>
</glossentry>

<glossentry id='var-HOMEPAGE'><glossterm>HOMEPAGE</glossterm>
<glossdef>
<para>Website where more information about the software the recipe is building
@@ -1718,16 +1560,8 @@
BitBake uses <filename>OVERRIDES</filename> to control
what variables are overridden after BitBake parses
recipes and configuration files.
</para>

<para>
Following is a simple example that uses an overrides
list based on machine architectures:
<literallayout class='monospaced'>
OVERRIDES = "arm:x86:mips:powerpc"
</literallayout>
You can find information on how to use
<filename>OVERRIDES</filename> in the
You can find more information on how overrides are handled
in the
"<link linkend='conditional-syntax-overrides'>Conditional Syntax (Overrides)</link>"
section.
</para>
@@ -1910,28 +1744,16 @@
<glossentry id='var-PROVIDES'><glossterm>PROVIDES</glossterm>
<glossdef>
<para>
A list of aliases by which a particular recipe can be
known.
By default, a recipe's own
<filename><link linkend='var-PN'>PN</link></filename>
is implicitly already in its <filename>PROVIDES</filename>
list.
If a recipe uses <filename>PROVIDES</filename>, the
additional aliases are synonyms for the recipe and can
be useful satisfying dependencies of other recipes during
the build as specified by
<filename><link linkend='var-DEPENDS'>DEPENDS</link></filename>.
</para>

<para>
Consider the following example
<filename>PROVIDES</filename> statement from a recipe
file <filename>libav_0.8.11.bb</filename>:
<literallayout class='monospaced'>
PROVIDES += "libpostproc"
</literallayout>
The <filename>PROVIDES</filename> statement results in
the "libav" recipe also being known as "libpostproc".
A list of aliases that a recipe also provides.
These aliases are useful for satisfying dependencies of
other recipes during the build (as specified by
<filename><link linkend='var-DEPENDS'>DEPENDS</link></filename>).
<note>
A recipe's own
<filename><link linkend='var-PN'>PN</link></filename>
is implicitly already in its
<filename>PROVIDES</filename> list.
</note>
</para>
</glossdef>
</glossentry>
@@ -2009,7 +1831,7 @@
Here is the general syntax to specify versions with
the <filename>RDEPENDS</filename> variable:
<literallayout class='monospaced'>
RDEPENDS_${PN} = "<replaceable>package</replaceable> (<replaceable>operator</replaceable> <replaceable>version</replaceable>)"
RDEPENDS_${PN} = "<package> (<operator> <version>)"
</literallayout>
For <filename>operator</filename>, you can specify the
following:
@@ -2075,7 +1897,7 @@
Here is the general syntax to specify versions with
the <filename>RRECOMMENDS</filename> variable:
<literallayout class='monospaced'>
RRECOMMENDS_${PN} = "<replaceable>package</replaceable> (<replaceable>operator</replaceable> <replaceable>version</replaceable>)"
RRECOMMENDS_${PN} = "<package> (<operator> <version>)"
</literallayout>
For <filename>operator</filename>, you can specify the
following:
@@ -2258,15 +2080,6 @@
</glossdef>
</glossentry>

<glossentry id='var-SVNDIR'><glossterm>SVNDIR</glossterm>
<glossdef>
<para>
The directory in which files checked out of a Subversion
system are stored.
</para>
</glossdef>
</glossentry>

</glossdiv>

<glossdiv id='var-glossary-t'><title>T</title>

@@ -313,13 +313,6 @@ a:hover {
/*font-weight: bold;*/
}

/* This style defines how the permalink character
appears by itself and when hovered over with
the mouse. */

[alt='Permalink'] { color: #eee; }
[alt='Permalink']:hover { color: black; }


div.informalfigure,
div.informalexample,
@@ -800,6 +793,7 @@ div.sect2 .titlepage .title {

h1.title {
background-color: transparent;
background-image: url("figures/yocto-project-bw.png");
background-repeat: no-repeat;
height: 256px;
text-indent: -9000px;

@@ -56,7 +56,7 @@
-->

<copyright>
<year>2004-2016</year>
<year>2004-2014</year>
<holder>Richard Purdie</holder>
<holder>Chris Larson</holder>
<holder>and Phil Blundell</holder>

@@ -89,7 +89,7 @@ quit after parsing the BB files (developers only)
show current and preferred versions of all packages
.TP
.B \-e, \-\-environment
show the global or per-recipe environment (this is what used to be bbread)
show the global or per-package environment (this is what used to be bbread)
.TP
.B \-g, \-\-graphviz
emit the dependency trees of the specified packages in the dot syntax

bitbake/doc/template/component.title.xsl (vendored, 39 lines)
@@ -1,39 +0,0 @@
<xsl:stylesheet version="1.0"
xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
xmlns:d="http://docbook.org/ns/docbook"
xmlns="http://www.w3.org/1999/xhtml"
exclude-result-prefixes="d">

<xsl:template name="component.title">
<xsl:param name="node" select="."/>

<xsl:variable name="level">
<xsl:choose>
<xsl:when test="ancestor::d:section">
<xsl:value-of select="count(ancestor::d:section)+1"/>
</xsl:when>
<xsl:when test="ancestor::d:sect5">6</xsl:when>
<xsl:when test="ancestor::d:sect4">5</xsl:when>
<xsl:when test="ancestor::d:sect3">4</xsl:when>
<xsl:when test="ancestor::d:sect2">3</xsl:when>
<xsl:when test="ancestor::d:sect1">2</xsl:when>
<xsl:otherwise>1</xsl:otherwise>
</xsl:choose>
</xsl:variable>
<xsl:element name="h{$level+1}" namespace="http://www.w3.org/1999/xhtml">
<xsl:attribute name="class">title</xsl:attribute>
<xsl:if test="$generate.id.attributes = 0">
<xsl:call-template name="anchor">
<xsl:with-param name="node" select="$node"/>
<xsl:with-param name="conditional" select="0"/>
</xsl:call-template>
</xsl:if>
<xsl:apply-templates select="$node" mode="object.title.markup">
<xsl:with-param name="allow-anchors" select="1"/>
</xsl:apply-templates>
<xsl:call-template name="permalink">
<xsl:with-param name="node" select="$node"/>
</xsl:call-template>
</xsl:element>
</xsl:template>
</xsl:stylesheet>

bitbake/doc/template/division.title.xsl (vendored, 25 lines)
@@ -1,25 +0,0 @@
<xsl:stylesheet version="1.0"
xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
xmlns:d="http://docbook.org/ns/docbook"
xmlns="http://www.w3.org/1999/xhtml"
exclude-result-prefixes="d">

<xsl:template name="division.title">
<xsl:param name="node" select="."/>

<h1>
<xsl:attribute name="class">title</xsl:attribute>
<xsl:call-template name="anchor">
<xsl:with-param name="node" select="$node"/>
<xsl:with-param name="conditional" select="0"/>
</xsl:call-template>
<xsl:apply-templates select="$node" mode="object.title.markup">
<xsl:with-param name="allow-anchors" select="1"/>
</xsl:apply-templates>
<xsl:call-template name="permalink">
<xsl:with-param name="node" select="$node"/>
</xsl:call-template>
</h1>
</xsl:template>
</xsl:stylesheet>

bitbake/doc/template/formal.object.heading.xsl (vendored, 21 lines)
@@ -1,21 -0,0 @@
<xsl:stylesheet version="1.0"
xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
xmlns:d="http://docbook.org/ns/docbook"
xmlns="http://www.w3.org/1999/xhtml"
exclude-result-prefixes="d">

<xsl:template name="formal.object.heading">
<xsl:param name="object" select="."/>
<xsl:param name="title">
<xsl:apply-templates select="$object" mode="object.title.markup">
<xsl:with-param name="allow-anchors" select="1"/>
</xsl:apply-templates>
</xsl:param>
<p class="title">
<b><xsl:copy-of select="$title"/></b>
<xsl:call-template name="permalink">
<xsl:with-param name="node" select="$object"/>
</xsl:call-template>
</p>
</xsl:template>
</xsl:stylesheet>

bitbake/doc/template/gloss-permalinks.xsl (vendored, 14 lines)
@@ -1,14 -0,0 @@
<xsl:stylesheet version="1.0"
xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
xmlns:d="http://docbook.org/ns/docbook"
xmlns="http://www.w3.org/1999/xhtml">

<xsl:template match="glossentry/glossterm">
<xsl:apply-imports/>
<xsl:if test="$generate.permalink != 0">
<xsl:call-template name="permalink">
<xsl:with-param name="node" select=".."/>
</xsl:call-template>
</xsl:if>
</xsl:template>
</xsl:stylesheet>

bitbake/doc/template/permalinks.xsl (vendored, 25 lines)
@@ -1,25 -0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<xsl:stylesheet version="1.0"
xmlns="http://www.w3.org/1999/xhtml"
xmlns:xsl="http://www.w3.org/1999/XSL/Transform">

<xsl:param name="generate.permalink" select="1"/>
<xsl:param name="permalink.text">¶</xsl:param>

<xsl:template name="permalink">
<xsl:param name="node"/>

<xsl:if test="$generate.permalink != '0'">
<span class="permalink">
<a alt="Permalink" title="Permalink">
<xsl:attribute name="href">
<xsl:call-template name="href.target">
<xsl:with-param name="object" select="$node"/>
</xsl:call-template>
</xsl:attribute>
<xsl:copy-of select="$permalink.text"/>
</a>
</span>
</xsl:if>
</xsl:template>
</xsl:stylesheet>

bitbake/doc/template/section.title.xsl (vendored, 55 lines)
@@ -1,55 -0,0 @@
<xsl:stylesheet version="1.0"
xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
xmlns:d="http://docbook.org/ns/docbook"
xmlns="http://www.w3.org/1999/xhtml" exclude-result-prefixes="d">

<xsl:template name="section.title">
<xsl:variable name="section"
select="(ancestor::section |
ancestor::simplesect|
ancestor::sect1|
ancestor::sect2|
ancestor::sect3|
ancestor::sect4|
ancestor::sect5)[last()]"/>

<xsl:variable name="renderas">
<xsl:choose>
<xsl:when test="$section/@renderas = 'sect1'">1</xsl:when>
<xsl:when test="$section/@renderas = 'sect2'">2</xsl:when>
<xsl:when test="$section/@renderas = 'sect3'">3</xsl:when>
<xsl:when test="$section/@renderas = 'sect4'">4</xsl:when>
<xsl:when test="$section/@renderas = 'sect5'">5</xsl:when>
<xsl:otherwise><xsl:value-of select="''"/></xsl:otherwise>
</xsl:choose>
</xsl:variable>

<xsl:variable name="level">
<xsl:choose>
<xsl:when test="$renderas != ''">
<xsl:value-of select="$renderas"/>
</xsl:when>
<xsl:otherwise>
<xsl:call-template name="section.level">
<xsl:with-param name="node" select="$section"/>
</xsl:call-template>
</xsl:otherwise>
</xsl:choose>
</xsl:variable>

<xsl:call-template name="section.heading">
<xsl:with-param name="section" select="$section"/>
<xsl:with-param name="level" select="$level"/>
<xsl:with-param name="title">
<xsl:apply-templates select="$section" mode="object.title.markup">
<xsl:with-param name="allow-anchors" select="1"/>
</xsl:apply-templates>
<xsl:if test="$level > 0">
<xsl:call-template name="permalink">
<xsl:with-param name="node" select="$section"/>
</xsl:call-template>
</xsl:if>
</xsl:with-param>
</xsl:call-template>
</xsl:template>
</xsl:stylesheet>
@@ -21,7 +21,7 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

__version__ = "1.31.0"
__version__ = "1.22.0"

import sys
if sys.version_info < (2, 7, 3):
@@ -70,8 +70,6 @@ logger = logging.getLogger("BitBake")
logger.addHandler(NullHandler())
logger.setLevel(logging.DEBUG - 2)

mainlogger = logging.getLogger("BitBake.Main")

# This has to be imported after the setLoggerClass, as the import of bb.msg
# can result in construction of the various loggers.
import bb.msg
@@ -81,31 +79,32 @@ sys.modules['bb.fetch'] = sys.modules['bb.fetch2']

# Messaging convenience functions
def plain(*args):
    mainlogger.plain(''.join(args))
    logger.plain(''.join(args))

def debug(lvl, *args):
    if isinstance(lvl, basestring):
        mainlogger.warning("Passed invalid debug level '%s' to bb.debug", lvl)
        logger.warn("Passed invalid debug level '%s' to bb.debug", lvl)
        args = (lvl,) + args
        lvl = 1
    mainlogger.debug(lvl, ''.join(args))
    logger.debug(lvl, ''.join(args))

def note(*args):
    mainlogger.info(''.join(args))
    logger.info(''.join(args))

def warn(*args):
    mainlogger.warning(''.join(args))
    logger.warn(''.join(args))

def error(*args, **kwargs):
    mainlogger.error(''.join(args), extra=kwargs)
def error(*args):
    logger.error(''.join(args))

def fatal(*args):
    logger.critical(''.join(args))
    sys.exit(1)

def fatal(*args, **kwargs):
    mainlogger.critical(''.join(args), extra=kwargs)
    raise BBHandledException()

def deprecated(func, name=None, advice=""):
    """This is a decorator which can be used to mark functions
    as deprecated. It will result in a warning being emitted
    as deprecated. It will result in a warning being emmitted
    when the function is used."""
    import warnings

@@ -23,7 +23,7 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
#Based on functions from the base bb module, Copyright 2003 Holger Schurig

import os
import sys
@@ -31,7 +31,6 @@ import logging
import shlex
import glob
import time
import stat
import bb
import bb.msg
import bb.process
@@ -43,31 +42,13 @@ logger = logging.getLogger('BitBake.Build')

NULL = open(os.devnull, 'r+')

__mtime_cache = {}

def cached_mtime_noerror(f):
    if f not in __mtime_cache:
        try:
            __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
        except OSError:
            return 0
    return __mtime_cache[f]

def reset_cache():
    global __mtime_cache
    __mtime_cache = {}

# When we execute a Python function, we'd like certain things
# in all namespaces, hence we add them to __builtins__.
# When we execute a python function we'd like certain things
# in all namespaces, hence we add them to __builtins__
# If we do not do this and use the exec globals, they will
# not be available to subfunctions.
if hasattr(__builtins__, '__setitem__'):
    builtins = __builtins__
else:
    builtins = __builtins__.__dict__

builtins['bb'] = bb
builtins['os'] = os
__builtins__['bb'] = bb
__builtins__['os'] = os

class FuncFailed(Exception):
    def __init__(self, name = None, logfile = None):
@@ -161,18 +142,13 @@ class LogTee(object):
    def flush(self):
        self.outfile.flush()

#
# pythonexception allows the python exceptions generated to be raised
# as the real exceptions (not FuncFailed) and without a backtrace at the
# origin of the failure.
#
def exec_func(func, d, dirs = None, pythonexception=False):
    """Execute a BB 'function'"""
def exec_func(func, d, dirs = None):
    """Execute an BB 'function'"""

    body = d.getVar(func, False)
    body = d.getVar(func)
    if not body:
    if body is None:
        logger.warning("Function %s doesn't exist", func)
        logger.warn("Function %s doesn't exist", func)
        return

    flags = d.getVarFlags(func)
@@ -234,21 +210,25 @@ def exec_func(func, d, dirs = None, pythonexception=False):

    with bb.utils.fileslocked(lockfiles):
        if ispython:
            exec_func_python(func, d, runfile, cwd=adir, pythonexception=pythonexception)
            exec_func_python(func, d, runfile, cwd=adir)
        else:
            exec_func_shell(func, d, runfile, cwd=adir)

_functionfmt = """
def {function}(d):
{body}

{function}(d)
"""
logformatter = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
def exec_func_python(func, d, runfile, cwd=None, pythonexception=False):
def exec_func_python(func, d, runfile, cwd=None):
    """Execute a python BB 'function'"""

    code = _functionfmt.format(function=func)
    bbfile = d.getVar('FILE', True)
    code = _functionfmt.format(function=func, body=d.getVar(func, True))
    bb.utils.mkdirhier(os.path.dirname(runfile))
    with open(runfile, 'w') as script:
        bb.data.emit_func_python(func, script, d)
        script.write(code)

    if cwd:
        try:
@@ -260,18 +240,12 @@ def exec_func_python(func, d, runfile, cwd=None, pythonexception=False):
    bb.debug(2, "Executing python function %s" % func)

    try:
        text = "def %s(d):\n%s" % (func, d.getVar(func, False))
        fn = d.getVarFlag(func, "filename", False)
        lineno = int(d.getVarFlag(func, "lineno", False))
        bb.methodpool.insert_method(func, text, fn, lineno - 1)

        comp = utils.better_compile(code, func, "exec_python_func() autogenerated")
        utils.better_exec(comp, {"d": d}, code, "exec_python_func() autogenerated", pythonexception=pythonexception)
    except (bb.parse.SkipRecipe, bb.build.FuncFailed):
        raise
        comp = utils.better_compile(code, func, bbfile)
        utils.better_exec(comp, {"d": d}, code, bbfile)
    except:
        if pythonexception:
        if sys.exc_info()[0] in (bb.parse.SkipPackage, bb.build.FuncFailed):
            raise

        raise FuncFailed(func, None)
    finally:
        bb.debug(2, "Python function %s finished" % func)
@@ -290,8 +264,9 @@ bb_exit_handler() {
    case $ret in
    0) ;;
    *) case $BASH_VERSION in
        "") echo "WARNING: exit code $ret from a shell command.";;
        *) echo "WARNING: ${BASH_SOURCE[0]}:${BASH_LINENO[0]} exit $ret from '$BASH_COMMAND'";;
        "") echo "WARNING: exit code $ret from a shell command.";;
        *) echo "WARNING: ${BASH_SOURCE[0]}:${BASH_LINENO[0]} exit $ret from
    \"$BASH_COMMAND\"";;
       esac
       exit $ret
    esac
@@ -325,13 +300,13 @@ def exec_func_shell(func, d, runfile, cwd=None):
# cleanup
ret=$?
trap '' 0
exit $ret
exit $?
''')

    os.chmod(runfile, 0775)

    cmd = runfile
    if d.getVarFlag(func, 'fakeroot', False):
    if d.getVarFlag(func, 'fakeroot'):
        fakerootcmd = d.getVar('FAKEROOT', True)
        if fakerootcmd:
            cmd = [fakerootcmd, runfile]
@@ -341,52 +316,14 @@ exit $ret
    else:
        logfile = sys.stdout

    def readfifo(data):
        lines = data.split('\0')
        for line in lines:
            splitval = line.split(' ', 1)
            cmd = splitval[0]
            if len(splitval) > 1:
                value = splitval[1]
            else:
                value = ''
            if cmd == 'bbplain':
                bb.plain(value)
            elif cmd == 'bbnote':
                bb.note(value)
            elif cmd == 'bbwarn':
                bb.warn(value)
            elif cmd == 'bberror':
                bb.error(value)
            elif cmd == 'bbfatal':
                # The caller will call exit themselves, so bb.error() is
                # what we want here rather than bb.fatal()
                bb.error(value)
            elif cmd == 'bbfatal_log':
                bb.error(value, forcelog=True)
            elif cmd == 'bbdebug':
                splitval = value.split(' ', 1)
                level = int(splitval[0])
                value = splitval[1]
                bb.debug(level, value)
    bb.debug(2, "Executing shell function %s" % func)

    tempdir = d.getVar('T', True)
    fifopath = os.path.join(tempdir, 'fifo.%s' % os.getpid())
    if os.path.exists(fifopath):
        os.unlink(fifopath)
    os.mkfifo(fifopath)
    with open(fifopath, 'r+') as fifo:
        try:
    bb.debug(2, "Executing shell function %s" % func)

            try:
                with open(os.devnull, 'r+') as stdin:
                    bb.process.run(cmd, shell=False, stdin=stdin, log=logfile, extrafiles=[(fifo,readfifo)])
            except bb.process.CmdError:
                logfn = d.getVar('BB_LOGFILE', True)
                raise FuncFailed(func, logfn)
        finally:
            os.unlink(fifopath)
    try:
        with open(os.devnull, 'r+') as stdin:
            bb.process.run(cmd, shell=False, stdin=stdin, log=logfile)
    except bb.process.CmdError:
        logfn = d.getVar('BB_LOGFILE', True)
        raise FuncFailed(func, logfn)

    bb.debug(2, "Shell function %s finished" % func)

@@ -406,7 +343,7 @@ def _exec_task(fn, task, d, quieterr):
    Execution of a task involves a bit more setup than executing a function,
    running it with its own local metadata, and with some useful variables set.
    """
    if not d.getVarFlag(task, 'task', False):
    if not d.getVarFlag(task, 'task'):
        event.fire(TaskInvalid(task, d), d)
        logger.error("No such task: %s" % task)
        return 1
@@ -425,13 +362,6 @@ def _exec_task(fn, task, d, quieterr):
            nice = int(nice) - curnice
            newnice = os.nice(nice)
            logger.debug(1, "Renice to %s " % newnice)
        ionice = localdata.getVar("BB_TASK_IONICE_LEVEL", True)
        if ionice:
            try:
                cls, prio = ionice.split(".", 1)
                bb.utils.ioprio_set(os.getpid(), int(cls), int(prio))
            except:
                bb.warn("Invalid ionice level %s" % ionice)

    bb.utils.mkdirhier(tempdir)

@@ -467,10 +397,7 @@ def _exec_task(fn, task, d, quieterr):
            self.triggered = False
            logging.Handler.__init__(self, logging.ERROR)
        def emit(self, record):
            if getattr(record, 'forcelog', False):
                self.triggered = False
            else:
                self.triggered = True
            self.triggered = True

    # Handle logfiles
    si = open('/dev/null', 'r')
@@ -491,7 +418,7 @@ def _exec_task(fn, task, d, quieterr):
    os.dup2(logfile.fileno(), oso[1])
    os.dup2(logfile.fileno(), ose[1])

    # Ensure Python logging goes to the logfile
    # Ensure python logging goes to the logfile
    handler = logging.StreamHandler(logfile)
    handler.setFormatter(logformatter)
    # Always enable full debug output into task logfiles
@@ -545,7 +472,7 @@ def _exec_task(fn, task, d, quieterr):
        bb.utils.remove(loglink)
    event.fire(TaskSucceeded(task, logfn, localdata), localdata)

    if not localdata.getVarFlag(task, 'nostamp', False) and not localdata.getVarFlag(task, 'selfstamp', False):
    if not localdata.getVarFlag(task, 'nostamp') and not localdata.getVarFlag(task, 'selfstamp'):
        make_stamp(task, localdata)

    return 0
@@ -553,7 +480,7 @@ def _exec_task(fn, task, d, quieterr):
def exec_task(fn, task, d, profile = False):
    try:
        quieterr = False
        if d.getVarFlag(task, "quieterrors", False) is not None:
        if d.getVarFlag(task, "quieterrors") is not None:
            quieterr = True

        if profile:
@@ -580,7 +507,7 @@ def exec_task(fn, task, d, profile = False):
        event.fire(failedevent, d)
        return 1

def stamp_internal(taskname, d, file_name, baseonly=False):
def stamp_internal(taskname, d, file_name):
    """
    Internal stamp helper function
    Makes sure the stamp directory exists
@@ -594,23 +521,20 @@ def stamp_internal(taskname, d, file_name, baseonly=False):
    taskflagname = taskname.replace("_setscene", "")

    if file_name:
        stamp = d.stamp[file_name]
        stamp = d.stamp_base[file_name].get(taskflagname) or d.stamp[file_name]
        extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
    else:
        stamp = d.getVar('STAMP', True)
        stamp = d.getVarFlag(taskflagname, 'stamp-base', True) or d.getVar('STAMP', True)
        file_name = d.getVar('BB_FILENAME', True)
        extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info', True) or ""

    if baseonly:
        return stamp

    if not stamp:
        return

    stamp = bb.parse.siggen.stampfile(stamp, file_name, taskname, extrainfo)

    stampdir = os.path.dirname(stamp)
    if cached_mtime_noerror(stampdir) == 0:
    if bb.parse.cached_mtime_noerror(stampdir) == 0:
        bb.utils.mkdirhier(stampdir)

    return stamp
@@ -628,10 +552,10 @@ def stamp_cleanmask_internal(taskname, d, file_name):
    taskflagname = taskname.replace("_setscene", "")

    if file_name:
        stamp = d.stampclean[file_name]
        stamp = d.stamp_base_clean[file_name].get(taskflagname) or d.stampclean[file_name]
        extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
    else:
        stamp = d.getVar('STAMPCLEAN', True)
        stamp = d.getVarFlag(taskflagname, 'stamp-base-clean', True) or d.getVar('STAMPCLEAN', True)
        file_name = d.getVar('BB_FILENAME', True)
        extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info', True) or ""

@@ -668,9 +592,8 @@ def make_stamp(task, d, file_name = None):
    # If we're in task context, write out a signature file for each task
    # as it completes
    if not task.endswith("_setscene") and task != "do_setscene" and not file_name:
        stampbase = stamp_internal(task, d, None, True)
        file_name = d.getVar('BB_FILENAME', True)
        bb.parse.siggen.dump_sigtask(file_name, task, stampbase, True)
        bb.parse.siggen.dump_sigtask(file_name, task, d.getVar('STAMP', True), True)

def del_stamp(task, d, file_name = None):
    """
@@ -705,8 +628,8 @@ def stampfile(taskname, d, file_name = None):
    """
    return stamp_internal(taskname, d, file_name)

def add_tasks(tasklist, d):
    task_deps = d.getVar('_task_deps', False)
def add_tasks(tasklist, deltasklist, d):
    task_deps = d.getVar('_task_deps')
    if not task_deps:
        task_deps = {}
    if not 'tasks' in task_deps:
@@ -717,6 +640,9 @@ def add_tasks(tasklist, d):
    for task in tasklist:
        task = d.expand(task)

        if task in deltasklist:
            continue

        d.setVarFlag(task, 'task', 1)

        if not task in task_deps['tasks']:
@@ -753,12 +679,12 @@ def addtask(task, before, after, d):
        task = "do_" + task

    d.setVarFlag(task, "task", 1)
    bbtasks = d.getVar('__BBTASKS', False) or []
    if task not in bbtasks:
    bbtasks = d.getVar('__BBTASKS') or []
    if not task in bbtasks:
        bbtasks.append(task)
    d.setVar('__BBTASKS', bbtasks)

    existing = d.getVarFlag(task, "deps", False) or []
    existing = d.getVarFlag(task, "deps") or []
    if after is not None:
        # set up deps for function
        for entry in after.split():
@@ -768,7 +694,7 @@ def addtask(task, before, after, d):
    if before is not None:
        # set up things that depend on this func
        for entry in before.split():
            existing = d.getVarFlag(entry, "deps", False) or []
            existing = d.getVarFlag(entry, "deps") or []
            if task not in existing:
                d.setVarFlag(entry, "deps", [task] + existing)

@@ -776,14 +702,8 @@ def deltask(task, d):
    if task[:3] != "do_":
        task = "do_" + task

    bbtasks = d.getVar('__BBTASKS', False) or []
    if task in bbtasks:
        bbtasks.remove(task)
        d.setVar('__BBTASKS', bbtasks)
    bbtasks = d.getVar('__BBDELTASKS') or []
    if not task in bbtasks:
        bbtasks.append(task)
        d.setVar('__BBDELTASKS', bbtasks)

    d.delVarFlag(task, 'deps')
    for bbtask in d.getVar('__BBTASKS', False) or []:
        deps = d.getVarFlag(bbtask, 'deps', False) or []
        if task in deps:
            deps.remove(task)
            d.setVarFlag(bbtask, 'deps', deps)

@@ -43,7 +43,7 @@ except ImportError:
    logger.info("Importing cPickle failed. "
                "Falling back to a very slow implementation.")

__cache_version__ = "149"
__cache_version__ = "147"

def getCacheFile(path, filename, data_hash):
    return os.path.join(path, filename + "." + data_hash)
@@ -85,8 +85,8 @@ class RecipeInfoCommon(object):
        return out_dict

    @classmethod
    def getvar(cls, var, metadata, expand = True):
        return metadata.getVar(var, expand) or ''
    def getvar(cls, var, metadata):
        return metadata.getVar(var, True) or ''


class CoreRecipeInfo(RecipeInfoCommon):
@@ -99,7 +99,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
        self.timestamp = bb.parse.cached_mtime(filename)
        self.variants = self.listvar('__VARIANTS', metadata) + ['']
        self.appends = self.listvar('__BBAPPEND', metadata)
        self.nocache = self.getvar('BB_DONT_CACHE', metadata)
        self.nocache = self.getvar('__BB_DONT_CACHE', metadata)

        self.skipreason = self.getvar('__SKIPPED', metadata)
        if self.skipreason:
@@ -129,6 +129,8 @@ class CoreRecipeInfo(RecipeInfoCommon):
        self.not_world = self.getvar('EXCLUDE_FROM_WORLD', metadata)
        self.stamp = self.getvar('STAMP', metadata)
        self.stampclean = self.getvar('STAMPCLEAN', metadata)
        self.stamp_base = self.flaglist('stamp-base', self.tasks, metadata)
        self.stamp_base_clean = self.flaglist('stamp-base-clean', self.tasks, metadata)
        self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
        self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
        self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
@@ -140,11 +142,10 @@ class CoreRecipeInfo(RecipeInfoCommon):
        self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)
        self.rdepends_pkg = self.pkgvar('RDEPENDS', self.packages, metadata)
        self.rrecommends_pkg = self.pkgvar('RRECOMMENDS', self.packages, metadata)
        self.inherits = self.getvar('__inherit_cache', metadata, expand=False)
        self.inherits = self.getvar('__inherit_cache', metadata)
        self.fakerootenv = self.getvar('FAKEROOTENV', metadata)
        self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata)
        self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata)
        self.extradepsfunc = self.getvar('calculate_extra_depends', metadata)

    @classmethod
    def init_cacheData(cls, cachedata):
@@ -157,6 +158,8 @@ class CoreRecipeInfo(RecipeInfoCommon):

        cachedata.stamp = {}
        cachedata.stampclean = {}
        cachedata.stamp_base = {}
        cachedata.stamp_base_clean = {}
        cachedata.stamp_extrainfo = {}
        cachedata.file_checksums = {}
        cachedata.fn_provides = {}
@@ -180,7 +183,6 @@ class CoreRecipeInfo(RecipeInfoCommon):
        cachedata.fakerootenv = {}
        cachedata.fakerootnoenv = {}
        cachedata.fakerootdirs = {}
        cachedata.extradepsfunc = {}

    def add_cacheData(self, cachedata, fn):
        cachedata.task_deps[fn] = self.task_deps
@@ -190,6 +192,8 @@ class CoreRecipeInfo(RecipeInfoCommon):
        cachedata.pkg_dp[fn] = self.defaultpref
        cachedata.stamp[fn] = self.stamp
        cachedata.stampclean[fn] = self.stampclean
        cachedata.stamp_base[fn] = self.stamp_base
        cachedata.stamp_base_clean[fn] = self.stamp_base_clean
        cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
        cachedata.file_checksums[fn] = self.file_checksums

@@ -216,22 +220,19 @@ class CoreRecipeInfo(RecipeInfoCommon):
            rprovides += self.rprovides_pkg[package]

        for rprovide in rprovides:
            if fn not in cachedata.rproviders[rprovide]:
                cachedata.rproviders[rprovide].append(fn)
            cachedata.rproviders[rprovide].append(fn)

        for package in self.packages_dynamic:
            cachedata.packages_dynamic[package].append(fn)

        # Build hash of runtime depends and recommends
        # Build hash of runtime depends and rececommends
        for package in self.packages + [self.pn]:
            cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
            cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]

        # Collect files we may need for possible world-dep
        # calculations
        if self.not_world:
            logger.debug(1, "EXCLUDE FROM WORLD: %s", fn)
        else:
        if not self.not_world:
            cachedata.possible_world.append(fn)

        # create a collection of all targets for sanity checking
@@ -248,7 +249,6 @@ class CoreRecipeInfo(RecipeInfoCommon):
        cachedata.fakerootenv[fn] = self.fakerootenv
        cachedata.fakerootnoenv[fn] = self.fakerootnoenv
        cachedata.fakerootdirs[fn] = self.fakerootdirs
        cachedata.extradepsfunc[fn] = self.extradepsfunc


@@ -259,7 +259,7 @@ class Cache(object):

    def __init__(self, data, data_hash, caches_array):
        # Pass caches_array information into Cache Constructor
        # It will be used later for deciding whether we
        # It will be used in later for deciding whether we
        # need extra cache file dump/load support
        self.caches_array = caches_array
        self.cachedir = data.getVar("CACHE", True)
@@ -339,7 +339,7 @@ class Cache(object):
                value = pickled.load()
            except Exception:
                break
            if key in self.depends_cache:
            if self.depends_cache.has_key(key):
                self.depends_cache[key].append(value)
            else:
                self.depends_cache[key] = [value]
@@ -526,25 +526,9 @@ class Cache(object):

        if hasattr(info_array[0], 'file_checksums'):
            for _, fl in info_array[0].file_checksums.items():
                fl = fl.strip()
                while fl:
                    # A .split() would be simpler but means spaces or colons in filenames would break
                    a = fl.find(":True")
                    b = fl.find(":False")
                    if ((a < 0) and b) or ((b > 0) and (b < a)):
                        f = fl[:b+6]
                        fl = fl[b+7:]
                    elif ((b < 0) and a) or ((a > 0) and (a < b)):
                        f = fl[:a+5]
                        fl = fl[a+6:]
                    else:
                        break
                    fl = fl.strip()
                    if "*" in f:
                        continue
                    f, exist = f.split(":")
                    if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
                        logger.debug(2, "Cache: %s's file checksum list file %s changed",
                for f in fl.split():
                    if not ('*' in f or os.path.exists(f)):
                        logger.debug(2, "Cache: %s's file checksum list file %s was removed",
                                     fn, f)
                        self.remove(fn)
                        return False
@@ -634,13 +618,10 @@ class Cache(object):
    def mtime(cachefile):
        return bb.parse.cached_mtime_noerror(cachefile)

    def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
    def add_info(self, filename, info_array, cacheData, parsed=None):
        if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
            cacheData.add_from_recipeinfo(filename, info_array)

            if watcher:
                watcher(info_array[0].file_depends)

        if not self.has_cache:
            return

@@ -670,25 +651,25 @@ class Cache(object):
        """
        chdir_back = False

        from bb import parse
        from bb import data, parse

        # expand tmpdir to include this topdir
        config.setVar('TMPDIR', config.getVar('TMPDIR', True) or "")
        data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config)
        bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
        oldpath = os.path.abspath(os.getcwd())
        parse.cached_mtime_noerror(bbfile_loc)
        bb_data = config.createCopy()
        bb_data = data.init_db(config)
        # The ConfHandler first looks if there is a TOPDIR and if not
        # then it would call getcwd().
        # Previously, we chdir()ed to bbfile_loc, called the handler
        # and finally chdir()ed back, a couple of thousand times. We now
        # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
        if not bb_data.getVar('TOPDIR', False):
        if not data.getVar('TOPDIR', bb_data):
            chdir_back = True
            bb_data.setVar('TOPDIR', bbfile_loc)
            data.setVar('TOPDIR', bbfile_loc, bb_data)
        try:
            if appends:
                bb_data.setVar('__BBAPPEND', " ".join(appends))
                data.setVar('__BBAPPEND', " ".join(appends), bb_data)
            bb_data = parse.handle(bbfile, bb_data)
            if chdir_back:
                os.chdir(oldpath)
@@ -711,7 +692,7 @@ def init(cooker):

    * Its mtime
    * The mtimes of all its dependencies
    * Whether it caused a parse.SkipRecipe exception
    * Whether it caused a parse.SkipPackage exception

    Files causing parsing errors are evicted from the cache.

@@ -755,14 +736,13 @@ class MultiProcessCache(object):
        self.cachedata = self.create_cachedata()
        self.cachedata_extras = self.create_cachedata()

    def init_cache(self, d, cache_file_name=None):
    def init_cache(self, d):
        cachedir = (d.getVar("PERSISTENT_DIR", True) or
                    d.getVar("CACHE", True))
        if cachedir in [None, '']:
            return
        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir,
                                      cache_file_name or self.__class__.cache_file_name)
        self.cachefile = os.path.join(cachedir, self.__class__.cache_file_name)
        logger.debug(1, "Using cache in '%s'", self.cachefile)

        glf = bb.utils.lockfile(self.cachefile + ".lock")
@@ -782,11 +762,21 @@ class MultiProcessCache(object):

        self.cachedata = data

    def internSet(self, items):
        new = set()
        for i in items:
            new.add(intern(i))
        return new

    def compress_keys(self, data):
        # Override in subclasses if desired
        return

    def create_cachedata(self):
        data = [{}]
        return data

    def save_extras(self):
    def save_extras(self, d):
        if not self.cachefile:
            return

@@ -816,13 +806,21 @@ class MultiProcessCache(object):
                    if h not in dest[j]:
                        dest[j][h] = source[j][h]

    def save_merge(self):
    def save_merge(self, d):
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        data = self.cachedata
        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except (IOError, EOFError):
            data, version = None, None

        if version != self.__class__.CACHE_VERSION:
            data = self.create_cachedata()

        for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
            f = os.path.join(os.path.dirname(self.cachefile), f)
@@ -831,16 +829,16 @@ class MultiProcessCache(object):
                    p = pickle.Unpickler(fd)
                    extradata, version = p.load()
            except (IOError, EOFError):
                os.unlink(f)
                continue
                extradata, version = self.create_cachedata(), None

            if version != self.__class__.CACHE_VERSION:
                os.unlink(f)
                continue

            self.merge_data(extradata, data)
            os.unlink(f)

        self.compress_keys(data)

        with open(self.cachefile, "wb") as f:
            p = pickle.Pickler(f, -1)
            p.dump([data, self.__class__.CACHE_VERSION])

@@ -15,8 +15,6 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import glob
import operator
import os
import stat
import bb.utils
@@ -90,50 +88,3 @@ class FileChecksumCache(MultiProcessCache):
                    dest[0][h] = source[0][h]
            else:
                dest[0][h] = source[0][h]

    def get_checksums(self, filelist, pn):
        """Get checksums for a list of files"""

        def checksum_file(f):
            try:
                checksum = self.get_checksum(f)
            except OSError as e:
                bb.warn("Unable to get checksum for %s SRC_URI entry %s: %s" % (pn, os.path.basename(f), e))
                return None
            return checksum

        def checksum_dir(pth):
            # Handle directories recursively
            dirchecksums = []
            for root, dirs, files in os.walk(pth):
                for name in files:
                    fullpth = os.path.join(root, name)
                    checksum = checksum_file(fullpth)
                    if checksum:
                        dirchecksums.append((fullpth, checksum))
            return dirchecksums

        checksums = []
        for pth in filelist.split():
            exist = pth.split(":")[1]
            if exist == "False":
                continue
            pth = pth.split(":")[0]
            if '*' in pth:
                # Handle globs
                for f in glob.glob(pth):
                    if os.path.isdir(f):
                        if not os.path.islink(f):
                            checksums.extend(checksum_dir(f))
                    else:
                        checksum = checksum_file(f)
                        checksums.append((f, checksum))
            elif os.path.isdir(pth):
                if not os.path.islink(pth):
                    checksums.extend(checksum_dir(pth))
            else:
                checksum = checksum_file(pth)
                checksums.append((pth, checksum))

        checksums.sort(key=operator.itemgetter(1))
        return checksums

@@ -28,96 +28,14 @@ def check_indent(codestr):
        return codestr

    if codestr[i-1] == "\t" or codestr[i-1] == " ":
        if codestr[0] == "\n":
            # Since we're adding a line, we need to remove one line of any empty padding
            # to ensure line numbers are correct
            codestr = codestr[1:]
        return "if 1:\n" + codestr

    return codestr


# Basically pickle, in python 2.7.3 at least, does badly with data duplication
# upon pickling and unpickling. Combine this with duplicate objects and things
# are a mess.
#
# When the sets are originally created, python calls intern() on the set keys
# which significantly improves memory usage. Sadly the pickle/unpickle process
# doesn't call intern() on the keys and results in the same strings being duplicated
# in memory. This also means pickle will save the same string multiple times in
# the cache file.
#
# By having shell and python cacheline objects with setstate/getstate, we force
# the object creation through our own routine where we can call intern (via internSet).
#
# We also use hashable frozensets and ensure we use references to these so that
# duplicates can be removed, both in memory and in the resulting pickled data.
#
# By playing these games, the size of the cache file shrinks dramatically
# meaning faster load times and the reloaded cache files also consume much less
# memory. Smaller cache files, faster load times and lower memory usage is good.
#
# A custom getstate/setstate using tuples is actually worth 15% cachesize by
# avoiding duplication of the attribute names!

class SetCache(object):
    def __init__(self):
        self.setcache = {}

    def internSet(self, items):

        new = []
        for i in items:
            new.append(intern(i))
        s = frozenset(new)
        if hash(s) in self.setcache:
            return self.setcache[hash(s)]
        self.setcache[hash(s)] = s
        return s

codecache = SetCache()

class pythonCacheLine(object):
    def __init__(self, refs, execs, contains):
        self.refs = codecache.internSet(refs)
        self.execs = codecache.internSet(execs)
        self.contains = {}
        for c in contains:
            self.contains[c] = codecache.internSet(contains[c])

    def __getstate__(self):
        return (self.refs, self.execs, self.contains)

    def __setstate__(self, state):
        (refs, execs, contains) = state
        self.__init__(refs, execs, contains)
    def __hash__(self):
        l = (hash(self.refs), hash(self.execs))
        for c in sorted(self.contains.keys()):
            l = l + (c, hash(self.contains[c]))
        return hash(l)
    def __repr__(self):
        return " ".join([str(self.refs), str(self.execs), str(self.contains)])


class shellCacheLine(object):
    def __init__(self, execs):
        self.execs = codecache.internSet(execs)

    def __getstate__(self):
        return (self.execs)

    def __setstate__(self, state):
        (execs) = state
        self.__init__(execs)
    def __hash__(self):
        return hash(self.execs)
    def __repr__(self):
        return str(self.execs)

class CodeParserCache(MultiProcessCache):
    cache_file_name = "bb_codeparser.dat"
    CACHE_VERSION = 7
    CACHE_VERSION = 6

    def __init__(self):
        MultiProcessCache.__init__(self)
@@ -126,38 +44,32 @@ class CodeParserCache(MultiProcessCache):
        self.pythoncacheextras = self.cachedata_extras[0]
        self.shellcacheextras = self.cachedata_extras[1]

        # To avoid duplication in the codeparser cache, keep
        # a lookup of hashes of objects we already have
        self.pythoncachelines = {}
        self.shellcachelines = {}

    def newPythonCacheLine(self, refs, execs, contains):
        cacheline = pythonCacheLine(refs, execs, contains)
        h = hash(cacheline)
        if h in self.pythoncachelines:
            return self.pythoncachelines[h]
        self.pythoncachelines[h] = cacheline
        return cacheline

    def newShellCacheLine(self, execs):
        cacheline = shellCacheLine(execs)
        h = hash(cacheline)
        if h in self.shellcachelines:
            return self.shellcachelines[h]
        self.shellcachelines[h] = cacheline
        return cacheline

    def init_cache(self, d):
        # Check if we already have the caches
        if self.pythoncache:
            return

        MultiProcessCache.init_cache(self, d)

        # cachedata gets re-assigned in the parent
        self.pythoncache = self.cachedata[0]
        self.shellcache = self.cachedata[1]

    def compress_keys(self, data):
        # When the dicts are originally created, python calls intern() on the set keys
        # which significantly improves memory usage. Sadly the pickle/unpickle process
        # doesn't call intern() on the keys and results in the same strings being duplicated
        # in memory. This also means pickle will save the same string multiple times in
        # the cache file. By interning the data here, the cache file shrinks dramatically
        # meaning faster load times and the reloaded cache files also consume much less
        # memory. This is worth any performance hit from this loops and the use of the
        # intern() data storage.
        # Python 3.x may behave better in this area
        for h in data[0]:
            data[0][h]["refs"] = self.internSet(data[0][h]["refs"])
            data[0][h]["execs"] = self.internSet(data[0][h]["execs"])
            for k in data[0][h]["contains"]:
                data[0][h]["contains"][k] = self.internSet(data[0][h]["contains"][k])
        for h in data[1]:
            data[1][h]["execs"] = self.internSet(data[1][h]["execs"])
        return

    def create_cachedata(self):
        data = [{}, {}]
        return data
@@ -167,11 +79,11 @@ codeparsercache = CodeParserCache()
def parser_cache_init(d):
    codeparsercache.init_cache(d)

def parser_cache_save():
    codeparsercache.save_extras()
def parser_cache_save(d):
    codeparsercache.save_extras(d)

def parser_cache_savemerge():
    codeparsercache.save_merge()
def parser_cache_savemerge(d):
    codeparsercache.save_merge(d)

Logger = logging.getLoggerClass()
class BufferedLogger(Logger):
@@ -191,7 +103,7 @@ class BufferedLogger(Logger):

class PythonParser():
    getvars = (".getVar", ".appendVar", ".prependVar")
    containsfuncs = ("bb.utils.contains", "base_contains", "bb.utils.contains_any")
    containsfuncs = ("bb.utils.contains", "base_contains", "oe.utils.contains")
    execfuncs = ("bb.build.exec_func", "bb.build.exec_task")

    def warn(self, func, arg):
@@ -221,17 +133,6 @@ class PythonParser():
                self.references.add(node.args[0].s)
            else:
                self.warn(node.func, node.args[0])
        elif name and name.endswith(".expand"):
            if isinstance(node.args[0], ast.Str):
                value = node.args[0].s
                d = bb.data.init()
                parser = d.expandWithRefs(value, self.name)
                self.references |= parser.references
                self.execs |= parser.execs
                for varname in parser.contains:
                    if varname not in self.contains:
                        self.contains[varname] = set()
                    self.contains[varname] |= parser.contains[varname]
        elif name in self.execfuncs:
            if isinstance(node.args[0], ast.Str):
                self.var_execs.add(node.args[0].s)
@@ -254,7 +155,6 @@ class PythonParser():
                break

    def __init__(self, name, log):
        self.name = name
        self.var_execs = set()
        self.contains = {}
        self.execs = set()
@@ -264,31 +164,22 @@ class PythonParser():
        self.unhandled_message = "in call of %s, argument '%s' is not a string literal"
        self.unhandled_message = "while parsing %s, %s" % (name, self.unhandled_message)

    def parse_python(self, node, lineno=0, filename="<string>"):
        if not node or not node.strip():
            return

    def parse_python(self, node):
        h = hash(str(node))

        if h in codeparsercache.pythoncache:
            self.references = set(codeparsercache.pythoncache[h].refs)
            self.execs = set(codeparsercache.pythoncache[h].execs)
            self.contains = {}
            for i in codeparsercache.pythoncache[h].contains:
                self.contains[i] = set(codeparsercache.pythoncache[h].contains[i])
            self.references = codeparsercache.pythoncache[h]["refs"]
            self.execs = codeparsercache.pythoncache[h]["execs"]
            self.contains = codeparsercache.pythoncache[h]["contains"]
            return

        if h in codeparsercache.pythoncacheextras:
            self.references = set(codeparsercache.pythoncacheextras[h].refs)
            self.execs = set(codeparsercache.pythoncacheextras[h].execs)
            self.contains = {}
            for i in codeparsercache.pythoncacheextras[h].contains:
                self.contains[i] = set(codeparsercache.pythoncacheextras[h].contains[i])
            self.references = codeparsercache.pythoncacheextras[h]["refs"]
            self.execs = codeparsercache.pythoncacheextras[h]["execs"]
            self.contains = codeparsercache.pythoncacheextras[h]["contains"]
            return

        # We can't add to the linenumbers for compile, we can pad to the correct number of blank lines though
        node = "\n" * int(lineno) + node
        code = compile(check_indent(str(node)), filename, "exec",
        code = compile(check_indent(str(node)), "<string>", "exec",
                       ast.PyCF_ONLY_AST)

        for n in ast.walk(code):
@@ -297,7 +188,10 @@ class PythonParser():

        self.execs.update(self.var_execs)

        codeparsercache.pythoncacheextras[h] = codeparsercache.newPythonCacheLine(self.references, self.execs, self.contains)
        codeparsercache.pythoncacheextras[h] = {}
        codeparsercache.pythoncacheextras[h]["refs"] = self.references
        codeparsercache.pythoncacheextras[h]["execs"] = self.execs
        codeparsercache.pythoncacheextras[h]["contains"] = self.contains

class ShellParser():
    def __init__(self, name, log):
@@ -316,17 +210,18 @@ class ShellParser():
        h = hash(str(value))

        if h in codeparsercache.shellcache:
            self.execs = set(codeparsercache.shellcache[h].execs)
            self.execs = codeparsercache.shellcache[h]["execs"]
            return self.execs

        if h in codeparsercache.shellcacheextras:
            self.execs = set(codeparsercache.shellcacheextras[h].execs)
            self.execs = codeparsercache.shellcacheextras[h]["execs"]
            return self.execs

        self._parse_shell(value)
        self.execs = set(cmd for cmd in self.allexecs if cmd not in self.funcdefs)

        codeparsercache.shellcacheextras[h] = codeparsercache.newShellCacheLine(self.execs)
        codeparsercache.shellcacheextras[h] = {}
        codeparsercache.shellcacheextras[h]["execs"] = self.execs

        return self.execs

@@ -68,12 +68,10 @@ class Command:
            if not hasattr(command_method, 'readonly') or False == getattr(command_method, 'readonly'):
                return None, "Not able to execute not readonly commands in readonly mode"
        try:
            if getattr(command_method, 'needconfig', False):
                self.cooker.updateCacheSync()
            result = command_method(self, commandline)
        except CommandError as exc:
            return None, exc.args[0]
        except (Exception, SystemExit):
        except Exception:
            import traceback
            return None, traceback.format_exc()
        else:
@@ -88,10 +86,7 @@ class Command:

    def runAsyncCommand(self):
        try:
            if self.cooker.state in (bb.cooker.state.error, bb.cooker.state.shutdown, bb.cooker.state.forceshutdown):
                # updateCache will trigger a shutdown of the parser
                # and then raise BBHandledException triggering an exit
                self.cooker.updateCache()
            if self.cooker.state == bb.cooker.state.error:
                return False
            if self.currentAsyncCommand is not None:
                (command, options) = self.currentAsyncCommand
@@ -125,11 +120,11 @@ class Command:

    def finishAsyncCommand(self, msg=None, code=None):
        if msg or msg == "":
            bb.event.fire(CommandFailed(msg), self.cooker.expanded_data)
            bb.event.fire(CommandFailed(msg), self.cooker.event_data)
        elif code:
            bb.event.fire(CommandExit(code), self.cooker.expanded_data)
            bb.event.fire(CommandExit(code), self.cooker.event_data)
        else:
            bb.event.fire(CommandCompleted(), self.cooker.expanded_data)
            bb.event.fire(CommandCompleted(), self.cooker.event_data)
        self.currentAsyncCommand = None
        self.cooker.finishcommand()

@@ -181,16 +176,6 @@ class CommandsSync:
        value = str(params[1])
        command.cooker.data.setVar(varname, value)

    def getSetVariable(self, command, params):
        """
        Read the value of a variable from data and set it into the datastore
        which effectively expands and locks the value.
        """
        varname = params[0]
        result = self.getVariable(command, params)
        command.cooker.data.setVar(varname, result)
        return result

    def setConfig(self, command, params):
        """
        Set the value of variable in configuration
@@ -216,7 +201,6 @@ class CommandsSync:
        postfiles = params[1].split()
        command.cooker.configuration.prefile = prefiles
        command.cooker.configuration.postfile = postfiles
    setPrePostConfFiles.needconfig = False

    def getCpuCount(self, command, params):
        """
@@ -224,12 +208,10 @@ class CommandsSync:
        """
        return bb.utils.cpu_count()
    getCpuCount.readonly = True
    getCpuCount.needconfig = False

    def matchFile(self, command, params):
        fMatch = params[0]
        return command.cooker.matchFile(fMatch)
    matchFile.needconfig = False

    def generateNewImage(self, command, params):
        image = params[0]
@@ -243,7 +225,6 @@ class CommandsSync:
    def ensureDir(self, command, params):
        directory = params[0]
        bb.utils.mkdirhier(directory)
    ensureDir.needconfig = False

    def setVarFile(self, command, params):
        """
@@ -254,7 +235,6 @@ class CommandsSync:
        default_file = params[2]
        op = params[3]
        command.cooker.modifyConfigurationVar(var, val, default_file, op)
    setVarFile.needconfig = False

    def removeVarFile(self, command, params):
        """
@@ -262,7 +242,6 @@ class CommandsSync:
        """
        var = params[0]
        command.cooker.removeConfigurationVar(var)
    removeVarFile.needconfig = False

    def createConfigFile(self, command, params):
        """
@@ -270,7 +249,6 @@ class CommandsSync:
        """
        name = params[0]
        command.cooker.createConfigFile(name)
    createConfigFile.needconfig = False

    def setEventMask(self, command, params):
        handlerNum = params[0]
@@ -278,8 +256,6 @@ class CommandsSync:
        debug_domains = params[2]
        mask = params[3]
        return bb.event.set_UIHmask(handlerNum, llevel, debug_domains, mask)
    setEventMask.needconfig = False
    setEventMask.readonly = True

    def setFeatures(self, command, params):
        """
@@ -287,17 +263,11 @@ class CommandsSync:
        """
        features = params[0]
        command.cooker.setFeatures(features)
    setFeatures.needconfig = False

    # although we change the internal state of the cooker, this is transparent since
    # we always take and leave the cooker in state.initial
    setFeatures.readonly = True

    def updateConfig(self, command, params):
        options = params[0]
        environment = params[1]
        command.cooker.updateConfigOpts(options, environment)
    updateConfig.needconfig = False

class CommandsAsync:
    """
    A class of asynchronous commands

File diff suppressed because it is too large
@@ -33,8 +33,8 @@ logger = logging.getLogger("BitBake")
parselog = logging.getLogger("BitBake.Parsing")

class ConfigParameters(object):
    def __init__(self, argv=sys.argv):
        self.options, targets = self.parseCommandLine(argv)
    def __init__(self):
        self.options, targets = self.parseCommandLine()
        self.environment = self.parseEnvironment()

        self.options.pkgs_to_build = targets or []
@@ -46,7 +46,7 @@ class ConfigParameters(object):
        for key, val in self.options.__dict__.items():
            setattr(self, key, val)

    def parseCommandLine(self, argv=sys.argv):
    def parseCommandLine(self):
        raise Exception("Caller must implement commandline option parsing")

    def parseEnvironment(self):
@@ -63,24 +63,12 @@ class ConfigParameters(object):
                raise Exception("Unable to set configuration option 'cmd' on the server: %s" % error)

        if not self.options.pkgs_to_build:
            bbpkgs, error = server.runCommand(["getVariable", "BBTARGETS"])
            bbpkgs, error = server.runCommand(["getVariable", "BBPKGS"])
            if error:
                raise Exception("Unable to get the value of BBTARGETS from the server: %s" % error)
                raise Exception("Unable to get the value of BBPKGS from the server: %s" % error)
            if bbpkgs:
                self.options.pkgs_to_build.extend(bbpkgs.split())

    def updateToServer(self, server, environment):
        options = {}
        for o in ["abort", "tryaltconfigs", "force", "invalidate_stamp",
                  "verbose", "debug", "dry_run", "dump_signatures",
                  "debug_domains", "extra_assume_provided", "profile",
                  "prefile", "postfile"]:
            options[o] = getattr(self.options, o)

        ret, error = server.runCommand(["updateConfig", options, environment])
        if error:
            raise Exception("Unable to update the server configuration with local parameters: %s" % error)

    def parseActions(self):
        # Parse any commandline into actions
        action = {'action':None, 'msg':None}
@@ -129,21 +117,17 @@ class CookerConfiguration(object):
        self.extra_assume_provided = []
        self.prefile = []
        self.postfile = []
        self.prefile_server = []
        self.postfile_server = []
        self.debug = 0
        self.cmd = None
        self.abort = True
        self.force = False
        self.profile = False
        self.nosetscene = False
        self.setsceneonly = False
        self.invalidate_stamp = False
        self.dump_signatures = []
        self.dry_run = False
        self.tracking = False
        self.interface = []
        self.writeeventlog = False

        self.env = {}

@@ -177,26 +161,11 @@ def catch_parse_error(func):
    def wrapped(fn, *args):
        try:
            return func(fn, *args)
        except IOError as exc:
        except (IOError, bb.parse.ParseError, bb.data_smart.ExpansionError) as exc:
            import traceback
            parselog.critical(traceback.format_exc())
            parselog.critical( traceback.format_exc())
            parselog.critical("Unable to parse %s: %s" % (fn, exc))
            sys.exit(1)
        except bb.data_smart.ExpansionError as exc:
            import traceback

            bbdir = os.path.dirname(__file__) + os.sep
            exc_class, exc, tb = sys.exc_info()
            for tb in iter(lambda: tb.tb_next, None):
                # Skip frames in bitbake itself, we only want the metadata
                fn, _, _, _ = traceback.extract_tb(tb, 1)[0]
                if not fn.startswith(bbdir):
                    break
            parselog.critical("Unable to parse %s" % fn, exc_info=(exc_class, exc, tb))
            sys.exit(1)
        except bb.parse.ParseError as exc:
            parselog.critical(str(exc))
            sys.exit(1)
    return wrapped

@catch_parse_error
@@ -258,13 +227,10 @@ class CookerDataBuilder(object):
        try:
            self.parseConfigurationFiles(self.prefiles, self.postfiles)
        except SyntaxError:
            raise bb.BBHandledException
        except bb.data_smart.ExpansionError as e:
            logger.error(str(e))
            raise bb.BBHandledException
            sys.exit(1)
        except Exception:
            logger.exception("Error parsing configuration files")
            raise bb.BBHandledException
            sys.exit(1)

    def _findLayerConf(self, data):
        return findConfigFile("bblayers.conf", data)
@@ -288,13 +254,8 @@ class CookerDataBuilder(object):
            layers = (data.getVar('BBLAYERS', True) or "").split()

            data = bb.data.createCopy(data)
            approved = bb.utils.approved_variables()
            for layer in layers:
                parselog.debug(2, "Adding layer %s", layer)
                if 'HOME' in approved and '~' in layer:
                    layer = os.path.expanduser(layer)
                if layer.endswith('/'):
                    layer = layer.rstrip('/')
                data.setVar('LAYERDIR', layer)
                data = parse_config_file(os.path.join(layer, "conf", "layer.conf"), data)
                data.expandVarref('LAYERDIR')
@@ -322,17 +283,15 @@ class CookerDataBuilder(object):

        # Nomally we only register event handlers at the end of parsing .bb files
        # We register any handlers we've found so far here...
        for var in data.getVar('__BBHANDLERS', False) or []:
            handlerfn = data.getVarFlag(var, "filename", False)
            handlerln = int(data.getVarFlag(var, "lineno", False))
            bb.event.register(var, data.getVar(var, False), (data.getVarFlag(var, "eventmask", True) or "").split(), handlerfn, handlerln)
        for var in data.getVar('__BBHANDLERS') or []:
            bb.event.register(var, data.getVar(var), (data.getVarFlag(var, "eventmask", True) or "").split())

        if data.getVar("BB_WORKERCONTEXT", False) is None:
            bb.fetch.fetcher_init(data)
        bb.codeparser.parser_cache_init(data)
        bb.event.fire(bb.event.ConfigParsed(), data)

        if data.getVar("BB_INVALIDCONF", False) is True:
        if data.getVar("BB_INVALIDCONF") is True:
            data.setVar("BB_INVALIDCONF", False)
            self.parseConfigurationFiles(self.prefiles, self.postfiles)
            return

@@ -1,5 +1,5 @@
"""
Python Daemonizing helper
Python Deamonizing helper

Configurable daemon behaviors:

@@ -12,11 +12,8 @@ A failed call to fork() now raises an exception.

References:
    1) Advanced Programming in the Unix Environment: W. Richard Stevens
       http://www.apuebook.com/apue3e.html
    2) The Linux Programming Interface: Michael Kerrisk
       http://man7.org/tlpi/index.html
    3) Unix Programming Frequently Asked Questions:
       http://www.faqs.org/faqs/unix-faq/programmer/faq/
    2) Unix Programming Frequently Asked Questions:
       http://www.erlenstar.demon.co.uk/unix/faq_toc.html

Modified to allow a function to be daemonized and return for
bitbake use by Richard Purdie

@@ -28,7 +25,7 @@ __version__ = "0.2"

# Standard Python modules.
import os       # Miscellaneous OS interfaces.
import sys      # System-specific parameters and functions.
import sys      # System-specific parameters and functions.

# Default daemon parameters.
# File mode creation mask of the daemon.

@@ -131,7 +128,7 @@ def createDaemon(function, logfile):
    # of methods to accomplish this task. Three are listed below.
    #
    # Try the system configuration variable, SC_OPEN_MAX, to obtain the maximum
    # number of open file descriptors to close. If it doesn't exist, use
    # number of open file descriptors to close. If it doesn't exists, use
    # the default value (configurable).
    #
    # try:

@@ -149,7 +146,7 @@ def createDaemon(function, logfile):
    # OR
    #
    # Use the getrlimit method to retrieve the maximum file descriptor number
    # that can be opened by this process. If there is no limit on the
    # that can be opened by this process. If there is not limit on the
    # resource, use the default value.
    #
    import resource      # Resource usage information.

@@ -178,8 +175,8 @@ def createDaemon(function, logfile):
    # os.dup2(0, 2)      # standard error (2)


    si = open('/dev/null', 'r')
    so = open(logfile, 'w')
    si = file('/dev/null', 'r')
    so = file(logfile, 'w')
    se = so
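
Both descriptor hunks above concern the same question: how many file descriptors should the daemon close after forking? A small sketch of the two probes the comments describe, with a configurable fallback when neither gives a usable answer:

import os
import resource

MAXFD = 1024  # configurable default, as in the module above

def max_open_fds():
    # First choice: ask getrlimit for the per-process descriptor ceiling.
    maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
    if maxfd == resource.RLIM_INFINITY:
        # Second choice: the sysconf value, if the platform provides one.
        try:
            maxfd = os.sysconf("SC_OPEN_MAX")
        except (ValueError, OSError):
            maxfd = MAXFD
    return maxfd

print(max_open_fds())
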
@@ -6,7 +6,7 @@ BitBake 'Data' implementations

Functions for interacting with the data structure used by the
BitBake build tools.

The expandKeys and update_data are the most expensive
The expandData and update_data are the most expensive
operations. At night the cookie monster came by and
suggested 'give me cookies on setting the variables and
things will work out'. Taking this suggestion into account

@@ -15,7 +15,7 @@ Analyse von Algorithmen' lecture and the cookie
monster seems to be right. We will track setVar more carefully
to have faster update_data and expandKeys operations.

This is a trade-off between speed and memory again but
This is a treade-off between speed and memory again but
the speed is more critical here.
"""

@@ -35,7 +35,7 @@ the speed is more critical here.
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
#Based on functions from the base bb module, Copyright 2003 Holger Schurig

import sys, os, re
if sys.argv[0][-5:] == "pydoc":
@@ -84,7 +84,7 @@ def setVar(var, value, d):
    d.setVar(var, value)


def getVar(var, d, exp = False):
def getVar(var, d, exp = 0):
    """Gets the value of a variable"""
    return d.getVar(var, exp)

@@ -107,7 +107,7 @@ def setVarFlag(var, flag, flagvalue, d):

def getVarFlag(var, flag, d):
    """Gets given flag from given var"""
    return d.getVarFlag(var, flag, False)
    return d.getVarFlag(var, flag)

def delVarFlag(var, flag, d):
    """Removes a given flag from the variable's flags"""
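
These bb.data functions are thin compatibility wrappers around the datastore object's own methods; the change here only makes the expand argument and the flag lookup explicit. A sketch of the wrapper pattern with a dict-backed stand-in (illustrative only, not the real DataSmart class):

class FakeDataStore:
    # Dict-backed stand-in for bb.data_smart.DataSmart.
    def __init__(self):
        self._vars = {}

    def setVar(self, var, value):
        self._vars[var] = value

    def getVar(self, var, expand=False):
        # A real datastore would expand ${references} here when asked.
        return self._vars.get(var)

def getVar(var, d, exp=False):
    """Module-level wrapper, mirroring bb.data.getVar above."""
    return d.getVar(var, exp)

d = FakeDataStore()
d.setVar("PN", "example")
print(getVar("PN", d))  # -> example
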
@@ -159,12 +159,13 @@ def expandKeys(alterdata, readdata = None):

    # These two for loops are split for performance to maximise the
    # usefulness of the expand cache
    for key in sorted(todolist):

    for key in todolist:
        ekey = todolist[key]
        newval = alterdata.getVar(ekey, False)
        if newval is not None:
            val = alterdata.getVar(key, False)
            if val is not None:
        newval = alterdata.getVar(ekey, 0)
        if newval:
            val = alterdata.getVar(key, 0)
            if val is not None and newval is not None:
                bb.warn("Variable key %s (%s) replaces original key %s (%s)." % (key, val, ekey, newval))
        alterdata.renameVar(key, ekey)
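
expandKeys renames variables whose names contain references, so a key set as RDEPENDS_${PN} becomes RDEPENDS_mypkg once PN is known, warning if the expanded name collides with a key that was set directly; sorting the todo list makes the rename order, and hence the warnings, deterministic. A minimal sketch of that rename pass over a plain dict (illustrative, not the BitBake implementation):

def expand_keys(data):
    # Map each unexpanded key to its expanded form, e.g. "RDEPENDS_${PN}".
    todolist = {}
    for key in list(data):
        if "${" in key:
            ekey = key
            for var, val in data.items():
                ekey = ekey.replace("${%s}" % var, str(val))
            todolist[key] = ekey
    # Sorted iteration keeps the rename order deterministic, as above.
    for key in sorted(todolist):
        ekey = todolist[key]
        if ekey in data:
            print("warning: %s replaces original key %s" % (key, ekey))
        data[ekey] = data.pop(key)

d = {"PN": "mypkg", "RDEPENDS_${PN}": "bash"}
expand_keys(d)
print(d)  # {'PN': 'mypkg', 'RDEPENDS_mypkg': 'bash'}
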
@@ -174,7 +175,7 @@ def inheritFromOS(d, savedenv, permitted):
    for s in savedenv.keys():
        if s in permitted:
            try:
                d.setVar(s, savedenv.getVar(s, True), op = 'from env')
                d.setVar(s, getVar(s, savedenv, True), op = 'from env')
                if s in exportlist:
                    d.setVarFlag(s, "export", True, op = 'auto env export')
            except TypeError:
@@ -182,52 +183,44 @@ def inheritFromOS(d, savedenv, permitted):

def emit_var(var, o=sys.__stdout__, d = init(), all=False):
    """Emit a variable to be sourced by a shell."""
    func = d.getVarFlag(var, "func", False)
    if d.getVarFlag(var, 'python', False) and func:
        return False
    if getVarFlag(var, "python", d):
        return 0

    export = d.getVarFlag(var, "export", False)
    unexport = d.getVarFlag(var, "unexport", False)
    export = getVarFlag(var, "export", d)
    unexport = getVarFlag(var, "unexport", d)
    func = getVarFlag(var, "func", d)
    if not all and not export and not unexport and not func:
        return False
        return 0

    try:
        if all:
            oval = d.getVar(var, False)
        val = d.getVar(var, True)
            oval = getVar(var, d, 0)
        val = getVar(var, d, 1)
    except (KeyboardInterrupt, bb.build.FuncFailed):
        raise
    except Exception as exc:
        o.write('# expansion of %s threw %s: %s\n' % (var, exc.__class__.__name__, str(exc)))
        return False
        return 0

    if all:
        d.varhistory.emit(var, oval, val, o, d)
        d.varhistory.emit(var, oval, val, o)

    if (var.find("-") != -1 or var.find(".") != -1 or var.find('{') != -1 or var.find('}') != -1 or var.find('+') != -1) and not all:
        return False
        return 0

    varExpanded = d.expand(var)
    varExpanded = expand(var, d)

    if unexport:
        o.write('unset %s\n' % varExpanded)
        return False
        return 0

    if val is None:
        return False
        return 0

    val = str(val)

    if varExpanded.startswith("BASH_FUNC_"):
        varExpanded = varExpanded[10:-2]
        val = val[3:] # Strip off "() "
        o.write("%s() %s\n" % (varExpanded, val))
        o.write("export -f %s\n" % (varExpanded))
        return True

    if func:
        # NOTE: should probably check for unbalanced {} within the var
        val = val.rstrip('\n')
        o.write("%s() {\n%s\n}\n" % (varExpanded, val))
        return 1
|
||||
alter = re.sub('\n', ' \\\n', alter)
|
||||
alter = re.sub('\\$', '\\\\$', alter)
|
||||
o.write('%s="%s"\n' % (varExpanded, alter))
|
||||
return False
|
||||
return 0
|
||||
|
||||
def emit_env(o=sys.__stdout__, d = init(), all=False):
|
||||
"""Emits all items in the data store in a format such that it can be sourced by a shell."""
|
||||
|
||||
isfunc = lambda key: bool(d.getVarFlag(key, "func", False))
|
||||
isfunc = lambda key: bool(d.getVarFlag(key, "func"))
|
||||
keys = sorted((key for key in d.keys() if not key.startswith("__")), key=isfunc)
|
||||
grouped = groupby(keys, isfunc)
|
||||
for isfunc, keys in grouped:
|
||||
@@ -254,8 +247,8 @@ def emit_env(o=sys.__stdout__, d = init(), all=False):
|
||||
|
||||
def exported_keys(d):
|
||||
return (key for key in d.keys() if not key.startswith('__') and
|
||||
d.getVarFlag(key, 'export', False) and
|
||||
not d.getVarFlag(key, 'unexport', False))
|
||||
d.getVarFlag(key, 'export') and
|
||||
not d.getVarFlag(key, 'unexport'))
|
||||
|
||||
def exported_vars(d):
|
||||
for key in exported_keys(d):
|
||||
@@ -270,11 +263,10 @@ def exported_vars(d):
|
||||
def emit_func(func, o=sys.__stdout__, d = init()):
|
||||
"""Emits all items in the data store in a format such that it can be sourced by a shell."""
|
||||
|
||||
keys = (key for key in d.keys() if not key.startswith("__") and not d.getVarFlag(key, "func", False))
|
||||
keys = (key for key in d.keys() if not key.startswith("__") and not d.getVarFlag(key, "func"))
|
||||
for key in keys:
|
||||
emit_var(key, o, d, False)
|
||||
emit_var(key, o, d, False) and o.write('\n')
|
||||
|
||||
o.write('\n')
|
||||
emit_var(func, o, d, False) and o.write('\n')
|
||||
newdeps = bb.codeparser.ShellParser(func, logger).parse_shell(d.getVar(func, True))
|
||||
newdeps |= set((d.getVarFlag(func, "vardeps", True) or "").split())
|
||||
@@ -284,47 +276,12 @@ def emit_func(func, o=sys.__stdout__, d = init()):
|
||||
seen |= deps
|
||||
newdeps = set()
|
||||
for dep in deps:
|
||||
if d.getVarFlag(dep, "func", False) and not d.getVarFlag(dep, "python", False):
|
||||
if d.getVarFlag(dep, "func") and not d.getVarFlag(dep, "python"):
|
||||
emit_var(dep, o, d, False) and o.write('\n')
|
||||
newdeps |= bb.codeparser.ShellParser(dep, logger).parse_shell(d.getVar(dep, True))
|
||||
newdeps |= set((d.getVarFlag(dep, "vardeps", True) or "").split())
|
||||
newdeps -= seen
|
||||
|
||||
_functionfmt = """
|
||||
def {function}(d):
|
||||
{body}"""
|
||||
|
||||
def emit_func_python(func, o=sys.__stdout__, d = init()):
|
||||
"""Emits all items in the data store in a format such that it can be sourced by a shell."""
|
||||
|
||||
def write_func(func, o, call = False):
|
||||
body = d.getVar(func, False)
|
||||
if not body.startswith("def"):
|
||||
body = _functionfmt.format(function=func, body=body)
|
||||
|
||||
o.write(body.strip() + "\n\n")
|
||||
if call:
|
||||
o.write(func + "(d)" + "\n\n")
|
||||
|
||||
write_func(func, o, True)
|
||||
pp = bb.codeparser.PythonParser(func, logger)
|
||||
pp.parse_python(d.getVar(func, False))
|
||||
newdeps = pp.execs
|
||||
newdeps |= set((d.getVarFlag(func, "vardeps", True) or "").split())
|
||||
seen = set()
|
||||
while newdeps:
|
||||
deps = newdeps
|
||||
seen |= deps
|
||||
newdeps = set()
|
||||
for dep in deps:
|
||||
if d.getVarFlag(dep, "func", False) and d.getVarFlag(dep, "python", False):
|
||||
write_func(dep, o)
|
||||
pp = bb.codeparser.PythonParser(dep, logger)
|
||||
pp.parse_python(d.getVar(dep, False))
|
||||
newdeps |= pp.execs
|
||||
newdeps |= set((d.getVarFlag(dep, "vardeps", True) or "").split())
|
||||
newdeps -= seen
|
||||
|
||||
def update_data(d):
|
||||
"""Performs final steps upon the datastore, including application of overrides"""
|
||||
d.finalize(parent = True)
|
||||
@@ -339,7 +296,7 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
|
||||
deps |= parser.references
|
||||
deps = deps | (keys & parser.execs)
|
||||
return deps, value
|
||||
varflags = d.getVarFlags(key, ["vardeps", "vardepvalue", "vardepsexclude", "vardepvalueexclude", "postfuncs", "prefuncs", "lineno", "filename"]) or {}
|
||||
varflags = d.getVarFlags(key, ["vardeps", "vardepvalue", "vardepsexclude", "vardepvalueexclude", "postfuncs", "prefuncs"]) or {}
|
||||
vardeps = varflags.get("vardeps")
|
||||
value = d.getVar(key, False)
|
||||
|
||||
@@ -362,27 +319,27 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
|
||||
value = varflags.get("vardepvalue")
|
||||
elif varflags.get("func"):
|
||||
if varflags.get("python"):
|
||||
parsedvar = d.expandWithRefs(value, key)
|
||||
parser = bb.codeparser.PythonParser(key, logger)
|
||||
if value and "\t" in value:
|
||||
logger.warning("Variable %s contains tabs, please remove these (%s)" % (key, d.getVar("FILE", True)))
|
||||
parser.parse_python(value, filename=varflags.get("filename"), lineno=varflags.get("lineno"))
|
||||
if parsedvar.value and "\t" in parsedvar.value:
|
||||
logger.warn("Variable %s contains tabs, please remove these (%s)" % (key, d.getVar("FILE", True)))
|
||||
parser.parse_python(parsedvar.value)
|
||||
deps = deps | parser.references
|
||||
deps = deps | (keys & parser.execs)
|
||||
value = handle_contains(value, parser.contains, d)
|
||||
else:
|
||||
parsedvar = d.expandWithRefs(value, key)
|
||||
parser = bb.codeparser.ShellParser(key, logger)
|
||||
parser.parse_shell(parsedvar.value)
|
||||
deps = deps | shelldeps
|
||||
deps = deps | parsedvar.references
|
||||
deps = deps | (keys & parser.execs) | (keys & parsedvar.execs)
|
||||
value = handle_contains(value, parsedvar.contains, d)
|
||||
if vardeps is None:
|
||||
parser.log.flush()
|
||||
if "prefuncs" in varflags:
|
||||
deps = deps | set(varflags["prefuncs"].split())
|
||||
if "postfuncs" in varflags:
|
||||
deps = deps | set(varflags["postfuncs"].split())
|
||||
deps = deps | parsedvar.references
|
||||
deps = deps | (keys & parser.execs) | (keys & parsedvar.execs)
|
||||
value = handle_contains(value, parsedvar.contains, d)
|
||||
else:
|
||||
parser = d.expandWithRefs(value, key)
|
||||
deps |= parser.references
|
||||
@@ -407,8 +364,7 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
|
||||
deps |= set((vardeps or "").split())
|
||||
deps -= set(varflags.get("vardepsexclude", "").split())
|
||||
except Exception as e:
|
||||
bb.warn("Exception during build_dependencies for %s" % key)
|
||||
raise
|
||||
raise bb.data_smart.ExpansionError(key, None, e)
|
||||
return deps, value
|
||||
#bb.note("Variable %s references %s and calls %s" % (key, str(deps), str(execs)))
|
||||
#d.setVarFlag(key, "vardeps", deps)
|
||||
@@ -416,13 +372,13 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
|
||||
def generate_dependencies(d):
|
||||
|
||||
keys = set(key for key in d if not key.startswith("__"))
|
||||
shelldeps = set(key for key in d.getVar("__exportlist", False) if d.getVarFlag(key, "export", False) and not d.getVarFlag(key, "unexport", False))
|
||||
shelldeps = set(key for key in d.getVar("__exportlist", False) if d.getVarFlag(key, "export") and not d.getVarFlag(key, "unexport"))
|
||||
varflagsexcl = d.getVar('BB_SIGNATURE_EXCLUDE_FLAGS', True)
|
||||
|
||||
deps = {}
|
||||
values = {}
|
||||
|
||||
tasklist = d.getVar('__BBTASKS', False) or []
|
||||
tasklist = d.getVar('__BBTASKS') or []
|
||||
for task in tasklist:
|
||||
deps[task], values[task] = build_dependencies(task, keys, shelldeps, varflagsexcl, d)
|
||||
newdeps = deps[task]
|
||||
@@ -440,7 +396,7 @@ def generate_dependencies(d):
|
||||
return tasklist, deps, values
|
||||
|
||||
def inherits_class(klass, d):
|
||||
val = d.getVar('__inherit_cache', False) or []
|
||||
val = getVar('__inherit_cache', d) or []
|
||||
needle = os.path.join('classes', '%s.bbclass' % klass)
|
||||
for v in val:
|
||||
if v.endswith(needle):
|
||||
|
||||
@@ -40,7 +40,7 @@ logger = logging.getLogger("BitBake.Data")

__setvar_keyword__ = ["_append", "_prepend", "_remove"]
__setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend|_remove)(_(?P<add>.*))?$')
__expand_var_regexp__ = re.compile(r"\${[^{}@\n\t :]+}")
__expand_var_regexp__ = re.compile(r"\${[^{}@\n\t ]+}")
__expand_python_regexp__ = re.compile(r"\${@.+?}")

def infer_caller_details(loginfo, parent = False, varval = True):
|
||||
@@ -54,36 +54,27 @@ def infer_caller_details(loginfo, parent = False, varval = True):
|
||||
return
|
||||
# Infer caller's likely values for variable (var) and value (value),
|
||||
# to reduce clutter in the rest of the code.
|
||||
above = None
|
||||
def set_above():
|
||||
if varval and ('variable' not in loginfo or 'detail' not in loginfo):
|
||||
try:
|
||||
raise Exception
|
||||
except Exception:
|
||||
tb = sys.exc_info()[2]
|
||||
if parent:
|
||||
return tb.tb_frame.f_back.f_back.f_back
|
||||
above = tb.tb_frame.f_back.f_back
|
||||
else:
|
||||
return tb.tb_frame.f_back.f_back
|
||||
|
||||
if varval and ('variable' not in loginfo or 'detail' not in loginfo):
|
||||
if not above:
|
||||
above = set_above()
|
||||
lcls = above.f_locals.items()
|
||||
above = tb.tb_frame.f_back
|
||||
lcls = above.f_locals.items()
|
||||
for k, v in lcls:
|
||||
if k == 'value' and 'detail' not in loginfo:
|
||||
loginfo['detail'] = v
|
||||
if k == 'var' and 'variable' not in loginfo:
|
||||
loginfo['variable'] = v
|
||||
# Infer file/line/function from traceback
|
||||
# Don't use traceback.extract_stack() since it fills the line contents which
|
||||
# we don't need and that hits stat syscalls
|
||||
if 'file' not in loginfo:
|
||||
if not above:
|
||||
above = set_above()
|
||||
f = above.f_back
|
||||
line = f.f_lineno
|
||||
file = f.f_code.co_filename
|
||||
func = f.f_code.co_name
|
||||
depth = 3
|
||||
if parent:
|
||||
depth = 4
|
||||
file, line, func, text = traceback.extract_stack(limit = depth)[0]
|
||||
loginfo['file'] = file
|
||||
loginfo['line'] = line
|
||||
if func not in loginfo:
|
||||
@@ -147,7 +138,7 @@ class DataContext(dict):
|
||||
|
||||
def __missing__(self, key):
|
||||
value = self.metadata.getVar(key, True)
|
||||
if value is None or self.metadata.getVarFlag(key, 'func', False):
|
||||
if value is None or self.metadata.getVarFlag(key, 'func'):
|
||||
raise KeyError(key)
|
||||
else:
|
||||
return value
|
||||
@@ -240,10 +231,6 @@ class VariableHistory(object):
|
||||
|
||||
if var not in self.variables:
|
||||
self.variables[var] = []
|
||||
if not isinstance(self.variables[var], list):
|
||||
return
|
||||
if 'nodups' in loginfo and loginfo in self.variables[var]:
|
||||
return
|
||||
self.variables[var].append(loginfo.copy())
|
||||
|
||||
def variable(self, var):
|
||||
@@ -252,20 +239,8 @@ class VariableHistory(object):
|
||||
else:
|
||||
return []
|
||||
|
||||
def emit(self, var, oval, val, o, d):
|
||||
def emit(self, var, oval, val, o):
|
||||
history = self.variable(var)
|
||||
|
||||
# Append override history
|
||||
if var in d.overridedata:
|
||||
for (r, override) in d.overridedata[var]:
|
||||
for event in self.variable(r):
|
||||
loginfo = event.copy()
|
||||
if 'flag' in loginfo and not loginfo['flag'].startswith("_"):
|
||||
continue
|
||||
loginfo['variable'] = var
|
||||
loginfo['op'] = 'override[%s]:%s' % (override, loginfo['op'])
|
||||
history.append(loginfo)
|
||||
|
||||
commentVal = re.sub('\n', '\n#', str(oval))
|
||||
if history:
|
||||
if len(history) == 1:
|
||||
@@ -288,7 +263,7 @@ class VariableHistory(object):
|
||||
flag = ''
|
||||
o.write("# %s %s:%s%s\n# %s\"%s\"\n" % (event['op'], event['file'], event['line'], display_func, flag, re.sub('\n', '\n# ', event['detail'])))
|
||||
if len(history) > 1:
|
||||
o.write("# pre-expansion value:\n")
|
||||
o.write("# computed:\n")
|
||||
o.write('# "%s"\n' % (commentVal))
|
||||
else:
|
||||
o.write("#\n# $%s\n# [no history recorded]\n#\n" % var)
|
||||
@@ -312,31 +287,6 @@ class VariableHistory(object):
|
||||
lines.append(line)
|
||||
return lines
|
||||
|
||||
def get_variable_items_files(self, var, d):
|
||||
"""
|
||||
Use variable history to map items added to a list variable and
|
||||
the files in which they were added.
|
||||
"""
|
||||
history = self.variable(var)
|
||||
finalitems = (d.getVar(var, True) or '').split()
|
||||
filemap = {}
|
||||
isset = False
|
||||
for event in history:
|
||||
if 'flag' in event:
|
||||
continue
|
||||
if event['op'] == '_remove':
|
||||
continue
|
||||
if isset and event['op'] == 'set?':
|
||||
continue
|
||||
isset = True
|
||||
items = d.expand(event['detail']).split()
|
||||
for item in items:
|
||||
# This is a little crude but is belt-and-braces to avoid us
|
||||
# having to handle every possible operation type specifically
|
||||
if item in finalitems and not item in filemap:
|
||||
filemap[item] = event['file']
|
||||
return filemap
|
||||
|
||||
def del_var_history(self, var, f=None, line=None):
|
||||
"""If file f and line are not given, the entire history of var is deleted"""
|
||||
if var in self.variables:
|
||||
@@ -346,23 +296,18 @@ class VariableHistory(object):
|
||||
self.variables[var] = []
|
||||
|
||||
class DataSmart(MutableMapping):
|
||||
def __init__(self):
|
||||
def __init__(self, special = COWDictBase.copy(), seen = COWDictBase.copy() ):
|
||||
self.dict = {}
|
||||
|
||||
self.inchistory = IncludeHistory()
|
||||
self.varhistory = VariableHistory(self)
|
||||
self._tracking = False
|
||||
|
||||
self.expand_cache = {}
|
||||
|
||||
# cookie monster tribute
|
||||
# Need to be careful about writes to overridedata as
|
||||
# its only a shallow copy, could influence other data store
|
||||
# copies!
|
||||
self.overridedata = {}
|
||||
self.overrides = None
|
||||
self.overridevars = set(["OVERRIDES", "FILE"])
|
||||
self.inoverride = False
|
||||
self._special_values = special
|
||||
self._seen_overrides = seen
|
||||
|
||||
self.expand_cache = {}
|
||||
|
||||
def enableTracking(self):
|
||||
self._tracking = True
|
||||
@@ -384,21 +329,15 @@ class DataSmart(MutableMapping):
            olds = s
            try:
                s = __expand_var_regexp__.sub(varparse.var_sub, s)
                try:
                    s = __expand_python_regexp__.sub(varparse.python_sub, s)
                except SyntaxError as e:
                    # Likely unmatched brackets, just don't expand the expression
                    if e.msg != "EOL while scanning string literal":
                        raise
                s = __expand_python_regexp__.sub(varparse.python_sub, s)
                if s == olds:
                    break
            except ExpansionError:
                raise
            except bb.parse.SkipRecipe:
            except bb.parse.SkipPackage:
                raise
            except Exception as exc:
                exc_class, exc, tb = sys.exc_info()
                raise ExpansionError, ExpansionError(varname, s, exc), tb
                raise ExpansionError(varname, s, exc)

        varparse.value = s
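
The expand loop above substitutes until the string stops changing, using one regexp for plain ${VAR} references and another for inline-python ${@...} expressions; in the newer code a SyntaxError from an unfinished string literal simply leaves the expression unexpanded. A small sketch of the two-regexp loop over a dict, with eval() standing in for the varparse python_sub callback:

import re

__expand_var_regexp__ = re.compile(r"\${[^{}@\n\t :]+}")
__expand_python_regexp__ = re.compile(r"\${@.+?}")

def expand(s, variables):
    while True:
        olds = s
        # Plain ${VAR} references come from the datastore.
        s = __expand_var_regexp__.sub(
            lambda m: str(variables.get(m.group(0)[2:-1], m.group(0))), s)
        # ${@...} runs as python; eval() here stands in for python_sub.
        s = __expand_python_regexp__.sub(
            lambda m: str(eval(m.group(0)[3:-1])), s)
        if s == olds:
            return s

print(expand("${PN}-${@1 + 1}", {"PN": "foo"}))  # -> foo-2
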
@@ -410,34 +349,97 @@ class DataSmart(MutableMapping):
|
||||
def expand(self, s, varname = None):
|
||||
return self.expandWithRefs(s, varname).value
|
||||
|
||||
|
||||
def finalize(self, parent = False):
|
||||
return
|
||||
|
||||
def internal_finalize(self, parent = False):
|
||||
"""Performs final steps upon the datastore, including application of overrides"""
|
||||
self.overrides = None
|
||||
|
||||
def need_overrides(self):
|
||||
if self.overrides is not None:
|
||||
return
|
||||
if self.inoverride:
|
||||
return
|
||||
for count in range(5):
|
||||
self.inoverride = True
|
||||
# Can end up here recursively so setup dummy values
|
||||
self.overrides = []
|
||||
self.overridesset = set()
|
||||
self.overrides = (self.getVar("OVERRIDES", True) or "").split(":") or []
|
||||
self.overridesset = set(self.overrides)
|
||||
self.inoverride = False
|
||||
self.expand_cache = {}
|
||||
newoverrides = (self.getVar("OVERRIDES", True) or "").split(":") or []
|
||||
if newoverrides == self.overrides:
|
||||
break
|
||||
self.overrides = newoverrides
|
||||
self.overridesset = set(self.overrides)
|
||||
else:
|
||||
bb.fatal("Overrides could not be expanded into a stable state after 5 iterations, overrides must be being referenced by other overridden variables in some recursive fashion. Please provide your configuration to bitbake-devel so we can laugh, er, I mean try and understand how to make it work.")
|
||||
overrides = (self.getVar("OVERRIDES", True) or "").split(":") or []
|
||||
finalize_caller = {
|
||||
'op': 'finalize',
|
||||
}
|
||||
infer_caller_details(finalize_caller, parent = parent, varval = False)
|
||||
|
||||
#
|
||||
# Well let us see what breaks here. We used to iterate
|
||||
# over each variable and apply the override and then
|
||||
# do the line expanding.
|
||||
# If we have bad luck - which we will have - the keys
|
||||
# where in some order that is so important for this
|
||||
# method which we don't have anymore.
|
||||
# Anyway we will fix that and write test cases this
|
||||
# time.
|
||||
|
||||
#
|
||||
# First we apply all overrides
|
||||
# Then we will handle _append and _prepend and store the _remove
|
||||
# information for later.
|
||||
#
|
||||
|
||||
# We only want to report finalization once per variable overridden.
|
||||
finalizes_reported = {}
|
||||
|
||||
for o in overrides:
|
||||
# calculate '_'+override
|
||||
l = len(o) + 1
|
||||
|
||||
# see if one should even try
|
||||
if o not in self._seen_overrides:
|
||||
continue
|
||||
|
||||
vars = self._seen_overrides[o].copy()
|
||||
for var in vars:
|
||||
name = var[:-l]
|
||||
try:
|
||||
# Report only once, even if multiple changes.
|
||||
if name not in finalizes_reported:
|
||||
finalizes_reported[name] = True
|
||||
finalize_caller['variable'] = name
|
||||
finalize_caller['detail'] = 'was: ' + str(self.getVar(name, False))
|
||||
self.varhistory.record(**finalize_caller)
|
||||
# Copy history of the override over.
|
||||
for event in self.varhistory.variable(var):
|
||||
loginfo = event.copy()
|
||||
loginfo['variable'] = name
|
||||
loginfo['op'] = 'override[%s]:%s' % (o, loginfo['op'])
|
||||
self.varhistory.record(**loginfo)
|
||||
self.setVar(name, self.getVar(var, False), op = 'finalize', file = 'override[%s]' % o, line = '')
|
||||
self.delVar(var)
|
||||
except Exception:
|
||||
logger.info("Untracked delVar")
|
||||
|
||||
# now on to the appends and prepends, and stashing the removes
|
||||
for op in __setvar_keyword__:
|
||||
if op in self._special_values:
|
||||
appends = self._special_values[op] or []
|
||||
for append in appends:
|
||||
keep = []
|
||||
for (a, o) in self.getVarFlag(append, op) or []:
|
||||
match = True
|
||||
if o:
|
||||
for o2 in o.split("_"):
|
||||
if not o2 in overrides:
|
||||
match = False
|
||||
if not match:
|
||||
keep.append((a ,o))
|
||||
continue
|
||||
|
||||
if op == "_append":
|
||||
sval = self.getVar(append, False) or ""
|
||||
sval += a
|
||||
self.setVar(append, sval)
|
||||
elif op == "_prepend":
|
||||
sval = a + (self.getVar(append, False) or "")
|
||||
self.setVar(append, sval)
|
||||
elif op == "_remove":
|
||||
removes = self.getVarFlag(append, "_removeactive", False) or []
|
||||
removes.extend(a.split())
|
||||
self.setVarFlag(append, "_removeactive", removes, ignore=True)
|
||||
|
||||
# We save overrides that may be applied at some later stage
|
||||
if keep:
|
||||
self.setVarFlag(append, op, keep, ignore=True)
|
||||
else:
|
||||
self.delVarFlag(append, op, ignore=True)
|
||||
|
||||
def initVar(self, var):
|
||||
self.expand_cache = {}
|
||||
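
need_overrides() above recomputes OVERRIDES until it reaches a fixed point, because expanding OVERRIDES can change the value of variables that OVERRIDES itself depends on; after five unstable rounds it gives up with the error shown. A compact sketch of that fixed-point loop, with a callable standing in for the datastore lookup:

def compute_overrides(get_overrides_string, max_iterations=5):
    overrides = []
    for _ in range(max_iterations):
        # Expanding OVERRIDES may consult variables that the current
        # override list itself influences, hence the re-check.
        newoverrides = (get_overrides_string(overrides) or "").split(":")
        if newoverrides == overrides:
            return overrides
        overrides = newoverrides
    raise RuntimeError("OVERRIDES did not reach a stable state "
                       "after %d iterations" % max_iterations)

# A stable example: the string does not depend on the current overrides.
print(compute_overrides(lambda cur: "linux:arm:local"))
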
@@ -468,10 +470,6 @@ class DataSmart(MutableMapping):
|
||||
|
||||
def setVar(self, var, value, **loginfo):
|
||||
#print("var=" + str(var) + " val=" + str(value))
|
||||
parsing=False
|
||||
if 'parsing' in loginfo:
|
||||
parsing=True
|
||||
|
||||
if 'op' not in loginfo:
|
||||
loginfo['op'] = "set"
|
||||
self.expand_cache = {}
|
||||
@@ -480,7 +478,7 @@ class DataSmart(MutableMapping):
|
||||
base = match.group('base')
|
||||
keyword = match.group("keyword")
|
||||
override = match.group('add')
|
||||
l = self.getVarFlag(base, keyword, False) or []
|
||||
l = self.getVarFlag(base, keyword) or []
|
||||
l.append([value, override])
|
||||
self.setVarFlag(base, keyword, l, ignore=True)
|
||||
# And cause that to be recorded:
|
||||
@@ -493,111 +491,60 @@ class DataSmart(MutableMapping):
|
||||
self.varhistory.record(**loginfo)
|
||||
# todo make sure keyword is not __doc__ or __module__
|
||||
# pay the cookie monster
|
||||
try:
|
||||
self._special_values[keyword].add(base)
|
||||
except KeyError:
|
||||
self._special_values[keyword] = set()
|
||||
self._special_values[keyword].add(base)
|
||||
|
||||
# more cookies for the cookie monster
|
||||
if '_' in var:
|
||||
self._setvar_update_overrides(base, **loginfo)
|
||||
|
||||
if base in self.overridevars:
|
||||
self._setvar_update_overridevars(var, value)
|
||||
return
|
||||
|
||||
if not var in self.dict:
|
||||
self._makeShadowCopy(var)
|
||||
|
||||
if not parsing:
|
||||
if "_append" in self.dict[var]:
|
||||
del self.dict[var]["_append"]
|
||||
if "_prepend" in self.dict[var]:
|
||||
del self.dict[var]["_prepend"]
|
||||
if var in self.overridedata:
|
||||
active = []
|
||||
self.need_overrides()
|
||||
for (r, o) in self.overridedata[var]:
|
||||
if o in self.overridesset:
|
||||
active.append(r)
|
||||
elif "_" in o:
|
||||
if set(o.split("_")).issubset(self.overridesset):
|
||||
active.append(r)
|
||||
for a in active:
|
||||
self.delVar(a)
|
||||
del self.overridedata[var]
|
||||
|
||||
# more cookies for the cookie monster
|
||||
if '_' in var:
|
||||
self._setvar_update_overrides(var, **loginfo)
|
||||
self._setvar_update_overrides(var)
|
||||
|
||||
# setting var
|
||||
self.dict[var]["_content"] = value
|
||||
self.varhistory.record(**loginfo)
|
||||
|
||||
if var in self.overridevars:
|
||||
self._setvar_update_overridevars(var, value)
|
||||
|
||||
def _setvar_update_overridevars(self, var, value):
|
||||
vardata = self.expandWithRefs(value, var)
|
||||
new = vardata.references
|
||||
new.update(vardata.contains.keys())
|
||||
while not new.issubset(self.overridevars):
|
||||
nextnew = set()
|
||||
self.overridevars.update(new)
|
||||
for i in new:
|
||||
vardata = self.expandWithRefs(self.getVar(i, True), i)
|
||||
nextnew.update(vardata.references)
|
||||
nextnew.update(vardata.contains.keys())
|
||||
new = nextnew
|
||||
self.internal_finalize(True)
|
||||
|
||||
def _setvar_update_overrides(self, var, **loginfo):
|
||||
def _setvar_update_overrides(self, var):
|
||||
# aka pay the cookie monster
|
||||
override = var[var.rfind('_')+1:]
|
||||
shortvar = var[:var.rfind('_')]
|
||||
while override and override.islower():
|
||||
if shortvar not in self.overridedata:
|
||||
self.overridedata[shortvar] = []
|
||||
if [var, override] not in self.overridedata[shortvar]:
|
||||
# Force CoW by recreating the list first
|
||||
self.overridedata[shortvar] = list(self.overridedata[shortvar])
|
||||
self.overridedata[shortvar].append([var, override])
|
||||
override = None
|
||||
if "_" in shortvar:
|
||||
override = var[shortvar.rfind('_')+1:]
|
||||
shortvar = var[:shortvar.rfind('_')]
|
||||
if len(shortvar) == 0:
|
||||
override = None
|
||||
if len(override) > 0:
|
||||
if override not in self._seen_overrides:
|
||||
self._seen_overrides[override] = set()
|
||||
self._seen_overrides[override].add( var )
|
||||
|
||||
def getVar(self, var, expand, noweakdefault=False, parsing=False):
|
||||
return self.getVarFlag(var, "_content", expand, noweakdefault, parsing)
|
||||
def getVar(self, var, expand=False, noweakdefault=False):
|
||||
return self.getVarFlag(var, "_content", expand, noweakdefault)
|
||||
|
||||
def renameVar(self, key, newkey, **loginfo):
|
||||
"""
|
||||
Rename the variable key to newkey
|
||||
"""
|
||||
val = self.getVar(key, 0, parsing=True)
|
||||
val = self.getVar(key, 0)
|
||||
if val is not None:
|
||||
loginfo['variable'] = newkey
|
||||
loginfo['op'] = 'rename from %s' % key
|
||||
loginfo['detail'] = val
|
||||
self.varhistory.record(**loginfo)
|
||||
self.setVar(newkey, val, ignore=True, parsing=True)
|
||||
self.setVar(newkey, val, ignore=True)
|
||||
|
||||
for i in (__setvar_keyword__):
|
||||
src = self.getVarFlag(key, i, False)
|
||||
src = self.getVarFlag(key, i)
|
||||
if src is None:
|
||||
continue
|
||||
|
||||
dest = self.getVarFlag(newkey, i, False) or []
|
||||
dest = self.getVarFlag(newkey, i) or []
|
||||
dest.extend(src)
|
||||
self.setVarFlag(newkey, i, dest, ignore=True)
|
||||
|
||||
if key in self.overridedata:
|
||||
self.overridedata[newkey] = []
|
||||
for (v, o) in self.overridedata[key]:
|
||||
self.overridedata[newkey].append([v.replace(key, newkey), o])
|
||||
self.renameVar(v, v.replace(key, newkey))
|
||||
|
||||
if '_' in newkey and val is None:
|
||||
self._setvar_update_overrides(newkey, **loginfo)
|
||||
if i in self._special_values and key in self._special_values[i]:
|
||||
self._special_values[i].remove(key)
|
||||
self._special_values[i].add(newkey)
|
||||
|
||||
loginfo['variable'] = key
|
||||
loginfo['op'] = 'rename (to)'
|
||||
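
The _setvar_update_overrides() helper added in this hunk walks a name like FOO_linux_arm from the right, peeling off one lowercase suffix at a time and recording each (name, override) pair in overridedata; an uppercase component stops the walk, since overrides are lowercase by convention. A sketch of just that suffix walk (illustrative, slightly simplified from the code above):

def override_suffixes(var):
    """Yield (shortvar, override) pairs peeled from the right of var."""
    override = var[var.rfind('_') + 1:]
    shortvar = var[:var.rfind('_')]
    while override and override.islower():
        yield shortvar, override
        if "_" not in shortvar:
            break
        override = shortvar[shortvar.rfind('_') + 1:]
        shortvar = shortvar[:shortvar.rfind('_')]

print(list(override_suffixes("FOO_linux_arm")))
# [('FOO_linux', 'arm'), ('FOO', 'linux')]
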
@@ -608,12 +555,14 @@ class DataSmart(MutableMapping):
|
||||
def appendVar(self, var, value, **loginfo):
|
||||
loginfo['op'] = 'append'
|
||||
self.varhistory.record(**loginfo)
|
||||
self.setVar(var + "_append", value, ignore=True, parsing=True)
|
||||
newvalue = (self.getVar(var, False) or "") + value
|
||||
self.setVar(var, newvalue, ignore=True)
|
||||
|
||||
def prependVar(self, var, value, **loginfo):
|
||||
loginfo['op'] = 'prepend'
|
||||
self.varhistory.record(**loginfo)
|
||||
self.setVar(var + "_prepend", value, ignore=True, parsing=True)
|
||||
newvalue = value + (self.getVar(var, False) or "")
|
||||
self.setVar(var, newvalue, ignore=True)
|
||||
|
||||
def delVar(self, var, **loginfo):
|
||||
loginfo['detail'] = ""
|
||||
@@ -621,28 +570,12 @@ class DataSmart(MutableMapping):
|
||||
self.varhistory.record(**loginfo)
|
||||
self.expand_cache = {}
|
||||
self.dict[var] = {}
|
||||
if var in self.overridedata:
|
||||
del self.overridedata[var]
|
||||
if '_' in var:
|
||||
override = var[var.rfind('_')+1:]
|
||||
shortvar = var[:var.rfind('_')]
|
||||
while override and override.islower():
|
||||
try:
|
||||
if shortvar in self.overridedata:
|
||||
# Force CoW by recreating the list first
|
||||
self.overridedata[shortvar] = list(self.overridedata[shortvar])
|
||||
self.overridedata[shortvar].remove([var, override])
|
||||
except ValueError as e:
|
||||
pass
|
||||
override = None
|
||||
if "_" in shortvar:
|
||||
override = var[shortvar.rfind('_')+1:]
|
||||
shortvar = var[:shortvar.rfind('_')]
|
||||
if len(shortvar) == 0:
|
||||
override = None
|
||||
if override and override in self._seen_overrides and var in self._seen_overrides[override]:
|
||||
self._seen_overrides[override].remove(var)
|
||||
|
||||
def setVarFlag(self, var, flag, value, **loginfo):
|
||||
self.expand_cache = {}
|
||||
if 'op' not in loginfo:
|
||||
loginfo['op'] = "set"
|
||||
loginfo['flag'] = flag
|
||||
@@ -651,10 +584,8 @@ class DataSmart(MutableMapping):
|
||||
self._makeShadowCopy(var)
|
||||
self.dict[var][flag] = value
|
||||
|
||||
if flag == "_defaultval" and '_' in var:
|
||||
self._setvar_update_overrides(var, **loginfo)
|
||||
if flag == "_defaultval" and var in self.overridevars:
|
||||
self._setvar_update_overridevars(var, value)
|
||||
if flag == "defaultval" and '_' in var:
|
||||
self._setvar_update_overrides(var)
|
||||
|
||||
if flag == "unexport" or flag == "export":
|
||||
if not "__exportlist" in self.dict:
|
||||
@@ -663,71 +594,14 @@ class DataSmart(MutableMapping):
|
||||
self.dict["__exportlist"]["_content"] = set()
|
||||
self.dict["__exportlist"]["_content"].add(var)
|
||||
|
||||
def getVarFlag(self, var, flag, expand, noweakdefault=False, parsing=False):
|
||||
def getVarFlag(self, var, flag, expand=False, noweakdefault=False):
|
||||
local_var = self._findVar(var)
|
||||
value = None
|
||||
if flag == "_content" and var in self.overridedata and not parsing:
|
||||
match = False
|
||||
active = {}
|
||||
self.need_overrides()
|
||||
for (r, o) in self.overridedata[var]:
|
||||
# What about double overrides both with "_" in the name?
|
||||
if o in self.overridesset:
|
||||
active[o] = r
|
||||
elif "_" in o:
|
||||
if set(o.split("_")).issubset(self.overridesset):
|
||||
active[o] = r
|
||||
|
||||
mod = True
|
||||
while mod:
|
||||
mod = False
|
||||
for o in self.overrides:
|
||||
for a in active.copy():
|
||||
if a.endswith("_" + o):
|
||||
t = active[a]
|
||||
del active[a]
|
||||
active[a.replace("_" + o, "")] = t
|
||||
mod = True
|
||||
elif a == o:
|
||||
match = active[a]
|
||||
del active[a]
|
||||
if match:
|
||||
value = self.getVar(match, False)
|
||||
|
||||
if local_var is not None and value is None:
|
||||
if local_var is not None:
|
||||
if flag in local_var:
|
||||
value = copy.copy(local_var[flag])
|
||||
elif flag == "_content" and "_defaultval" in local_var and not noweakdefault:
|
||||
value = copy.copy(local_var["_defaultval"])
|
||||
|
||||
|
||||
if flag == "_content" and local_var is not None and "_append" in local_var and not parsing:
|
||||
if not value:
|
||||
value = ""
|
||||
self.need_overrides()
|
||||
for (r, o) in local_var["_append"]:
|
||||
match = True
|
||||
if o:
|
||||
for o2 in o.split("_"):
|
||||
if not o2 in self.overrides:
|
||||
match = False
|
||||
if match:
|
||||
value = value + r
|
||||
|
||||
if flag == "_content" and local_var is not None and "_prepend" in local_var and not parsing:
|
||||
if not value:
|
||||
value = ""
|
||||
self.need_overrides()
|
||||
for (r, o) in local_var["_prepend"]:
|
||||
|
||||
match = True
|
||||
if o:
|
||||
for o2 in o.split("_"):
|
||||
if not o2 in self.overrides:
|
||||
match = False
|
||||
if match:
|
||||
value = r + value
|
||||
|
||||
elif flag == "_content" and "defaultval" in local_var and not noweakdefault:
|
||||
value = copy.copy(local_var["defaultval"])
|
||||
if expand and value:
|
||||
# Only getvar (flag == _content) hits the expand cache
|
||||
cachename = None
|
||||
@@ -736,30 +610,17 @@ class DataSmart(MutableMapping):
|
||||
else:
|
||||
cachename = var + "[" + flag + "]"
|
||||
value = self.expand(value, cachename)
|
||||
|
||||
if value and flag == "_content" and local_var is not None and "_remove" in local_var:
|
||||
removes = []
|
||||
self.need_overrides()
|
||||
for (r, o) in local_var["_remove"]:
|
||||
match = True
|
||||
if o:
|
||||
for o2 in o.split("_"):
|
||||
if not o2 in self.overrides:
|
||||
match = False
|
||||
if match:
|
||||
removes.extend(self.expand(r).split())
|
||||
|
||||
filtered = filter(lambda v: v not in removes,
|
||||
value.split())
|
||||
if value and flag == "_content" and local_var is not None and "_removeactive" in local_var:
|
||||
filtered = filter(lambda v: v not in local_var["_removeactive"],
|
||||
value.split(" "))
|
||||
value = " ".join(filtered)
|
||||
if expand and var in self.expand_cache:
|
||||
if expand:
|
||||
# We need to ensure the expand cache has the correct value
|
||||
# flag == "_content" here
|
||||
self.expand_cache[var].value = value
|
||||
return value
|
||||
|
||||
def delVarFlag(self, var, flag, **loginfo):
|
||||
self.expand_cache = {}
|
||||
local_var = self._findVar(var)
|
||||
if not local_var:
|
||||
return
|
||||
@@ -789,7 +650,6 @@ class DataSmart(MutableMapping):
|
||||
self.setVarFlag(var, flag, newvalue, ignore=True)
|
||||
|
||||
def setVarFlags(self, var, flags, **loginfo):
|
||||
self.expand_cache = {}
|
||||
infer_caller_details(loginfo)
|
||||
if not var in self.dict:
|
||||
self._makeShadowCopy(var)
|
||||
@@ -819,7 +679,6 @@ class DataSmart(MutableMapping):
|
||||
|
||||
|
||||
def delVarFlags(self, var, **loginfo):
|
||||
self.expand_cache = {}
|
||||
if not var in self.dict:
|
||||
self._makeShadowCopy(var)
|
||||
|
||||
@@ -837,12 +696,13 @@ class DataSmart(MutableMapping):
|
||||
else:
|
||||
del self.dict[var]
|
||||
|
||||
|
||||
def createCopy(self):
|
||||
"""
|
||||
Create a copy of self by setting _data to self
|
||||
"""
|
||||
# we really want this to be a DataSmart...
|
||||
data = DataSmart()
|
||||
data = DataSmart(seen=self._seen_overrides.copy(), special=self._special_values.copy())
|
||||
data.dict["_data"] = self.dict
|
||||
data.varhistory = self.varhistory.copy()
|
||||
data.varhistory.datasmart = data
|
||||
@@ -850,12 +710,6 @@ class DataSmart(MutableMapping):
|
||||
|
||||
data._tracking = self._tracking
|
||||
|
||||
data.overrides = None
|
||||
data.overridevars = copy.copy(self.overridevars)
|
||||
# Should really be a deepcopy but has heavy overhead.
|
||||
# Instead, we're careful with writes.
|
||||
data.overridedata = copy.copy(self.overridedata)
|
||||
|
||||
return data
|
||||
|
||||
def expandVarref(self, variable, parents=False):
|
||||
@@ -880,19 +734,12 @@ class DataSmart(MutableMapping):
|
||||
yield key
|
||||
|
||||
def __iter__(self):
|
||||
deleted = set()
|
||||
overrides = set()
|
||||
def keylist(d):
|
||||
klist = set()
|
||||
for key in d:
|
||||
if key == "_data":
|
||||
continue
|
||||
if key in deleted:
|
||||
continue
|
||||
if key in overrides:
|
||||
continue
|
||||
if not d[key]:
|
||||
deleted.add(key)
|
||||
continue
|
||||
klist.add(key)
|
||||
|
||||
@@ -901,23 +748,11 @@ class DataSmart(MutableMapping):
|
||||
|
||||
return klist
|
||||
|
||||
self.need_overrides()
|
||||
for var in self.overridedata:
|
||||
for (r, o) in self.overridedata[var]:
|
||||
if o in self.overridesset:
|
||||
overrides.add(var)
|
||||
elif "_" in o:
|
||||
if set(o.split("_")).issubset(self.overridesset):
|
||||
overrides.add(var)
|
||||
|
||||
for k in keylist(self.dict):
|
||||
yield k
|
||||
|
||||
for k in overrides:
|
||||
yield k
|
||||
|
||||
def __len__(self):
|
||||
return len(frozenset(iter(self)))
|
||||
return len(frozenset(self))
|
||||
|
||||
def __getitem__(self, item):
|
||||
value = self.getVar(item, False)
|
||||
@@ -962,7 +797,7 @@ class DataSmart(MutableMapping):
|
||||
|
||||
if key == "__BBANONFUNCS":
|
||||
for i in bb_list:
|
||||
value = d.getVar(i, False) or ""
|
||||
value = d.getVar(i, True) or ""
|
||||
data.update({i:value})
|
||||
|
||||
data_str = str([(k, data[k]) for k in sorted(data.keys())])
|
||||
|
||||
@@ -31,7 +31,6 @@ except ImportError:
|
||||
import logging
|
||||
import atexit
|
||||
import traceback
|
||||
import ast
|
||||
import bb.utils
|
||||
import bb.compat
|
||||
import bb.exceptions
|
||||
@@ -56,7 +55,6 @@ def get_class_handlers():
|
||||
return _handlers
|
||||
|
||||
def set_class_handlers(h):
|
||||
global _handlers
|
||||
_handlers = h
|
||||
|
||||
def clean_class_handlers():
|
||||
@@ -69,23 +67,12 @@ _ui_logfilters = {}
|
||||
_ui_handler_seq = 0
|
||||
_event_handler_map = {}
|
||||
_catchall_handlers = {}
|
||||
_eventfilter = None
|
||||
_uiready = False
|
||||
|
||||
if hasattr(__builtins__, '__setitem__'):
|
||||
builtins = __builtins__
|
||||
else:
|
||||
builtins = __builtins__.__dict__
|
||||
|
||||
def execute_handler(name, handler, event, d):
|
||||
event.data = d
|
||||
addedd = False
|
||||
if 'd' not in builtins:
|
||||
builtins['d'] = d
|
||||
addedd = True
|
||||
try:
|
||||
ret = handler(event)
|
||||
except (bb.parse.SkipRecipe, bb.BBHandledException):
|
||||
except bb.parse.SkipPackage:
|
||||
raise
|
||||
except Exception:
|
||||
etype, value, tb = sys.exc_info()
|
||||
@@ -98,8 +85,6 @@ def execute_handler(name, handler, event, d):
|
||||
raise
|
||||
finally:
|
||||
del event.data
|
||||
if addedd:
|
||||
del builtins['d']
|
||||
|
||||
def fire_class_handlers(event, d):
|
||||
if isinstance(event, logging.LogRecord):
|
||||
@@ -109,10 +94,10 @@ def fire_class_handlers(event, d):
|
||||
evt_hmap = _event_handler_map.get(eid, {})
|
||||
for name, handler in _handlers.iteritems():
|
||||
if name in _catchall_handlers or name in evt_hmap:
|
||||
if _eventfilter:
|
||||
if not _eventfilter(name, handler, event, d):
|
||||
continue
|
||||
execute_handler(name, handler, event, d)
|
||||
try:
|
||||
execute_handler(name, handler, event, d)
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
ui_queue = []
|
||||
@atexit.register
|
||||
@@ -120,7 +105,7 @@ def print_ui_queue():
|
||||
"""If we're exiting before a UI has been spawned, display any queued
|
||||
LogRecords to the console."""
|
||||
logger = logging.getLogger("BitBake")
|
||||
if not _uiready:
|
||||
if not _ui_handlers:
|
||||
from bb.msg import BBLogFormatter
|
||||
console = logging.StreamHandler(sys.stdout)
|
||||
console.setFormatter(BBLogFormatter("%(levelname)s: %(message)s"))
|
||||
@@ -142,7 +127,7 @@ def print_ui_queue():
|
||||
logger.handle(event)
|
||||
|
||||
def fire_ui_handlers(event, d):
|
||||
if not _uiready:
|
||||
if not _ui_handlers:
|
||||
# No UI handlers registered yet, queue up the messages
|
||||
ui_queue.append(event)
|
||||
return
|
||||
@@ -183,7 +168,7 @@ def fire_from_worker(event, d):
|
||||
fire_ui_handlers(event, d)
|
||||
|
||||
noop = lambda _: None
|
||||
def register(name, handler, mask=None, filename=None, lineno=None):
|
||||
def register(name, handler, mask=[]):
|
||||
"""Register an Event handler"""
|
||||
|
||||
# already registered
|
||||
@@ -195,15 +180,7 @@ def register(name, handler, mask=None, filename=None, lineno=None):
|
||||
if isinstance(handler, basestring):
|
||||
tmp = "def %s(e):\n%s" % (name, handler)
|
||||
try:
|
||||
code = bb.methodpool.compile_cache(tmp)
|
||||
if not code:
|
||||
if filename is None:
|
||||
filename = "%s(e)" % name
|
||||
code = compile(tmp, filename, "exec", ast.PyCF_ONLY_AST)
|
||||
if lineno is not None:
|
||||
ast.increment_lineno(code, lineno-1)
|
||||
code = compile(code, filename, "exec")
|
||||
bb.methodpool.compile_cache_add(tmp, code)
|
||||
code = compile(tmp, "%s(e)" % name, "exec")
|
||||
except SyntaxError:
|
||||
logger.error("Unable to register event handler '%s':\n%s", name,
|
||||
''.join(traceback.format_exc(limit=0)))
|
||||
@@ -230,14 +207,7 @@ def remove(name, handler):
|
||||
"""Remove an Event handler"""
|
||||
_handlers.pop(name)
|
||||
|
||||
def set_eventfilter(func):
|
||||
global _eventfilter
|
||||
_eventfilter = func
|
||||
|
||||
def register_UIHhandler(handler, mainui=False):
|
||||
if mainui:
|
||||
global _uiready
|
||||
_uiready = True
|
||||
def register_UIHhandler(handler):
|
||||
bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1
|
||||
_ui_handlers[_ui_handler_seq] = handler
|
||||
level, debug_domains = bb.msg.constructLogOptions()
|
||||
@@ -388,12 +358,11 @@ class BuildStarted(BuildBase, OperationStarted):
|
||||
|
||||
class BuildCompleted(BuildBase, OperationCompleted):
|
||||
"""bbmake build run completed"""
|
||||
def __init__(self, total, n, p, failures=0, interrupted=0):
|
||||
def __init__(self, total, n, p, failures = 0):
|
||||
if not failures:
|
||||
OperationCompleted.__init__(self, total, "Building Succeeded")
|
||||
else:
|
||||
OperationCompleted.__init__(self, total, "Building Failed")
|
||||
self._interrupted = interrupted
|
||||
BuildBase.__init__(self, n, p, failures)
|
||||
|
||||
class DiskFull(Event):
|
||||
@@ -408,7 +377,7 @@ class DiskFull(Event):
|
||||
class NoProvider(Event):
|
||||
"""No Provider for an Event"""
|
||||
|
||||
def __init__(self, item, runtime=False, dependees=None, reasons=None, close_matches=None):
|
||||
def __init__(self, item, runtime=False, dependees=None, reasons=[], close_matches=[]):
|
||||
Event.__init__(self)
|
||||
self._item = item
|
||||
self._runtime = runtime
|
||||
@@ -522,16 +491,6 @@ class TargetsTreeGenerated(Event):
|
||||
Event.__init__(self)
|
||||
self._model = model
|
||||
|
||||
class ReachableStamps(Event):
|
||||
"""
|
||||
An event listing all stamps reachable after parsing
|
||||
which the metadata may use to clean up stale data
|
||||
"""
|
||||
|
||||
def __init__(self, stamps):
|
||||
Event.__init__(self)
|
||||
self.stamps = stamps
|
||||
|
||||
class FilesMatchingFound(Event):
|
||||
"""
|
||||
Event when a list of files matching the supplied pattern has
|
||||
@@ -609,10 +568,7 @@ class LogHandler(logging.Handler):
|
||||
etype, value, tb = record.exc_info
|
||||
if hasattr(tb, 'tb_next'):
|
||||
tb = list(bb.exceptions.extract_traceback(tb, context=3))
|
||||
# Need to turn the value into something the logging system can pickle
|
||||
record.bb_exc_info = (etype, value, tb)
|
||||
record.bb_exc_formatted = bb.exceptions.format_exception(etype, value, tb, limit=5)
|
||||
value = str(value)
|
||||
record.exc_info = None
|
||||
fire(record, None)
|
||||
|
||||
@@ -641,11 +597,11 @@ class MetadataEvent(Event):
|
||||
def __init__(self, eventtype, eventdata):
|
||||
Event.__init__(self)
|
||||
self.type = eventtype
|
||||
self._localdata = eventdata
|
||||
self.data = eventdata
|
||||
|
||||
class SanityCheck(Event):
|
||||
"""
|
||||
Event to run sanity checks, either raise errors or generate events as return status.
|
||||
Event to runs sanity checks, either raise errors or generate events as return status.
|
||||
"""
|
||||
def __init__(self, generateevents = True):
|
||||
Event.__init__(self)
|
||||
@@ -653,7 +609,7 @@ class SanityCheck(Event):
|
||||
|
||||
class SanityCheckPassed(Event):
|
||||
"""
|
||||
Event to indicate sanity check has passed
|
||||
Event to indicate sanity check is passed
|
||||
"""
|
||||
|
||||
class SanityCheckFailed(Event):
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,263 +0,0 @@
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
"""
|
||||
BitBake 'Fetch' clearcase implementation
|
||||
|
||||
The clearcase fetcher is used to retrieve files from a ClearCase repository.
|
||||
|
||||
Usage in the recipe:
|
||||
|
||||
SRC_URI = "ccrc://cc.example.org/ccrc;vob=/example_vob;module=/example_module"
|
||||
SRCREV = "EXAMPLE_CLEARCASE_TAG"
|
||||
PV = "${@d.getVar("SRCREV", False).replace("/", "+")}"
|
||||
|
||||
The fetcher uses the rcleartool or cleartool remote client, depending on which one is available.
|
||||
|
||||
Supported SRC_URI options are:
|
||||
|
||||
- vob
|
||||
(required) The name of the clearcase VOB (with prepending "/")
|
||||
|
||||
- module
|
||||
The module in the selected VOB (with prepending "/")
|
||||
|
||||
The module and vob parameters are combined to create
|
||||
the following load rule in the view config spec:
|
||||
load <vob><module>
|
||||
|
||||
- proto
|
||||
http or https
|
||||
|
||||
Related variables:
|
||||
|
||||
CCASE_CUSTOM_CONFIG_SPEC
|
||||
Write a config spec to this variable in your recipe to use it instead
|
||||
of the default config spec generated by this fetcher.
|
||||
Please note that the SRCREV loses its functionality if you specify
|
||||
this variable. SRCREV is still used to label the archive after a fetch,
|
||||
but it doesn't define what's fetched.
|
||||
|
||||
User credentials:
|
||||
cleartool:
|
||||
The login of cleartool is handled by the system. No special steps needed.
|
||||
|
||||
rcleartool:
|
||||
In order to use rcleartool with authenticated users an `rcleartool login` is
|
||||
necessary before using the fetcher.
|
||||
"""
|
||||
# Copyright (C) 2014 Siemens AG
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
|
||||
import os
|
||||
import sys
|
||||
import shutil
|
||||
import bb
|
||||
from bb import data
|
||||
from bb.fetch2 import FetchMethod
|
||||
from bb.fetch2 import FetchError
|
||||
from bb.fetch2 import runfetchcmd
|
||||
from bb.fetch2 import logger
|
||||
from distutils import spawn
|
||||
|
||||
class ClearCase(FetchMethod):
|
||||
"""Class to fetch urls via 'clearcase'"""
|
||||
def init(self, d):
|
||||
pass
|
||||
|
||||
def supports(self, ud, d):
|
||||
"""
|
||||
Check to see if a given url can be fetched with Clearcase.
|
||||
"""
|
||||
return ud.type in ['ccrc']
|
||||
|
||||
def debug(self, msg):
|
||||
logger.debug(1, "ClearCase: %s", msg)
|
||||
|
||||
def urldata_init(self, ud, d):
|
||||
"""
|
||||
init ClearCase specific variable within url data
|
||||
"""
|
||||
ud.proto = "https"
|
||||
if 'protocol' in ud.parm:
|
||||
ud.proto = ud.parm['protocol']
|
||||
if not ud.proto in ('http', 'https'):
|
||||
raise fetch2.ParameterError("Invalid protocol type", ud.url)
|
||||
|
||||
ud.vob = ''
|
||||
if 'vob' in ud.parm:
|
||||
ud.vob = ud.parm['vob']
|
||||
else:
|
||||
msg = ud.url+": vob must be defined so the fetcher knows what to get."
|
||||
raise MissingParameterError('vob', msg)
|
||||
|
||||
if 'module' in ud.parm:
|
||||
ud.module = ud.parm['module']
|
||||
else:
|
||||
ud.module = ""
|
||||
|
||||
ud.basecmd = d.getVar("FETCHCMD_ccrc", True) or spawn.find_executable("cleartool") or spawn.find_executable("rcleartool")
|
||||
|
||||
if data.getVar("SRCREV", d, True) == "INVALID":
|
||||
raise FetchError("Set a valid SRCREV for the clearcase fetcher in your recipe, e.g. SRCREV = \"/main/LATEST\" or any other label of your choice.")
|
||||
|
||||
ud.label = d.getVar("SRCREV", False)
|
||||
ud.customspec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC", True)
|
||||
|
||||
ud.server = "%s://%s%s" % (ud.proto, ud.host, ud.path)
|
||||
|
||||
ud.identifier = "clearcase-%s%s-%s" % ( ud.vob.replace("/", ""),
|
||||
ud.module.replace("/", "."),
|
||||
ud.label.replace("/", "."))
|
||||
|
||||
ud.viewname = "%s-view%s" % (ud.identifier, d.getVar("DATETIME", d, True))
|
||||
ud.csname = "%s-config-spec" % (ud.identifier)
|
||||
ud.ccasedir = os.path.join(data.getVar("DL_DIR", d, True), ud.type)
|
||||
ud.viewdir = os.path.join(ud.ccasedir, ud.viewname)
|
||||
ud.configspecfile = os.path.join(ud.ccasedir, ud.csname)
|
||||
ud.localfile = "%s.tar.gz" % (ud.identifier)
|
||||
|
||||
self.debug("host = %s" % ud.host)
|
||||
self.debug("path = %s" % ud.path)
|
||||
self.debug("server = %s" % ud.server)
|
||||
self.debug("proto = %s" % ud.proto)
|
||||
self.debug("type = %s" % ud.type)
|
||||
self.debug("vob = %s" % ud.vob)
|
||||
self.debug("module = %s" % ud.module)
|
||||
self.debug("basecmd = %s" % ud.basecmd)
|
||||
self.debug("label = %s" % ud.label)
|
||||
self.debug("ccasedir = %s" % ud.ccasedir)
|
||||
self.debug("viewdir = %s" % ud.viewdir)
|
||||
self.debug("viewname = %s" % ud.viewname)
|
||||
self.debug("configspecfile = %s" % ud.configspecfile)
|
||||
self.debug("localfile = %s" % ud.localfile)
|
||||
|
||||
ud.localfile = os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
|
||||
|
||||
def _build_ccase_command(self, ud, command):
|
||||
"""
|
||||
Build up a commandline based on ud
|
||||
command is: mkview, setcs, rmview
|
||||
"""
|
||||
options = []
|
||||
|
||||
if "rcleartool" in ud.basecmd:
|
||||
options.append("-server %s" % ud.server)
|
||||
|
||||
basecmd = "%s %s" % (ud.basecmd, command)
|
||||
|
||||
if command is 'mkview':
|
||||
if not "rcleartool" in ud.basecmd:
|
||||
# Cleartool needs a -snapshot view
|
||||
options.append("-snapshot")
|
||||
options.append("-tag %s" % ud.viewname)
|
||||
options.append(ud.viewdir)
|
||||
|
||||
elif command is 'rmview':
|
||||
options.append("-force")
|
||||
options.append("%s" % ud.viewdir)
|
||||
|
||||
elif command is 'setcs':
|
||||
options.append("-overwrite")
|
||||
options.append(ud.configspecfile)
|
||||
|
||||
else:
|
||||
raise FetchError("Invalid ccase command %s" % command)
|
||||
|
||||
ccasecmd = "%s %s" % (basecmd, " ".join(options))
|
||||
self.debug("ccasecmd = %s" % ccasecmd)
|
||||
return ccasecmd
|
||||
|
||||
def _write_configspec(self, ud, d):
|
||||
"""
|
||||
Create config spec file (ud.configspecfile) for ccase view
|
||||
"""
|
||||
config_spec = ""
|
||||
custom_config_spec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC", d)
|
||||
if custom_config_spec is not None:
|
||||
for line in custom_config_spec.split("\\n"):
|
||||
config_spec += line+"\n"
|
||||
bb.warn("A custom config spec has been set, SRCREV is only relevant for the tarball name.")
|
||||
else:
|
||||
config_spec += "element * CHECKEDOUT\n"
|
||||
config_spec += "element * %s\n" % ud.label
|
||||
config_spec += "load %s%s\n" % (ud.vob, ud.module)
|
||||
|
||||
logger.info("Using config spec: \n%s" % config_spec)
|
||||
|
||||
with open(ud.configspecfile, 'w') as f:
|
||||
f.write(config_spec)
|
||||
|
||||
def _remove_view(self, ud, d):
|
||||
if os.path.exists(ud.viewdir):
|
||||
os.chdir(ud.ccasedir)
|
||||
cmd = self._build_ccase_command(ud, 'rmview');
|
||||
logger.info("cleaning up [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
|
||||
bb.fetch2.check_network_access(d, cmd, ud.url)
|
||||
output = runfetchcmd(cmd, d)
|
||||
logger.info("rmview output: %s", output)
|
||||
|
||||
def need_update(self, ud, d):
|
||||
if ("LATEST" in ud.label) or (ud.customspec and "LATEST" in ud.customspec):
|
||||
ud.identifier += "-%s" % d.getVar("DATETIME",d, True)
|
||||
return True
|
||||
if os.path.exists(ud.localpath):
|
||||
return False
|
||||
return True
|
||||
|
||||
def supports_srcrev(self):
|
||||
return True
|
||||
|
||||
def sortable_revision(self, ud, d, name):
|
||||
return False, ud.identifier
|
||||
|
||||
def download(self, ud, d):
|
||||
"""Fetch url"""
|
||||
|
||||
# Make a fresh view
|
||||
bb.utils.mkdirhier(ud.ccasedir)
|
||||
self._write_configspec(ud, d)
|
||||
cmd = self._build_ccase_command(ud, 'mkview')
|
||||
logger.info("creating view [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
|
||||
bb.fetch2.check_network_access(d, cmd, ud.url)
|
||||
try:
|
||||
runfetchcmd(cmd, d)
|
||||
except FetchError as e:
|
||||
if "CRCLI2008E" in e.msg:
|
||||
raise FetchError("%s\n%s\n" % (e.msg, "Call `rcleartool login` in your console to authenticate to the clearcase server before running bitbake."))
|
||||
else:
|
||||
raise e
|
||||
|
||||
# Set configspec: Setting the configspec effectively fetches the files as defined in the configspec
|
||||
os.chdir(ud.viewdir)
|
||||
cmd = self._build_ccase_command(ud, 'setcs');
|
||||
logger.info("fetching data [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
|
||||
bb.fetch2.check_network_access(d, cmd, ud.url)
|
||||
output = runfetchcmd(cmd, d)
|
||||
logger.info("%s", output)
|
||||
|
||||
# Copy the configspec to the viewdir so we have it in our source tarball later
|
||||
shutil.copyfile(ud.configspecfile, os.path.join(ud.viewdir, ud.csname))
|
||||
|
||||
# Clean clearcase meta-data before tar
|
||||
|
||||
runfetchcmd('tar -czf "%s" .' % (ud.localpath), d, cleanup = [ud.localpath])
|
||||
|
||||
# Clean up so we can create a new view next time
|
||||
self.clean(ud, d);
|
||||
|
||||
def clean(self, ud, d):
|
||||
self._remove_view(ud, d)
|
||||
bb.utils.remove(ud.configspecfile)
@@ -66,11 +66,8 @@ Supported SRC_URI options are:
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import errno
import os
import re
import bb
import errno
from bb import data
from bb.fetch2 import FetchMethod
from bb.fetch2 import runfetchcmd
@@ -126,7 +123,7 @@ class Git(FetchMethod):
            ud.branches[name] = branch
            ud.unresolvedrev[name] = branch

        ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git -c core.fsyncobjectfiles=0"
        ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git"

        ud.write_tarballs = ((data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) or "0") != "0") or ud.rebaseable

@@ -139,10 +136,7 @@ class Git(FetchMethod):
                ud.unresolvedrev[name] = ud.revisions[name]
                ud.revisions[name] = self.latest_revision(ud, d, name)

        gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.'))
        if gitsrcname.startswith('.'):
            gitsrcname = gitsrcname[1:]

        gitsrcname = '%s%s' % (ud.host.replace(':','.'), ud.path.replace('/', '.').replace('*', '.'))
        # for rebaseable git repo, it is necessary to keep mirror tar ball
        # per revision, so that even the revision disappears from the
        # upstream repo in the future, the mirror will remain intact and still
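        # Worked example (illustration only, made-up URL): for
        # git://git.example.com/pub/repo.git, ud.host is "git.example.com" and
        # ud.path is "/pub/repo.git", so gitsrcname becomes
        # "git.example.com.pub.repo.git" after the '/' -> '.' substitution and
        # the leading-dot strip above.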
@@ -183,13 +177,20 @@ class Git(FetchMethod):
    def download(self, ud, d):
        """Fetch url"""

        if ud.user:
            username = ud.user + '@'
        else:
            username = ""

        ud.repochanged = not os.path.exists(ud.fullmirror)

        # If the checkout doesn't exist and the mirror tarball does, extract it
        if not os.path.exists(ud.clonedir) and os.path.exists(ud.fullmirror):
            bb.utils.mkdirhier(ud.clonedir)
            os.chdir(ud.clonedir)
            runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)

        repourl = self._get_repo_url(ud)
        repourl = "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path)

        # If the repo still doesn't exist, fall back to cloning it
        if not os.path.exists(ud.clonedir):
@@ -220,11 +221,7 @@ class Git(FetchMethod):
            runfetchcmd(fetch_cmd, d)
            runfetchcmd("%s prune-packed" % ud.basecmd, d)
            runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
            try:
                os.unlink(ud.fullmirror)
            except OSError as exc:
                if exc.errno != errno.ENOENT:
                    raise
            ud.repochanged = True
        os.chdir(ud.clonedir)
        for name in ud.names:
            if not self._contains_ref(ud, d, name):
@@ -232,7 +229,7 @@ class Git(FetchMethod):

    def build_mirror_data(self, ud, d):
        # Generate a mirror tarball if needed
        if ud.write_tarballs and not os.path.exists(ud.fullmirror):
        if ud.write_tarballs and (ud.repochanged or not os.path.exists(ud.fullmirror)):
            # it's possible that this symlink points to read-only filesystem with PREMIRROR
            if os.path.islink(ud.fullmirror):
                os.unlink(ud.fullmirror)
@@ -262,23 +259,30 @@ class Git(FetchMethod):
        if ud.bareclone:
            cloneflags += " --mirror"

        runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, cloneflags, ud.clonedir, destdir), d)
        os.chdir(destdir)
        repourl = self._get_repo_url(ud)
        runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, repourl), d)
        # Versions of git prior to 1.7.9.2 have issues where foo.git and foo get confused
        # and you end up with some horrible union of the two when you attempt to clone it
        # The least invasive workaround seems to be a symlink to the real directory to
        # fool git into ignoring any .git version that may also be present.
        #
        # The issue is fixed in more recent versions of git so we can drop this hack in future
        # when that version becomes common enough.
        clonedir = ud.clonedir
        if not ud.path.endswith(".git"):
            indirectiondir = destdir[:-1] + ".indirectionsymlink"
            if os.path.exists(indirectiondir):
                os.remove(indirectiondir)
            bb.utils.mkdirhier(os.path.dirname(indirectiondir))
            os.symlink(ud.clonedir, indirectiondir)
            clonedir = indirectiondir

        runfetchcmd("git clone %s %s/ %s" % (cloneflags, clonedir, destdir), d)
        if not ud.nocheckout:
            os.chdir(destdir)
            if subdir != "":
                runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d)
                runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d)
            elif not ud.nobranch:
                branchname = ud.branches[ud.names[0]]
                runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \
                            ud.revisions[ud.names[0]]), d)
                runfetchcmd("%s branch --set-upstream %s origin/%s" % (ud.basecmd, branchname, \
                            branchname), d)
            else:
                runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d)

        return True
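    # Standalone sketch of the workaround above (illustrative, with made-up
    # paths): cloning through a symlink whose name does not end in ".git"
    # keeps old git versions from conflating "foo" and "foo.git":
    #
    #   os.symlink("/dl/git2/git.example.com.pub.repo.git",
    #              "/work/repo.indirectionsymlink")
    #   # git resolves the symlink itself and never sees a ".git" suffix
    #   runfetchcmd("git clone /work/repo.indirectionsymlink/ destdir", d)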

    def clean(self, ud, d):
@@ -307,16 +311,6 @@ class Git(FetchMethod):
            raise bb.fetch2.FetchError("The command '%s' gave output with more than one line unexpectedly, output: '%s'" % (cmd, output))
        return output.split()[0] != "0"

    def _get_repo_url(self, ud):
        """
        Return the repository URL
        """
        if ud.user:
            username = ud.user + '@'
        else:
            username = ""
        return "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path)

    def _revision_key(self, ud, d, name):
        """
        Return a unique key for the url
@@ -327,9 +321,13 @@ class Git(FetchMethod):
        """
        Run git ls-remote with the specified search string
        """
        repourl = self._get_repo_url(ud)
        cmd = "%s ls-remote %s %s" % \
              (ud.basecmd, repourl, search)
        if ud.user:
            username = ud.user + '@'
        else:
            username = ""

        cmd = "%s ls-remote %s://%s%s%s %s" % \
              (ud.basecmd, ud.proto, username, ud.host, ud.path, search)
        if ud.proto.lower() != 'file':
            bb.fetch2.check_network_access(d, cmd)
        output = runfetchcmd(cmd, d, True)
@@ -341,95 +339,17 @@ class Git(FetchMethod):
        """
        Compute the HEAD revision for the url
        """
        output = self._lsremote(ud, d, "")
        # Tags of the form ^{} may not work, need to fall back to other form
        if ud.unresolvedrev[name][:5] == "refs/":
            head = ud.unresolvedrev[name]
            tag = ud.unresolvedrev[name]
        else:
            head = "refs/heads/%s" % ud.unresolvedrev[name]
            tag = "refs/tags/%s" % ud.unresolvedrev[name]
        for s in [head, tag + "^{}", tag]:
            for l in output.split('\n'):
                if s in l:
                    return l.split()[0]
        raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output for %s" % \
            (ud.unresolvedrev[name], ud.host+ud.path))

    def latest_versionstring(self, ud, d):
        """
        Compute the latest release name like "x.y.x" in "x.y.x+gitHASH"
        by searching through the tags output of ls-remote, comparing
        versions and returning the highest match.
        """
        pupver = ('', '')

        tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX', True) or "(?P<pver>([0-9][\.|_]?)+)")
        try:
            output = self._lsremote(ud, d, "refs/tags/*")
        except (bb.fetch2.FetchError, bb.fetch2.NetworkAccess):
            return pupver

        verstring = ""
        revision = ""
        for line in output.split("\n"):
            if not line:
                break

            tag_head = line.split("/")[-1]
            # Ignore non-released branches
            m = re.search("(alpha|beta|rc|final)+", tag_head)
            if m:
                continue

            # search for version in the line
            tag = tagregex.search(tag_head)
            if tag is None:
                continue

            tag = tag.group('pver')
            tag = tag.replace("_", ".")

            if verstring and bb.utils.vercmp(("0", tag, ""), ("0", verstring, "")) < 0:
                continue

            verstring = tag
            revision = line.split()[0]
            pupver = (verstring, revision)

        return pupver
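        # Illustration only: for a project whose tags look like "v1.2.3", a
        # recipe could steer the default tagregex above with something like
        #
        #   UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>\d+(\.\d+)+)"
        #
        # so that "refs/tags/v1.2.3" yields pver "1.2.3"; the variable name is
        # real, the tag scheme is a made-up example.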
        search = "refs/heads/%s refs/tags/%s^{}" % (ud.unresolvedrev[name], ud.unresolvedrev[name])
        output = self._lsremote(ud, d, search)
        return output.split()[0]

    def _build_revision(self, ud, d, name):
        return ud.revisions[name]

    def gitpkgv_revision(self, ud, d, name):
        """
        Return a sortable revision number by counting commits in the history
        Based on gitpkgv.bbclass in meta-openembedded
        """
        rev = self._build_revision(ud, d, name)
        localpath = ud.localpath
        rev_file = os.path.join(localpath, "oe-gitpkgv_" + rev)
        if not os.path.exists(localpath):
            commits = None
        else:
            if not os.path.exists(rev_file) or not os.path.getsize(rev_file):
                from pipes import quote
                commits = bb.fetch2.runfetchcmd(
                        "git rev-list %s -- | wc -l" % (quote(rev)),
                        d, quiet=True).strip().lstrip('0')
                if commits:
                    open(rev_file, "w").write("%d\n" % int(commits))
            else:
                commits = open(rev_file, "r").readline(128).strip()
        if commits:
            return False, "%s+%s" % (commits, rev[:7])
        else:
            return True, str(rev)
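        # Worked example (hypothetical numbers): if "git rev-list <rev> -- | wc -l"
        # reports 1234 commits and rev is "deadbeef0123...", the method returns
        # (False, "1234+deadbee"), a count-based string that sorts monotonically
        # as history grows.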

    def checkstatus(self, fetch, ud, d):
    def checkstatus(self, ud, d):
        fetchcmd = "%s ls-remote %s" % (ud.basecmd, ud.url)
        try:
            self._lsremote(ud, d, "")
            runfetchcmd(fetchcmd, d, quiet=True)
            return True
        except FetchError:
            return False

@@ -2,16 +2,6 @@
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' git submodules implementation

Inherits from and extends the Git fetcher to retrieve submodules of a git repository
after cloning.

SRC_URI = "gitsm://<see Git fetcher for syntax>"

See the Git fetcher, git://, for usage documentation.

NOTE: Switching a SRC_URI from "git://" to "gitsm://" requires a clean of your recipe.

"""

# Copyright (C) 2013 Richard Purdie
@@ -109,8 +99,8 @@ class GitSM(Git):
            runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*true/bare = false/'", d)
            os.chdir(tmpclonedir)
            runfetchcmd(ud.basecmd + " reset --hard", d)
            runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d)
            runfetchcmd(ud.basecmd + " submodule update --init --recursive", d)
            runfetchcmd(ud.basecmd + " submodule init", d)
            runfetchcmd(ud.basecmd + " submodule update", d)
            self._set_relative_paths(tmpclonedir)
            runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*false/bare = true/'", d)
            os.rename(gitdir, ud.clonedir,)
@@ -130,5 +120,7 @@ class GitSM(Git):
        os.chdir(ud.destdir)
        submodules = self.uses_submodules(ud, d)
        if submodules:
            runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d)
            runfetchcmd(ud.basecmd + " submodule update --init --recursive", d)
            runfetchcmd("cp -r " + ud.clonedir + "/modules " + ud.destdir + "/.git/", d)
            runfetchcmd(ud.basecmd + " submodule init", d)
            runfetchcmd(ud.basecmd + " submodule update", d)

@@ -28,7 +28,6 @@ import os
import sys
import logging
import bb
import errno
from bb import data
from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
@@ -44,13 +43,6 @@ class Hg(FetchMethod):
        """
        return ud.type in ['hg']

    def supports_checksum(self, urldata):
        """
        Don't require checksums for local archives created from
        repository checkouts.
        """
        return False

    def urldata_init(self, ud, d):
        """
        init hg specific variable within url data
@@ -60,12 +52,10 @@ class Hg(FetchMethod):

        ud.module = ud.parm["module"]

        if 'protocol' in ud.parm:
            ud.proto = ud.parm['protocol']
        elif not ud.host:
            ud.proto = 'file'
        else:
            ud.proto = "hg"
        # Create paths to mercurial checkouts
        relpath = self._strip_leading_slashes(ud.path)
        ud.pkgdir = os.path.join(data.expand('${HGDIR}', d), ud.host, relpath)
        ud.moddir = os.path.join(ud.pkgdir, ud.module)

        ud.setup_revisons(d)

@@ -74,19 +64,7 @@ class Hg(FetchMethod):
        elif not ud.revision:
            ud.revision = self.latest_revision(ud, d)

        # Create paths to mercurial checkouts
        hgsrcname = '%s_%s_%s' % (ud.module.replace('/', '.'), \
                            ud.host, ud.path.replace('/', '.'))
        ud.mirrortarball = 'hg_%s.tar.gz' % hgsrcname
        ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)

        hgdir = d.getVar("HGDIR", True) or (d.getVar("DL_DIR", True) + "/hg/")
        ud.pkgdir = os.path.join(hgdir, hgsrcname)
        ud.moddir = os.path.join(ud.pkgdir, ud.module)
        ud.localfile = ud.moddir
        ud.basecmd = data.getVar("FETCHCMD_hg", d, True) or "/usr/bin/env hg"

        ud.write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS", True)
        ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)

    def need_update(self, ud, d):
        revTag = ud.parm.get('rev', 'tip')
@@ -96,21 +74,14 @@ class Hg(FetchMethod):
            return True
        return False

    def try_premirror(self, ud, d):
        # If we don't do this, updating an existing checkout with only premirrors
        # is not possible
        if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None:
            return True
        if os.path.exists(ud.moddir):
            return False
        return True

    def _buildhgcommand(self, ud, d, command):
        """
        Build up an hg commandline based on ud
        command is "fetch", "update", "info"
        """

        basecmd = data.expand('${FETCHCMD_hg}', d)

        proto = ud.parm.get('protocol', 'http')

        host = ud.host
@@ -127,7 +98,7 @@ class Hg(FetchMethod):
            hgroot = ud.user + "@" + host + ud.path

        if command == "info":
            return "%s identify -i %s://%s/%s" % (ud.basecmd, proto, hgroot, ud.module)
            return "%s identify -i %s://%s/%s" % (basecmd, proto, hgroot, ud.module)

        options = []

@@ -140,22 +111,19 @@ class Hg(FetchMethod):

        if command == "fetch":
            if ud.user and ud.pswd:
                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" clone %s %s://%s/%s %s" % (ud.basecmd, ud.user, ud.pswd, proto, " ".join(options), proto, hgroot, ud.module, ud.module)
                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" clone %s %s://%s/%s %s" % (basecmd, ud.user, ud.pswd, proto, " ".join(options), proto, hgroot, ud.module, ud.module)
            else:
                cmd = "%s clone %s %s://%s/%s %s" % (ud.basecmd, " ".join(options), proto, hgroot, ud.module, ud.module)
                cmd = "%s clone %s %s://%s/%s %s" % (basecmd, " ".join(options), proto, hgroot, ud.module, ud.module)
        elif command == "pull":
            # do not pass options list; limiting pull to rev causes the local
            # repo not to contain it and immediately following "update" command
            # will crash
            if ud.user and ud.pswd:
                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" pull" % (ud.basecmd, ud.user, ud.pswd, proto)
                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" pull" % (basecmd, ud.user, ud.pswd, proto)
            else:
                cmd = "%s pull" % (ud.basecmd)
                cmd = "%s pull" % (basecmd)
        elif command == "update":
            if ud.user and ud.pswd:
                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" update -C %s" % (ud.basecmd, ud.user, ud.pswd, proto, " ".join(options))
            else:
                cmd = "%s update -C %s" % (ud.basecmd, " ".join(options))
            cmd = "%s update --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" -C %s" % (basecmd, ud.user, ud.pswd, proto, " ".join(options))
        else:
            raise FetchError("Invalid hg command %s" % command, ud.url)

@@ -166,36 +134,16 @@ class Hg(FetchMethod):

        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

        # If the checkout doesn't exist and the mirror tarball does, extract it
        if not os.path.exists(ud.pkgdir) and os.path.exists(ud.fullmirror):
            bb.utils.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)

        if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
            # Found the source, check whether need pull
            updatecmd = self._buildhgcommand(ud, d, "update")
            updatecmd = self._buildhgcommand(ud, d, "pull")
            logger.info("Update " + ud.url)
            # update sources there
            os.chdir(ud.moddir)
            logger.debug(1, "Running %s", updatecmd)
            try:
                runfetchcmd(updatecmd, d)
            except bb.fetch2.FetchError:
                # Running pull in the repo
                pullcmd = self._buildhgcommand(ud, d, "pull")
                logger.info("Pulling " + ud.url)
                # update sources there
                os.chdir(ud.moddir)
                logger.debug(1, "Running %s", pullcmd)
                bb.fetch2.check_network_access(d, pullcmd, ud.url)
                runfetchcmd(pullcmd, d)
                try:
                    os.unlink(ud.fullmirror)
                except OSError as exc:
                    if exc.errno != errno.ENOENT:
                        raise
            bb.fetch2.check_network_access(d, updatecmd, ud.url)
            runfetchcmd(updatecmd, d)

        # No source found, clone it.
        if not os.path.exists(ud.moddir):
        else:
            fetchcmd = self._buildhgcommand(ud, d, "fetch")
            logger.info("Fetch " + ud.url)
            # check out sources there
@@ -212,12 +160,14 @@ class Hg(FetchMethod):
            logger.debug(1, "Running %s", updatecmd)
            runfetchcmd(updatecmd, d)

    def clean(self, ud, d):
        """ Clean the hg dir """
        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude '.hg' --exclude '.hgtags'"

        bb.utils.remove(ud.localpath, True)
        bb.utils.remove(ud.fullmirror)
        bb.utils.remove(ud.fullmirror + ".done")
        os.chdir(ud.pkgdir)
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath])

    def supports_srcrev(self):
        return True
@@ -238,41 +188,3 @@ class Hg(FetchMethod):
        Return a unique key for the url
        """
        return "hg:" + ud.moddir

    def build_mirror_data(self, ud, d):
        # Generate a mirror tarball if needed
        if ud.write_tarballs == "1" and not os.path.exists(ud.fullmirror):
            # it's possible that this symlink points to read-only filesystem with PREMIRROR
            if os.path.islink(ud.fullmirror):
                os.unlink(ud.fullmirror)

            os.chdir(ud.pkgdir)
            logger.info("Creating tarball of hg repository")
            runfetchcmd("tar -czf %s %s" % (ud.fullmirror, ud.module), d)
            runfetchcmd("touch %s.done" % (ud.fullmirror), d)

    def localpath(self, ud, d):
        return ud.pkgdir

    def unpack(self, ud, destdir, d):
        """
        Make a local clone or export for the url
        """

        revflag = "-r %s" % ud.revision
        subdir = ud.parm.get("destsuffix", ud.module)
        codir = "%s/%s" % (destdir, subdir)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata != "nokeep":
            if not os.access(os.path.join(codir, '.hg'), os.R_OK):
                logger.debug(2, "Unpack: creating new hg repository in '" + codir + "'")
                runfetchcmd("%s init %s" % (ud.basecmd, codir), d)
            logger.debug(2, "Unpack: updating source in '" + codir + "'")
            os.chdir(codir)
            runfetchcmd("%s pull %s" % (ud.basecmd, ud.moddir), d)
            runfetchcmd("%s up -C %s" % (ud.basecmd, revflag), d)
        else:
            logger.debug(2, "Unpack: extracting source to '" + codir + "'")
            os.chdir(ud.moddir)
            runfetchcmd("%s archive -t files %s %s" % (ud.basecmd, revflag, codir), d)

@@ -45,48 +45,35 @@ class Local(FetchMethod):
        ud.decodedurl = urllib.unquote(ud.url.split("://")[1].split(";")[0])
        ud.basename = os.path.basename(ud.decodedurl)
        ud.basepath = ud.decodedurl
        ud.needdonestamp = False
        return

    def localpath(self, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        """
        return self.localpaths(urldata, d)[-1]

    def localpaths(self, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        """
        searched = []
        path = urldata.decodedurl
        newpath = path
        if path[0] == "/":
            return [path]
        filespath = data.getVar('FILESPATH', d, True)
        if filespath:
            logger.debug(2, "Searching for %s in paths:\n    %s" % (path, "\n    ".join(filespath.split(":"))))
            newpath, hist = bb.utils.which(filespath, path, history=True)
            searched.extend(hist)
        if not newpath:
            filesdir = data.getVar('FILESDIR', d, True)
            if filesdir:
                logger.debug(2, "Searching for %s in path: %s" % (path, filesdir))
                newpath = os.path.join(filesdir, path)
                searched.append(newpath)
        if (not newpath or not os.path.exists(newpath)) and path.find("*") != -1:
            # For expressions using '*', best we can do is take the first directory in FILESPATH that exists
            newpath, hist = bb.utils.which(filespath, ".", history=True)
            searched.extend(hist)
            logger.debug(2, "Searching for %s in path: %s" % (path, newpath))
            return searched
        if not os.path.exists(newpath):
            dldirfile = os.path.join(d.getVar("DL_DIR", True), path)
            logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
            bb.utils.mkdirhier(os.path.dirname(dldirfile))
            searched.append(dldirfile)
            return searched
        return searched
        if path[0] != "/":
            filespath = data.getVar('FILESPATH', d, True)
            if filespath:
                logger.debug(2, "Searching for %s in paths: \n%s" % (path, "\n    ".join(filespath.split(":"))))
                newpath = bb.utils.which(filespath, path)
            if not newpath:
                filesdir = data.getVar('FILESDIR', d, True)
                if filesdir:
                    logger.debug(2, "Searching for %s in path: %s" % (path, filesdir))
                    newpath = os.path.join(filesdir, path)
            if (not newpath or not os.path.exists(newpath)) and path.find("*") != -1:
                # For expressions using '*', best we can do is take the first directory in FILESPATH that exists
                newpath = bb.utils.which(filespath, ".")
                logger.debug(2, "Searching for %s in path: %s" % (path, newpath))
                return newpath
        if not os.path.exists(newpath):
            dldirfile = os.path.join(d.getVar("DL_DIR", True), path)
            logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
            bb.utils.mkdirhier(os.path.dirname(dldirfile))
            return dldirfile
        return newpath
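        # Worked example (made-up paths): with
        #   FILESPATH = "/layer/recipe/files:/layer/files"
        # a file:// URL naming "defconfig" is resolved by bb.utils.which()
        # against each entry in turn, so /layer/recipe/files/defconfig wins if
        # it exists, otherwise /layer/files/defconfig is tried next.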

    def need_update(self, ud, d):
        if ud.url.find("*") != -1:
@@ -113,7 +100,7 @@ class Local(FetchMethod):

        return True

    def checkstatus(self, fetch, urldata, d):
    def checkstatus(self, urldata, d):
        """
        Check the status of the url
        """

@@ -1,284 +0,0 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' NPM implementation

The NPM fetcher is used to retrieve files from the npmjs repository

Usage in the recipe:

    SRC_URI = "npm://registry.npmjs.org/;name=${PN};version=${PV}"

Supported SRC_URI options are:

- name
- version

npm://registry.npmjs.org/${PN}/-/${PN}-${PV}.tgz would become npm://registry.npmjs.org;name=${PN};ver=${PV}
The fetcher triggers off the existence of ud.localpath. If that exists and has the ".done" stamp, it is assumed the fetch is good/done

"""

import os
import sys
import urllib
import json
import subprocess
import signal
import bb
from bb import data
from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import ChecksumError
from bb.fetch2 import runfetchcmd
from bb.fetch2 import logger
from bb.fetch2 import UnpackError
from bb.fetch2 import ParameterError
from distutils import spawn

def subprocess_setup():
    # Python installs a SIGPIPE handler by default. This is usually not what
    # non-Python subprocesses expect.
    # SIGPIPE errors are known issues with gzip/bash
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)

class Npm(FetchMethod):

    """Class to fetch urls via 'npm'"""
    def init(self, d):
        pass

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with npm
        """
        return ud.type in ['npm']

    def debug(self, msg):
        logger.debug(1, "NpmFetch: %s", msg)

    def clean(self, ud, d):
        logger.debug(2, "Calling cleanup %s" % ud.pkgname)
        bb.utils.remove(ud.localpath, False)
        bb.utils.remove(ud.pkgdatadir, True)
        bb.utils.remove(ud.fullmirror, False)

    def urldata_init(self, ud, d):
        """
        init NPM specific variable within url data
        """
        if 'downloadfilename' in ud.parm:
            ud.basename = ud.parm['downloadfilename']
        else:
            ud.basename = os.path.basename(ud.path)

        # can't call it ud.name otherwise fetcher base class will start doing sha1stuff
        # TODO: find a way to get an sha1/sha256 manifest of pkg & all deps
        ud.pkgname = ud.parm.get("name", None)
        if not ud.pkgname:
            raise ParameterError("NPM fetcher requires a name parameter", ud.url)
        ud.version = ud.parm.get("version", None)
        if not ud.version:
            raise ParameterError("NPM fetcher requires a version parameter", ud.url)
        ud.bbnpmmanifest = "%s-%s.deps.json" % (ud.pkgname, ud.version)
        ud.registry = "http://%s" % (ud.url.replace('npm://', '', 1).split(';'))[0]
        prefixdir = "npm/%s" % ud.pkgname
        ud.pkgdatadir = d.expand("${DL_DIR}/%s" % prefixdir)
        if not os.path.exists(ud.pkgdatadir):
            bb.utils.mkdirhier(ud.pkgdatadir)
        ud.localpath = d.expand("${DL_DIR}/npm/%s" % ud.bbnpmmanifest)

        self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -O -t 2 -T 30 -nv --passive-ftp --no-check-certificate "
        self.basecmd += " --directory-prefix=%s " % prefixdir

        ud.write_tarballs = ((data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) or "0") != "0")
        ud.mirrortarball = 'npm_%s-%s.tar.xz' % (ud.pkgname, ud.version)
        ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)

    def need_update(self, ud, d):
        if os.path.exists(ud.localpath):
            return False
        return True

    def _runwget(self, ud, d, command, quiet):
        logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
        bb.fetch2.check_network_access(d, command)
        runfetchcmd(command, d, quiet)

    def _unpackdep(self, ud, pkg, data, destdir, dldir, d):
        file = data[pkg]['tgz']
        logger.debug(2, "file to extract is %s" % file)
        if file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
            cmd = 'tar xz --strip 1 --no-same-owner --warning=no-unknown-keyword -f %s/%s' % (dldir, file)
        else:
            bb.fatal("NPM package %s downloaded is not a tarball!" % file)

        # Change to subdir before executing command
        save_cwd = os.getcwd()
        if not os.path.exists(destdir):
            os.makedirs(destdir)
        os.chdir(destdir)
        path = d.getVar('PATH', True)
        if path:
            cmd = "PATH=\"%s\" %s" % (path, cmd)
        bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
        ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
        os.chdir(save_cwd)

        if ret != 0:
            raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), ud.url)

        if 'deps' not in data[pkg]:
            return
        for dep in data[pkg]['deps']:
            self._unpackdep(ud, dep, data[pkg]['deps'], "%s/node_modules/%s" % (destdir, dep), dldir, d)


    def unpack(self, ud, destdir, d):
        dldir = d.getVar("DL_DIR", True)
        depdumpfile = "%s-%s.deps.json" % (ud.pkgname, ud.version)
        with open("%s/npm/%s" % (dldir, depdumpfile)) as datafile:
            workobj = json.load(datafile)
        dldir = "%s/%s" % (os.path.dirname(ud.localpath), ud.pkgname)

        self._unpackdep(ud, ud.pkgname, workobj, "%s/npmpkg" % destdir, dldir, d)

    def _parse_view(self, output):
        '''
        Parse the output of npm view --json; the last JSON result
        is assumed to be the one that we're interested in.
        '''
        pdata = None
        outdeps = {}
        datalines = []
        bracelevel = 0
        for line in output.splitlines():
            if bracelevel:
                datalines.append(line)
            elif '{' in line:
                datalines = []
                datalines.append(line)
            bracelevel = bracelevel + line.count('{') - line.count('}')
        if datalines:
            pdata = json.loads('\n'.join(datalines))
        return pdata
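        # Worked example (fabricated output): fed the two concatenated results
        #   {"name": "a"}
        #   {"name": "b",
        #    "version": "1.0.0"}
        # the brace counting above discards the first object once bracelevel
        # returns to zero, so only {"name": "b", "version": "1.0.0"} is parsed.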

    def _getdependencies(self, pkg, data, version, d, ud, optional=False):
        pkgfullname = pkg
        if version != '*' and not '/' in version:
            pkgfullname += "@'%s'" % version
        logger.debug(2, "Calling getdeps on %s" % pkg)
        fetchcmd = "npm view %s --json --registry %s" % (pkgfullname, ud.registry)
        output = runfetchcmd(fetchcmd, d, True)
        pdata = self._parse_view(output)
        if not pdata:
            raise FetchError("The command '%s' returned no output" % fetchcmd)
        if optional:
            pkg_os = pdata.get('os', None)
            if pkg_os:
                if not isinstance(pkg_os, list):
                    pkg_os = [pkg_os]
                if 'linux' not in pkg_os or '!linux' in pkg_os:
                    logger.debug(2, "Skipping %s since it's incompatible with Linux" % pkg)
                    return
        #logger.debug(2, "Output URL is %s - %s - %s" % (ud.basepath, ud.basename, ud.localfile))
        outputurl = pdata['dist']['tarball']
        data[pkg] = {}
        data[pkg]['tgz'] = os.path.basename(outputurl)
        self._runwget(ud, d, "%s %s" % (self.basecmd, outputurl), False)

        dependencies = pdata.get('dependencies', {})
        optionalDependencies = pdata.get('optionalDependencies', {})
        depsfound = {}
        optdepsfound = {}
        data[pkg]['deps'] = {}
        for dep in dependencies:
            if dep in optionalDependencies:
                optdepsfound[dep] = dependencies[dep]
            else:
                depsfound[dep] = dependencies[dep]
        for dep, version in optdepsfound.iteritems():
            self._getdependencies(dep, data[pkg]['deps'], version, d, ud, optional=True)
        for dep, version in depsfound.iteritems():
            self._getdependencies(dep, data[pkg]['deps'], version, d, ud)

    def _getshrinkeddependencies(self, pkg, data, version, d, ud, lockdown, manifest):
        logger.debug(2, "NPM shrinkwrap file is %s" % data)
        outputurl = "invalid"
        if ('resolved' not in data) or (not data['resolved'].startswith('http')):
            # will be the case for ${PN}
            fetchcmd = "npm view %s@%s dist.tarball --registry %s" % (pkg, version, ud.registry)
            logger.debug(2, "Found this matching URL: %s" % str(fetchcmd))
            outputurl = runfetchcmd(fetchcmd, d, True)
        else:
            outputurl = data['resolved']
        self._runwget(ud, d, "%s %s" % (self.basecmd, outputurl), False)
        manifest[pkg] = {}
        manifest[pkg]['tgz'] = os.path.basename(outputurl).rstrip()
        manifest[pkg]['deps'] = {}

        if pkg in lockdown:
            sha1_expected = lockdown[pkg][version]
            sha1_data = bb.utils.sha1_file("npm/%s/%s" % (ud.pkgname, manifest[pkg]['tgz']))
            if sha1_expected != sha1_data:
                msg = "\nFile: '%s' has %s checksum %s when %s was expected" % (manifest[pkg]['tgz'], 'sha1', sha1_data, sha1_expected)
                raise ChecksumError('Checksum mismatch!%s' % msg)
        else:
            logger.debug(2, "No lockdown data for %s@%s" % (pkg, version))

        if 'dependencies' in data:
            for obj in data['dependencies']:
                logger.debug(2, "Found dep is %s" % str(obj))
                self._getshrinkeddependencies(obj, data['dependencies'][obj], data['dependencies'][obj]['version'], d, ud, lockdown, manifest[pkg]['deps'])

    def download(self, ud, d):
        """Fetch url"""
        jsondepobj = {}
        shrinkobj = {}
        lockdown = {}

        if not os.listdir(ud.pkgdatadir) and os.path.exists(ud.fullmirror):
            dest = d.getVar("DL_DIR", True)
            bb.utils.mkdirhier(dest)
            save_cwd = os.getcwd()
            os.chdir(dest)
            runfetchcmd("tar -xJf %s" % (ud.fullmirror), d)
            os.chdir(save_cwd)
            return

        shwrf = d.getVar('NPM_SHRINKWRAP', True)
        logger.debug(2, "NPM shrinkwrap file is %s" % shwrf)
        try:
            with open(shwrf) as datafile:
                shrinkobj = json.load(datafile)
        except:
            logger.warning('Missing shrinkwrap file in NPM_SHRINKWRAP for %s, this will lead to unreliable builds!' % ud.pkgname)
        lckdf = d.getVar('NPM_LOCKDOWN', True)
        logger.debug(2, "NPM lockdown file is %s" % lckdf)
        try:
            with open(lckdf) as datafile:
                lockdown = json.load(datafile)
        except:
            logger.warning('Missing lockdown file in NPM_LOCKDOWN for %s, this will lead to unreproducible builds!' % ud.pkgname)

        if ('name' not in shrinkobj):
            self._getdependencies(ud.pkgname, jsondepobj, ud.version, d, ud)
        else:
            self._getshrinkeddependencies(ud.pkgname, shrinkobj, ud.version, d, ud, lockdown, jsondepobj)

        with open(ud.localpath, 'w') as outfile:
            json.dump(jsondepobj, outfile)

    def build_mirror_data(self, ud, d):
        # Generate a mirror tarball if needed
        if ud.write_tarballs and not os.path.exists(ud.fullmirror):
            # it's possible that this symlink points to read-only filesystem with PREMIRROR
            if os.path.islink(ud.fullmirror):
                os.unlink(ud.fullmirror)

            save_cwd = os.getcwd()
            os.chdir(d.getVar("DL_DIR", True))
            logger.info("Creating tarball of npm data")
            runfetchcmd("tar -cJf %s npm/%s npm/%s" % (ud.fullmirror, ud.bbnpmmanifest, ud.pkgname), d)
            runfetchcmd("touch %s.done" % (ud.fullmirror), d)
            os.chdir(save_cwd)

@@ -34,13 +34,13 @@ class Osc(FetchMethod):

        # Create paths to osc checkouts
        relpath = self._strip_leading_slashes(ud.path)
        ud.pkgdir = os.path.join(d.getVar('OSCDIR', True), ud.host)
        ud.pkgdir = os.path.join(data.expand('${OSCDIR}', d), ud.host)
        ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)

        if 'rev' in ud.parm:
            ud.revision = ud.parm['rev']
        else:
            pv = d.getVar("PV", False)
            pv = data.getVar("PV", d, 0)
            rev = bb.fetch2.srcrev_internal_helper(ud, d)
            if rev and rev != True:
                ud.revision = rev
@@ -84,7 +84,7 @@ class Osc(FetchMethod):

        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

        if os.access(os.path.join(d.getVar('OSCDIR', True), ud.path, ud.module), os.R_OK):
        if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK):
            oscupdatecmd = self._buildosccommand(ud, d, "update")
            logger.info("Update "+ ud.url)
            # update sources there
@@ -114,7 +114,7 @@ class Osc(FetchMethod):
        Generate a .oscrc to be used for this run.
        """

        config_path = os.path.join(d.getVar('OSCDIR', True), "oscrc")
        config_path = os.path.join(data.expand('${OSCDIR}', d), "oscrc")
        if (os.path.exists(config_path)):
            os.remove(config_path)

@@ -123,8 +123,8 @@ class Osc(FetchMethod):
            f.write("apisrv = %s\n" % ud.host)
            f.write("scheme = http\n")
            f.write("su-wrapper = su -c\n")
            f.write("build-root = %s\n" % d.getVar('WORKDIR', True))
            f.write("urllist = %s\n" % d.getVar("OSCURLLIST", True))
            f.write("build-root = %s\n" % data.expand('${WORKDIR}', d))
            f.write("urllist = http://moblin-obs.jf.intel.com:8888/build/%(project)s/%(repository)s/%(buildarch)s/:full/%(name)s.rpm\n")
            f.write("extra-pkgs = gzip\n")
            f.write("\n")
            f.write("[%s]\n" % ud.host)

@@ -48,7 +48,7 @@ class Perforce(FetchMethod):
            (user, pswd, host, port) = path.split('@')[0].split(":")
            path = path.split('@')[1]
        else:
            (host, port) = d.getVar('P4PORT', False).split(':')
            (host, port) = d.getVar('P4PORT').split(':')
            user = ""
            pswd = ""

@@ -103,15 +103,22 @@ class Perforce(FetchMethod):
    def urldata_init(self, ud, d):
        (host, path, user, pswd, parm) = Perforce.doparse(ud.url, d)

        base_path = path.replace('/...', '')
        base_path = self._strip_leading_slashes(base_path)

        if "label" in parm:
            version = parm["label"]
        else:
            version = Perforce.getcset(d, path, host, user, pswd, parm)
        # If a label is specified, we use that as our filename

        ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base_path.replace('/', '.'), version), d)
        if "label" in parm:
            ud.localfile = "%s.tar.gz" % (parm["label"])
            return

        base = path
        which = path.find('/...')
        if which != -1:
            base = path[:which-1]

        base = self._strip_leading_slashes(base)

        cset = Perforce.getcset(d, path, host, user, pswd, parm)

        ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base.replace('/', '.'), cset), d)

    def download(self, ud, d):
        """
@@ -123,7 +130,7 @@ class Perforce(FetchMethod):
        if depot.find('/...') != -1:
            path = depot[:depot.find('/...')]
        else:
            path = depot[:depot.rfind('/')]
            path = depot

        module = parm.get('module', os.path.basename(path))

@@ -99,7 +99,7 @@ class SFTP(FetchMethod):
        """Fetch urls"""

        urlo = URI(ud.url)
        basecmd = 'sftp -oBatchMode=yes'
        basecmd = 'sftp -oPasswordAuthentication=no'
        port = ''
        if urlo.port:
            port = '-P %d' % urlo.port

@@ -87,8 +87,7 @@ class SSH(FetchMethod):
        m = __pattern__.match(urldata.url)
        path = m.group('path')
        host = m.group('host')
        urldata.localpath = os.path.join(d.getVar('DL_DIR', True),
                os.path.basename(os.path.normpath(path)))
        urldata.localpath = os.path.join(d.getVar('DL_DIR', True), os.path.basename(path))

    def download(self, urldata, d):
        dldir = d.getVar('DL_DIR', True)

@@ -54,11 +54,6 @@ class Svn(FetchMethod):

        ud.module = ud.parm["module"]

        if not "path_spec" in ud.parm:
            ud.path_spec = ud.module
        else:
            ud.path_spec = ud.parm["path_spec"]

        # Create paths to svn checkouts
        relpath = self._strip_leading_slashes(ud.path)
        ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath)
@@ -106,8 +101,7 @@ class Svn(FetchMethod):
            suffix = "@%s" % (ud.revision)

        if command == "fetch":
            transportuser = ud.parm.get("transportuser", "")
            svncmd = "%s co %s %s://%s%s/%s%s %s" % (ud.basecmd, " ".join(options), proto, transportuser, svnroot, ud.module, suffix, ud.path_spec)
            svncmd = "%s co %s %s://%s/%s%s %s" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module, suffix, ud.module)
        elif command == "update":
            svncmd = "%s update %s" % (ud.basecmd, " ".join(options))
        else:
@@ -154,7 +148,7 @@ class Svn(FetchMethod):

        os.chdir(ud.pkgdir)
        # tar them up to a defined filename
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.path_spec), d, cleanup = [ud.localpath])
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath])

    def clean(self, ud, d):
        """ Clean SVN specific files and dirs """

@@ -25,9 +25,6 @@ BitBake build tools.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig

import re
import tempfile
import subprocess
import os
import logging
import bb
@@ -37,9 +34,6 @@ from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import logger
from bb.fetch2 import runfetchcmd
from bb.utils import export_proxies
from bs4 import BeautifulSoup
from bs4 import SoupStrainer

class Wget(FetchMethod):
    """Class to fetch urls via 'wget'"""
@@ -63,8 +57,6 @@ class Wget(FetchMethod):
        ud.basename = os.path.basename(ud.path)

        ud.localfile = data.expand(urllib.unquote(ud.basename), d)
        if not ud.localfile:
            ud.localfile = data.expand(urllib.unquote(ud.host + ud.path).replace("/", "."), d)

        self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate"

@@ -104,462 +96,11 @@ class Wget(FetchMethod):

        return True

    def checkstatus(self, fetch, ud, d):
        import urllib2, socket, httplib
        from urllib import addinfourl
        from bb.fetch2 import FetchConnectionCache
    def checkstatus(self, ud, d):

        class HTTPConnectionCache(httplib.HTTPConnection):
            if fetch.connection_cache:
                def connect(self):
                    """Connect to the host and port specified in __init__."""
        uri = ud.url.split(";")[0]
        fetchcmd = self.basecmd + " --spider '%s'" % uri

                    sock = fetch.connection_cache.get_connection(self.host, self.port)
                    if sock:
                        self.sock = sock
                    else:
                        self.sock = socket.create_connection((self.host, self.port),
                                    self.timeout, self.source_address)
                        fetch.connection_cache.add_connection(self.host, self.port, self.sock)
        self._runwget(ud, d, fetchcmd, True)

                    if self._tunnel_host:
                        self._tunnel()

        class CacheHTTPHandler(urllib2.HTTPHandler):
            def http_open(self, req):
                return self.do_open(HTTPConnectionCache, req)

            def do_open(self, http_class, req):
                """Return an addinfourl object for the request, using http_class.

                http_class must implement the HTTPConnection API from httplib.
                The addinfourl return value is a file-like object. It also
                has methods and attributes including:
                    - info(): return a mimetools.Message object for the headers
                    - geturl(): return the original request URL
                    - code: HTTP status code
                """
                host = req.get_host()
                if not host:
                    raise urllib2.URLError('no host given')

                h = http_class(host, timeout=req.timeout) # will parse host:port
                h.set_debuglevel(self._debuglevel)

                headers = dict(req.unredirected_hdrs)
                headers.update(dict((k, v) for k, v in req.headers.items()
                            if k not in headers))

                # We want to make an HTTP/1.1 request, but the addinfourl
                # class isn't prepared to deal with a persistent connection.
                # It will try to read all remaining data from the socket,
                # which will block while the server waits for the next request.
                # So make sure the connection gets closed after the (only)
                # request.

                # Don't close connection when connection_cache is enabled,
                if fetch.connection_cache is None:
                    headers["Connection"] = "close"
                else:
                    headers["Connection"] = "Keep-Alive" # Works for HTTP/1.0

                headers = dict(
                    (name.title(), val) for name, val in headers.items())

                if req._tunnel_host:
                    tunnel_headers = {}
                    proxy_auth_hdr = "Proxy-Authorization"
                    if proxy_auth_hdr in headers:
                        tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
                        # Proxy-Authorization should not be sent to origin
                        # server.
                        del headers[proxy_auth_hdr]
                    h.set_tunnel(req._tunnel_host, headers=tunnel_headers)

                try:
                    h.request(req.get_method(), req.get_selector(), req.data, headers)
                except socket.error, err: # XXX what error?
                    # Don't close connection when cache is enabled.
                    if fetch.connection_cache is None:
                        h.close()
                    raise urllib2.URLError(err)
                else:
                    try:
                        r = h.getresponse(buffering=True)
                    except TypeError: # buffering kw not supported
                        r = h.getresponse()

                # Pick apart the HTTPResponse object to get the addinfourl
                # object initialized properly.

                # Wrap the HTTPResponse object in socket's file object adapter
                # for Windows. That adapter calls recv(), so delegate recv()
                # to read(). This weird wrapping allows the returned object to
                # have readline() and readlines() methods.

                # XXX It might be better to extract the read buffering code
                # out of socket._fileobject() and into a base class.
                r.recv = r.read

                # no data, just have to read
                r.read()
                class fp_dummy(object):
                    def read(self):
                        return ""
                    def readline(self):
                        return ""
                    def close(self):
                        pass

                resp = addinfourl(fp_dummy(), r.msg, req.get_full_url())
                resp.code = r.status
                resp.msg = r.reason

                # Close the connection when the server requests it.
                if fetch.connection_cache is not None:
                    if 'Connection' in r.msg and r.msg['Connection'] == 'close':
                        fetch.connection_cache.remove_connection(h.host, h.port)

                return resp

        class HTTPMethodFallback(urllib2.BaseHandler):
            """
            Fallback to GET if HEAD is not allowed (405 HTTP error)
            """
            def http_error_405(self, req, fp, code, msg, headers):
                fp.read()
                fp.close()

                newheaders = dict((k,v) for k,v in req.headers.items()
                                  if k.lower() not in ("content-length", "content-type"))
                return self.parent.open(urllib2.Request(req.get_full_url(),
                                        headers=newheaders,
                                        origin_req_host=req.get_origin_req_host(),
                                        unverifiable=True))

            """
            Some servers (e.g. GitHub archives, hosted on Amazon S3) return 403
            Forbidden when they actually mean 405 Method Not Allowed.
            """
            http_error_403 = http_error_405

            """
            Some servers (e.g. FusionForge) return 406 Not Acceptable when they
            actually mean 405 Method Not Allowed.
            """
            http_error_406 = http_error_405

        class FixedHTTPRedirectHandler(urllib2.HTTPRedirectHandler):
            """
            urllib2.HTTPRedirectHandler resets the method to GET on redirect,
            when we want to follow redirects using the original method.
            """
            def redirect_request(self, req, fp, code, msg, headers, newurl):
                newreq = urllib2.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl)
                newreq.get_method = lambda: req.get_method()
                return newreq
        exported_proxies = export_proxies(d)

        handlers = [FixedHTTPRedirectHandler, HTTPMethodFallback]
        if exported_proxies:
            handlers.append(urllib2.ProxyHandler())
        handlers.append(CacheHTTPHandler())
        # XXX: Since Python 2.7.9 ssl cert validation is enabled by default
        # see PEP-0476, this causes verification errors on some https servers
        # so disable by default.
        import ssl
        if hasattr(ssl, '_create_unverified_context'):
            handlers.append(urllib2.HTTPSHandler(context=ssl._create_unverified_context()))
        opener = urllib2.build_opener(*handlers)

        try:
            uri = ud.url.split(";")[0]
            r = urllib2.Request(uri)
            r.get_method = lambda: "HEAD"
            opener.open(r)
        except urllib2.URLError as e:
            # debug for now to avoid spamming the logs in e.g. remote sstate searches
            logger.debug(2, "checkstatus() urlopen failed: %s" % e)
            return False
        return True

    def _parse_path(self, regex, s):
        """
        Find and group name, version and archive type in the given string s
        """

        m = regex.search(s)
        if m:
            pname = ''
            pver = ''
            ptype = ''

            mdict = m.groupdict()
            if 'name' in mdict.keys():
                pname = mdict['name']
            if 'pver' in mdict.keys():
                pver = mdict['pver']
            if 'type' in mdict.keys():
                ptype = mdict['type']

            bb.debug(3, "_parse_path: %s, %s, %s" % (pname, pver, ptype))

            return (pname, pver, ptype)

        return None

    def _modelate_version(self, version):
        if version[0] in ['.', '-']:
            if version[1].isdigit():
                version = version[1] + version[0] + version[2:len(version)]
            else:
                version = version[1:len(version)]

        version = re.sub('-', '.', version)
        version = re.sub('_', '.', version)
        version = re.sub('(rc)+', '.1000.', version)
        version = re.sub('(beta)+', '.100.', version)
        version = re.sub('(alpha)+', '.10.', version)
        if version[0] == 'v':
            version = version[1:len(version)]
        return version
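        # Worked examples (hypothetical inputs): "1.2-rc3" becomes
        # "1.2..1000.3" after the '-' -> '.' and 'rc' -> '.1000.' rewriting,
        # and "v0.9_beta1" becomes "0.9..100.1"; the numeric stand-ins let
        # bb.utils.vercmp() rank rc > beta > alpha releases consistently.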

    def _vercmp(self, old, new):
        """
        Check whether 'new' is newer than 'old' version. We use existing vercmp() for the
        purpose. PE is cleared in comparison as it's not for build, and PR is cleared too
        for simplicity as it's somehow difficult to get from various upstream formats
        """

        (oldpn, oldpv, oldsuffix) = old
        (newpn, newpv, newsuffix) = new

        """
        Check for a new suffix type that we have never heard of before
        """
        if (newsuffix):
            m = self.suffix_regex_comp.search(newsuffix)
            if not m:
                bb.warn("%s has a possible unknown suffix: %s" % (newpn, newsuffix))
                return False

        """
        Not our package so ignore it
        """
        if oldpn != newpn:
            return False

        oldpv = self._modelate_version(oldpv)
        newpv = self._modelate_version(newpv)

        return bb.utils.vercmp(("0", oldpv, ""), ("0", newpv, ""))

    def _fetch_index(self, uri, ud, d):
        """
        Run fetch checkstatus to get directory information
        """
        f = tempfile.NamedTemporaryFile()

        agent = "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12"
        fetchcmd = self.basecmd
        fetchcmd += " -O " + f.name + " --user-agent='" + agent + "' '" + uri + "'"
        try:
            self._runwget(ud, d, fetchcmd, True)
            fetchresult = f.read()
        except bb.fetch2.BBFetchException:
            fetchresult = ""

        f.close()
        return fetchresult

    def _check_latest_version(self, url, package, package_regex, current_version, ud, d):
        """
        Return the latest version of a package inside a given directory path
        If error or no version, return ""
        """
        valid = 0
        version = ['', '', '']

        bb.debug(3, "VersionURL: %s" % (url))
        soup = BeautifulSoup(self._fetch_index(url, ud, d), "html.parser", parse_only=SoupStrainer("a"))
        if not soup:
            bb.debug(3, "*** %s NO SOUP" % (url))
            return ""

        for line in soup.find_all('a', href=True):
            bb.debug(3, "line['href'] = '%s'" % (line['href']))
            bb.debug(3, "line = '%s'" % (str(line)))

            newver = self._parse_path(package_regex, line['href'])
            if not newver:
                newver = self._parse_path(package_regex, str(line))

            if newver:
                bb.debug(3, "Upstream version found: %s" % newver[1])
                if valid == 0:
                    version = newver
                    valid = 1
                elif self._vercmp(version, newver) < 0:
                    version = newver

        pupver = re.sub('_', '.', version[1])

        bb.debug(3, "*** %s -> UpstreamVersion = %s (CurrentVersion = %s)" %
                (package, pupver or "N/A", current_version[1]))

        if valid:
            return pupver

        return ""

    def _check_latest_version_by_dir(self, dirver, package, package_regex,
            current_version, ud, d):
        """
        Scan every directory in order to get upstream version.
        """
        version_dir = ['', '', '']
        version = ['', '', '']

        dirver_regex = re.compile("(?P<pfx>\D*)(?P<ver>(\d+[\.\-_])+(\d+))")
        s = dirver_regex.search(dirver)
        if s:
            version_dir[1] = s.group('ver')
        else:
            version_dir[1] = dirver

        dirs_uri = bb.fetch.encodeurl([ud.type, ud.host,
                ud.path.split(dirver)[0], ud.user, ud.pswd, {}])
        bb.debug(3, "DirURL: %s, %s" % (dirs_uri, package))

        soup = BeautifulSoup(self._fetch_index(dirs_uri, ud, d), "html.parser", parse_only=SoupStrainer("a"))
        if not soup:
            return version[1]

        for line in soup.find_all('a', href=True):
            s = dirver_regex.search(line['href'].strip("/"))
            if s:
                sver = s.group('ver')

                # When the prefix is part of the version directory we need to
                # ensure that only the version directory is used, so remove
                # any preceding directories if present.
                #
                # Example: pfx = '/dir1/dir2/v' and version = '2.5'; the
                # expected result is v2.5.
                spfx = s.group('pfx').split('/')[-1]

                version_dir_new = ['', sver, '']
                if self._vercmp(version_dir, version_dir_new) <= 0:
                    dirver_new = spfx + sver
                    path = ud.path.replace(dirver, dirver_new, True) \
                        .split(package)[0]
                    uri = bb.fetch.encodeurl([ud.type, ud.host, path,
                            ud.user, ud.pswd, {}])

                    pupver = self._check_latest_version(uri,
                            package, package_regex, current_version, ud, d)
                    if pupver:
                        version[1] = pupver

                    version_dir = version_dir_new

        return version[1]

    def _init_regexes(self, package, ud, d):
        """
        Match as many patterns as possible such as:
                gnome-common-2.20.0.tar.gz (most common format)
                gtk+-2.90.1.tar.gz
                xf86-input-synaptics-12.6.9.tar.gz
                dri2proto-2.3.tar.gz
                blktool_4.orig.tar.gz
                libid3tag-0.15.1b.tar.gz
                unzip552.tar.gz
                icu4c-3_6-src.tgz
                genext2fs_1.3.orig.tar.gz
                gst-fluendo-mp3
        """
        # match most patterns which uses "-" as separator to version digits
        pn_prefix1 = "[a-zA-Z][a-zA-Z0-9]*([-_][a-zA-Z]\w+)*\+?[-_]"
        # a loose pattern such as for unzip552.tar.gz
        pn_prefix2 = "[a-zA-Z]+"
        # a loose pattern such as for 80325-quicky-0.4.tar.gz
        pn_prefix3 = "[0-9]+[-]?[a-zA-Z]+"
        # Save the Package Name (pn) Regex for use later
        pn_regex = "(%s|%s|%s)" % (pn_prefix1, pn_prefix2, pn_prefix3)

        # match version
        pver_regex = "(([A-Z]*\d+[a-zA-Z]*[\.\-_]*)+)"

        # match arch
        parch_regex = "-source|_all_"

        # src.rpm extension was added only for rpm package. Can be removed if the rpm
        # packaged will always be considered as having to be manually upgraded
        psuffix_regex = "(tar\.gz|tgz|tar\.bz2|zip|xz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)"

        # match name, version and archive type of a package
        package_regex_comp = re.compile("(?P<name>%s?\.?v?)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s$)"
                % (pn_regex, pver_regex, parch_regex, psuffix_regex))
        self.suffix_regex_comp = re.compile(psuffix_regex)

        # compile regex, can be specific by package or generic regex
        pn_regex = d.getVar('UPSTREAM_CHECK_REGEX', True)
        if pn_regex:
            package_custom_regex_comp = re.compile(pn_regex)
        else:
            version = self._parse_path(package_regex_comp, package)
            if version:
                package_custom_regex_comp = re.compile(
                    "(?P<name>%s)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s)" %
                    (re.escape(version[0]), pver_regex, parch_regex, psuffix_regex))
            else:
                package_custom_regex_comp = None

        return package_custom_regex_comp
|
||||
|
||||
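    # A standalone sketch of the composed package regex above matching one of
    # the filename formats listed in the docstring; the suffix list is
    # shortened here for readability, everything else mirrors the code:
    #
    #   import re
    #   pn_regex = "([a-zA-Z][a-zA-Z0-9]*([-_][a-zA-Z]\w+)*\+?[-_]|[a-zA-Z]+|[0-9]+[-]?[a-zA-Z]+)"
    #   pver_regex = "(([A-Z]*\d+[a-zA-Z]*[\.\-_]*)+)"
    #   parch_regex = "-source|_all_"
    #   psuffix_regex = "(tar\.gz|tgz|tar\.bz2|zip|xz|rpm|bz2)"
    #   demo = re.compile("(?P<name>%s?\.?v?)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s$)"
    #           % (pn_regex, pver_regex, parch_regex, psuffix_regex))
    #   m = demo.match("gnome-common-2.20.0.tar.gz")
    #   print(m.group('name'), m.group('pver'), m.group('type'))
    #   # expected: 'gnome-common-' '2.20.0' 'tar.gz'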
    def latest_versionstring(self, ud, d):
        """
        Manipulate the URL and try to obtain the latest package version.

        Sanity check to ensure the same name and type.
        """
        package = ud.path.split("/")[-1]
        current_version = ['', d.getVar('PV', True), '']

        """possible to have no version in pkg name, such as spectrum-fw"""
        if not re.search("\d+", package):
            current_version[1] = re.sub('_', '.', current_version[1])
            current_version[1] = re.sub('-', '.', current_version[1])
            return (current_version[1], '')

        package_regex = self._init_regexes(package, ud, d)
        if package_regex is None:
            bb.warn("latest_versionstring: package %s doesn't match pattern" % (package))
            return ('', '')
        bb.debug(3, "latest_versionstring, regex: %s" % (package_regex.pattern))

        uri = ""
        regex_uri = d.getVar("UPSTREAM_CHECK_URI", True)
        if not regex_uri:
            path = ud.path.split(package)[0]

            # search for version matches in folders inside the path, like:
            # "5.7" in http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
            dirver_regex = re.compile("(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
            m = dirver_regex.search(path)
            if m:
                pn = d.getVar('PN', True)
                dirver = m.group('dirver')

                dirver_pn_regex = re.compile("%s\d?" % (re.escape(pn)))
                if not dirver_pn_regex.search(dirver):
                    return (self._check_latest_version_by_dir(dirver,
                        package, package_regex, current_version, ud, d), '')

            uri = bb.fetch.encodeurl([ud.type, ud.host, path, ud.user, ud.pswd, {}])
        else:
            uri = regex_uri

        return (self._check_latest_version(uri, package, package_regex,
                current_version, ud, d), '')
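    # Hedged usage sketch: recipes can steer the upstream check via the two
    # variables the code above reads; the variable names come from the code,
    # while the URL and regex values here are invented examples:
    #
    #   UPSTREAM_CHECK_URI = "https://example.org/releases/"
    #   UPSTREAM_CHECK_REGEX = "myproject-(?P<pver>\d+(\.\d+)+)\.tar\.gz"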
@@ -1,479 +0,0 @@
#!/usr/bin/env python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
# Copyright (C) 2006 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os
import sys
import logging
import optparse
import warnings

import bb
from bb import event
import bb.msg
from bb import cooker
from bb import ui
from bb import server
from bb import cookerdata

logger = logging.getLogger("BitBake")

class BBMainException(Exception):
    pass

def present_options(optionlist):
    if len(optionlist) > 1:
        return ' or '.join([', '.join(optionlist[:-1]), optionlist[-1]])
    else:
        return optionlist[0]

class BitbakeHelpFormatter(optparse.IndentedHelpFormatter):
    def format_option(self, option):
        # We need to do this here rather than in the text we supply to
        # add_option() because we don't want to call list_extension_modules()
        # on every execution (since it imports all of the modules)
        # Note also that we modify option.help rather than the returned text
        # - this is so that we don't have to re-format the text ourselves
        if option.dest == 'ui':
            valid_uis = list_extension_modules(bb.ui, 'main')
            option.help = option.help.replace('@CHOICES@', present_options(valid_uis))
        elif option.dest == 'servertype':
            valid_server_types = list_extension_modules(bb.server, 'BitBakeServer')
            option.help = option.help.replace('@CHOICES@', present_options(valid_server_types))

        return optparse.IndentedHelpFormatter.format_option(self, option)

def list_extension_modules(pkg, checkattr):
    """
    Lists extension modules in a specific Python package
    (e.g. UIs, servers). NOTE: Calling this function will import all of the
    submodules of the specified module in order to check for the specified
    attribute; this can have unusual side-effects. As a result, this should
    only be called when displaying help text or error messages.
    Parameters:
        pkg: previously imported Python package to list
        checkattr: attribute to look for in each module to determine if it's
            valid as the type of extension you are looking for
    """
    import pkgutil
    pkgdir = os.path.dirname(pkg.__file__)

    modules = []
    for _, modulename, _ in pkgutil.iter_modules([pkgdir]):
        if os.path.isdir(os.path.join(pkgdir, modulename)):
            # ignore directories
            continue
        try:
            module = __import__(pkg.__name__, fromlist=[modulename])
        except:
            # If we can't import it, it's not valid
            continue
        module_if = getattr(module, modulename)
        if getattr(module_if, 'hidden_extension', False):
            continue
        if not checkattr or hasattr(module_if, checkattr):
            modules.append(modulename)
    return modules
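# A small standalone sketch of present_options(), which formats the module
# lists above for help text; the option names below are invented examples:
#
#   present_options(["knotty"])                   -> "knotty"
#   present_options(["knotty", "ncurses", "hob"]) -> "knotty, ncurses or hob"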
def import_extension_module(pkg, modulename, checkattr):
    try:
        # Dynamically load the UI based on the ui name. Although we
        # suggest a fixed set this allows you to have flexibility in which
        # ones are available.
        module = __import__(pkg.__name__, fromlist=[modulename])
        return getattr(module, modulename)
    except AttributeError:
        modules = present_options(list_extension_modules(pkg, checkattr))
        raise BBMainException('FATAL: Unable to import extension module "%s" from %s. '
                              'Valid extension modules: %s' % (modulename, pkg.__name__, modules))

# Display bitbake/OE warnings via the BitBake.Warnings logger, ignoring others
warnlog = logging.getLogger("BitBake.Warnings")
_warnings_showwarning = warnings.showwarning
def _showwarning(message, category, filename, lineno, file=None, line=None):
    if file is not None:
        if _warnings_showwarning is not None:
            _warnings_showwarning(message, category, filename, lineno, file, line)
    else:
        s = warnings.formatwarning(message, category, filename, lineno)
        warnlog.warning(s)

warnings.showwarning = _showwarning
warnings.filterwarnings("ignore")
warnings.filterwarnings("default", module="(<string>$|(oe|bb)\.)")
warnings.filterwarnings("ignore", category=PendingDeprecationWarning)
warnings.filterwarnings("ignore", category=ImportWarning)
warnings.filterwarnings("ignore", category=DeprecationWarning, module="<string>$")
warnings.filterwarnings("ignore", message="With-statements now directly support multiple context managers")
class BitBakeConfigParameters(cookerdata.ConfigParameters):

    def parseCommandLine(self, argv=sys.argv):
        parser = optparse.OptionParser(
            formatter=BitbakeHelpFormatter(),
            version="BitBake Build Tool Core version %s" % bb.__version__,
            usage="""%prog [options] [recipename/target recipe:do_task ...]

    Executes the specified task (default is 'build') for a given set of target recipes (.bb files).
    It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which
    will provide the layer, BBFILES and other configuration information.""")

        parser.add_option("-b", "--buildfile", action="store", dest="buildfile", default=None,
                          help="Execute tasks from a specific .bb recipe directly. WARNING: Does "
                               "not handle any dependencies from other recipes.")

        parser.add_option("-k", "--continue", action="store_false", dest="abort", default=True,
                          help="Continue as much as possible after an error. While the target that "
                               "failed and anything depending on it cannot be built, as much as "
                               "possible will be built before stopping.")

        parser.add_option("-a", "--tryaltconfigs", action="store_true",
                          dest="tryaltconfigs", default=False,
                          help="Continue with builds by trying to use alternative providers "
                               "where possible.")

        parser.add_option("-f", "--force", action="store_true", dest="force", default=False,
                          help="Force the specified targets/task to run (invalidating any "
                               "existing stamp file).")

        parser.add_option("-c", "--cmd", action="store", dest="cmd",
                          help="Specify the task to execute. The exact options available "
                               "depend on the metadata. Some examples might be 'compile' "
                               "or 'populate_sysroot'; 'listtasks' may give a list of "
                               "the tasks available.")

        parser.add_option("-C", "--clear-stamp", action="store", dest="invalidate_stamp",
                          help="Invalidate the stamp for the specified task such as 'compile' "
                               "and then run the default task for the specified target(s).")

        parser.add_option("-r", "--read", action="append", dest="prefile", default=[],
                          help="Read the specified file before bitbake.conf.")

        parser.add_option("-R", "--postread", action="append", dest="postfile", default=[],
                          help="Read the specified file after bitbake.conf.")

        parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False,
                          help="Output more log message data to the terminal.")

        parser.add_option("-D", "--debug", action="count", dest="debug", default=0,
                          help="Increase the debug level. You can specify this more than once.")

        parser.add_option("-n", "--dry-run", action="store_true", dest="dry_run", default=False,
                          help="Don't execute, just go through the motions.")

        parser.add_option("-S", "--dump-signatures", action="append", dest="dump_signatures",
                          default=[], metavar="SIGNATURE_HANDLER",
                          help="Dump out the signature construction information, with no task "
                               "execution. The SIGNATURE_HANDLER parameter is passed to the "
                               "handler. Two common values are none and printdiff, but the handler "
                               "may define more/less. none means only dump the signature, printdiff "
                               "means compare the dumped signature with the cached one.")

        parser.add_option("-p", "--parse-only", action="store_true",
                          dest="parse_only", default=False,
                          help="Quit after parsing the BB recipes.")

        parser.add_option("-s", "--show-versions", action="store_true",
                          dest="show_versions", default=False,
                          help="Show current and preferred versions of all recipes.")

        parser.add_option("-e", "--environment", action="store_true",
                          dest="show_environment", default=False,
                          help="Show the global or per-recipe environment complete with information "
                               "about where variables were set/changed.")

        parser.add_option("-g", "--graphviz", action="store_true", dest="dot_graph", default=False,
                          help="Save dependency tree information for the specified "
                               "targets in the dot syntax.")

        parser.add_option("-I", "--ignore-deps", action="append",
                          dest="extra_assume_provided", default=[],
                          help="Assume these dependencies don't exist and are already provided "
                               "(equivalent to ASSUME_PROVIDED). Useful to make dependency "
                               "graphs more appealing.")

        parser.add_option("-l", "--log-domains", action="append", dest="debug_domains", default=[],
                          help="Show debug logging for the specified logging domains.")

        parser.add_option("-P", "--profile", action="store_true", dest="profile", default=False,
                          help="Profile the command and save reports.")

        # @CHOICES@ is substituted out by BitbakeHelpFormatter above
        parser.add_option("-u", "--ui", action="store", dest="ui",
                          default=os.environ.get('BITBAKE_UI', 'knotty'),
                          help="The user interface to use (@CHOICES@ - default %default).")

        # @CHOICES@ is substituted out by BitbakeHelpFormatter above
        parser.add_option("-t", "--servertype", action="store", dest="servertype",
                          default=["process", "xmlrpc"]["BBSERVER" in os.environ],
                          help="Choose which server type to use (@CHOICES@ - default %default).")

        parser.add_option("", "--token", action="store", dest="xmlrpctoken",
                          default=os.environ.get("BBTOKEN"),
                          help="Specify the connection token to be used when connecting "
                               "to a remote server.")

        parser.add_option("", "--revisions-changed", action="store_true",
                          dest="revisions_changed", default=False,
                          help="Set the exit code depending on whether upstream floating "
                               "revisions have changed or not.")

        parser.add_option("", "--server-only", action="store_true",
                          dest="server_only", default=False,
                          help="Run bitbake without a UI, only starting a server "
                               "(cooker) process.")

        parser.add_option("-B", "--bind", action="store", dest="bind", default=False,
                          help="The name/address for the bitbake server to bind to.")

        parser.add_option("", "--no-setscene", action="store_true",
                          dest="nosetscene", default=False,
                          help="Do not run any setscene tasks. sstate will be ignored and "
                               "everything needed, built.")

        parser.add_option("", "--setscene-only", action="store_true",
                          dest="setsceneonly", default=False,
                          help="Only run setscene tasks, don't run any real tasks.")

        parser.add_option("", "--remote-server", action="store", dest="remote_server",
                          default=os.environ.get("BBSERVER"),
                          help="Connect to the specified server.")

        parser.add_option("-m", "--kill-server", action="store_true",
                          dest="kill_server", default=False,
                          help="Terminate the remote server.")

        parser.add_option("", "--observe-only", action="store_true",
                          dest="observe_only", default=False,
                          help="Connect to a server as an observing-only client.")

        parser.add_option("", "--status-only", action="store_true",
                          dest="status_only", default=False,
                          help="Check the status of the remote bitbake server.")

        parser.add_option("-w", "--write-log", action="store", dest="writeeventlog",
                          default=os.environ.get("BBEVENTLOG"),
                          help="Writes the event log of the build to a bitbake event json file. "
                               "Use '' (empty string) to assign the name automatically.")

        options, targets = parser.parse_args(argv)

        # use configuration files from environment variables
        if "BBPRECONF" in os.environ:
            options.prefile.append(os.environ["BBPRECONF"])

        if "BBPOSTCONF" in os.environ:
            options.postfile.append(os.environ["BBPOSTCONF"])

        # fill in a proper log name if not supplied
        if options.writeeventlog is not None and len(options.writeeventlog) == 0:
            from datetime import datetime
            eventlog = "bitbake_eventlog_%s.json" % datetime.now().strftime("%Y%m%d%H%M%S")
            options.writeeventlog = eventlog

        # if BBSERVER says to autodetect, let's do that
        if options.remote_server:
            [host, port] = options.remote_server.split(":", 2)
            port = int(port)
            # use the automatic port if port is set to -1, meaning read it from
            # the bitbake.lock file; this is a bit tricky, but we always expect
            # to be in the base of the build directory if we need to have a
            # chance to start the server later, anyway
            if port == -1:
                lock_location = "./bitbake.lock"
                # we try to read the address at all times; if the server is not started,
                # we'll try to start it after the first connect fails, below
                try:
                    lf = open(lock_location, 'r')
                    remotedef = lf.readline()
                    [host, port] = remotedef.split(":")
                    port = int(port)
                    lf.close()
                    options.remote_server = remotedef
                except Exception as e:
                    raise BBMainException("Failed to read bitbake.lock (%s), invalid port" % str(e))

        return options, targets[1:]
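# Illustrative invocations of the options defined above (recipe and task
# names are examples, not taken from this file):
#
#   bitbake -k core-image-minimal        # continue past errors where possible
#   bitbake -f -c compile busybox        # force re-running the compile task
#   bitbake -e busybox > env.log         # dump the per-recipe environment
#   BBSERVER=localhost:-1 bitbake --status-only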
def start_server(servermodule, configParams, configuration, features):
    server = servermodule.BitBakeServer()
    single_use = not configParams.server_only
    if configParams.bind:
        (host, port) = configParams.bind.split(':')
        server.initServer((host, int(port)), single_use)
        configuration.interface = [server.serverImpl.host, server.serverImpl.port]
    else:
        server.initServer(single_use=single_use)
        configuration.interface = []

    try:
        configuration.setServerRegIdleCallback(server.getServerIdleCB())

        cooker = bb.cooker.BBCooker(configuration, features)

        server.addcooker(cooker)
        server.saveConnectionDetails()
    except Exception as e:
        while hasattr(server, "event_queue"):
            try:
                import queue
            except ImportError:
                import Queue as queue
            try:
                event = server.event_queue.get(block=False)
            except (queue.Empty, IOError):
                break
            if isinstance(event, logging.LogRecord):
                logger.handle(event)
        raise
    server.detach()
    cooker.lock.close()
    return server
def bitbake_main(configParams, configuration):

    # Python multiprocessing requires /dev/shm on Linux
    if sys.platform.startswith('linux') and not os.access('/dev/shm', os.W_OK | os.X_OK):
        raise BBMainException("FATAL: /dev/shm does not exist or is not writable")

    # Unbuffer stdout to avoid log truncation in the event
    # of a disorderly exit, as well as to provide timely
    # updates to log files for use with tail
    try:
        if sys.stdout.name == '<stdout>':
            sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
    except:
        pass

    configuration.setConfigParameters(configParams)

    ui_module = import_extension_module(bb.ui, configParams.ui, 'main')
    servermodule = import_extension_module(bb.server, configParams.servertype, 'BitBakeServer')

    if configParams.server_only:
        if configParams.servertype != "xmlrpc":
            raise BBMainException("FATAL: If '--server-only' is defined, we must set the "
                                  "servertype as 'xmlrpc'.\n")
        if not configParams.bind:
            raise BBMainException("FATAL: The '--server-only' option requires a name/address "
                                  "to bind to with the -B option.\n")
        if configParams.remote_server:
            raise BBMainException("FATAL: The '--server-only' option conflicts with %s.\n" %
                                  ("the BBSERVER environment variable" if "BBSERVER" in os.environ \
                                   else "the '--remote-server' option"))

    if configParams.bind and configParams.servertype != "xmlrpc":
        raise BBMainException("FATAL: If '-B' or '--bind' is defined, we must "
                              "set the servertype as 'xmlrpc'.\n")

    if configParams.remote_server and configParams.servertype != "xmlrpc":
        raise BBMainException("FATAL: If '--remote-server' is defined, we must "
                              "set the servertype as 'xmlrpc'.\n")

    if configParams.observe_only and (not configParams.remote_server or configParams.bind):
        raise BBMainException("FATAL: '--observe-only' can only be used by UI clients "
                              "connecting to a server.\n")

    if configParams.kill_server and not configParams.remote_server:
        raise BBMainException("FATAL: '--kill-server' can only be used to "
                              "terminate a remote server")

    if "BBDEBUG" in os.environ:
        level = int(os.environ["BBDEBUG"])
        if level > configuration.debug:
            configuration.debug = level

    bb.msg.init_msgconfig(configParams.verbose, configuration.debug,
                          configuration.debug_domains)

    # Ensure logging messages get sent to the UI as events
    handler = bb.event.LogHandler()
    if not configParams.status_only:
        # In status-only mode there are no logs and no UI
        logger.addHandler(handler)

    # Clear away any spurious environment variables while we stoke up the cooker
    cleanedvars = bb.utils.clean_environment()

    featureset = []
    if not configParams.server_only:
        # Collect the feature set for the UI
        featureset = getattr(ui_module, "featureSet", [])

    if configParams.server_only:
        for param in ('prefile', 'postfile'):
            value = getattr(configParams, param)
            if value:
                setattr(configuration, "%s_server" % param, value)
                param = "%s_server" % param

    if not configParams.remote_server:
        # we start a server with a given configuration
        server = start_server(servermodule, configParams, configuration, featureset)
        bb.event.ui_queue = []
    else:
        # we start a stub server that is actually an XMLRPClient that connects to a real server
        server = servermodule.BitBakeXMLRPCClient(configParams.observe_only,
                                                  configParams.xmlrpctoken)
        server.saveConnectionDetails(configParams.remote_server)

    if not configParams.server_only:
        try:
            server_connection = server.establishConnection(featureset)
        except Exception as e:
            bb.fatal("Could not connect to server %s: %s" % (configParams.remote_server, str(e)))

        if configParams.kill_server:
            server_connection.connection.terminateServer()
            bb.event.ui_queue = []
            return 0

        server_connection.setupEventQueue()

        # Restore the environment in case the UI needs it
        for k in cleanedvars:
            os.environ[k] = cleanedvars[k]

        logger.removeHandler(handler)

        if configParams.status_only:
            server_connection.terminate()
            return 0

        try:
            return ui_module.main(server_connection.connection, server_connection.events,
                                  configParams)
        finally:
            bb.event.ui_queue = []
            server_connection.terminate()
    else:
        print("Bitbake server address: %s, server port: %s" % (server.serverImpl.host,
                                                               server.serverImpl.port))
        return 0

    return 1
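# Hedged sketch of how a front-end script might drive bitbake_main(); this
# mirrors the era's bin/bitbake wrapper in spirit but is reconstructed here,
# not quoted from it, and CookerConfiguration is assumed from bb.cookerdata:
#
#   if __name__ == "__main__":
#       config_params = BitBakeConfigParameters(sys.argv)
#       ret = bitbake_main(config_params, cookerdata.CookerConfiguration())
#       sys.exit(ret)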
@@ -19,22 +19,11 @@

from bb.utils import better_compile, better_exec

def insert_method(modulename, code, fn, lineno):
def insert_method(modulename, code, fn):
    """
    Add the code of a module. The methods will simply be
    added; no checking will be done.
    """
    comp = better_compile(code, modulename, fn, lineno=lineno)
    comp = better_compile(code, modulename, fn )
    better_exec(comp, None, code, fn)

compilecache = {}

def compile_cache(code):
    h = hash(code)
    if h in compilecache:
        return compilecache[h]
    return None

def compile_cache_add(code, compileobj):
    h = hash(code)
    compilecache[h] = compileobj
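# A minimal standalone use of the compile cache helpers above; the code
# string is an invented example:
#
#   snippet = "def hello(d):\n    return 1\n"
#   if compile_cache(snippet) is None:
#       compile_cache_add(snippet, compile(snippet, "<demo>", "exec"))
#   assert compile_cache(snippet) is not None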
@@ -52,10 +52,10 @@ def getMountedDev(path):
    parentDev = os.stat(path).st_dev
    currentDev = parentDev
    # When the current directory's device is different from the
    # parent's, then the current directory is a mount point
    # parrent's, then the current directory is a mount point
    while parentDev == currentDev:
        mountPoint = path
        # Use dirname to get the parent's directory
        # Use dirname to get the parrent's directory
        path = os.path.dirname(path)
        # Reach the "/"
        if path == mountPoint:
@@ -77,7 +77,7 @@ def getDiskData(BBDirs, configuration):
    """Prepare disk data for disk space monitor"""

    # Save the device IDs, need the ID to be unique (the dictionary's key is
    # unique), so that when more than one directory is located on the same
    # unique), so that when more than one directories are located in the same
    # device, we just monitor it once
    devDict = {}
    for pathSpaceInode in BBDirs.split():
@@ -187,11 +187,11 @@ class diskMonitor:
        if self.spaceInterval and self.inodeInterval:
            self.enableMonitor = True
            # These are for saving the previous disk free space and inode, we
            # use them to avoid printing too many warning messages
            # use them to avoid print too many warning messages
            self.preFreeS = {}
            self.preFreeI = {}
            # This is for STOPTASKS and ABORT, to avoid printing the message
            # repeatedly while waiting for the tasks to finish
            # This is for STOPTASKS and ABORT, to avoid print the message repeatly
            # during waiting the tasks to finish
            self.checked = {}
            for k in self.devDict:
                self.preFreeS[k] = 0
@@ -220,7 +220,7 @@ class diskMonitor:
                if minSpace and freeSpace < minSpace:
                    # Always show warning, the self.checked would always be False if the action is WARN
                    if self.preFreeS[k] == 0 or self.preFreeS[k] - freeSpace > self.spaceInterval and not self.checked[k]:
                        logger.warning("The free space of %s (%s) is running low (%.3fGB left)" % \
                        logger.warn("The free space of %s (%s) is running low (%.3fGB left)" % \
                                (path, dev, freeSpace / 1024 / 1024 / 1024.0))
                        self.preFreeS[k] = freeSpace

@@ -239,14 +239,16 @@ class diskMonitor:
                freeInode = st.f_favail

                if minInode and freeInode < minInode:
                    # Some filesystems use dynamic inodes so can't run out
                    # (e.g. btrfs). This is reported by the inode count being 0.
                    # Some fs formats' (e.g., btrfs) statvfs.f_files (inodes) is
                    # zero, this is a feature of the fs, we disable the inode
                    # checking for such a fs.
                    if st.f_files == 0:
                        logger.info("Inode check for %s is unavailable, will remove it from disk monitor" % path)
                        self.devDict[k][2] = None
                        continue
                    # Always show warning, the self.checked would always be False if the action is WARN
                    if self.preFreeI[k] == 0 or self.preFreeI[k] - freeInode > self.inodeInterval and not self.checked[k]:
                        logger.warning("The free inode of %s (%s) is running low (%.3fK left)" % \
                        logger.warn("The free inode of %s (%s) is running low (%.3fK left)" % \
                                (path, dev, freeInode / 1024.0))
                        self.preFreeI[k] = freeInode
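# A standalone sketch of the statvfs arithmetic used by the checks above
# (the GB/K scaling follows the logging calls; "/tmp" is an arbitrary path):
#
#   import os
#   st = os.statvfs("/tmp")
#   free_space = st.f_bavail * st.f_frsize    # free space, in bytes
#   free_inode = st.f_favail                  # free inodes
#   print("%.3fGB left" % (free_space / 1024 / 1024 / 1024.0))
#   print("%.3fK inodes left" % (free_inode / 1024.0))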
@@ -90,9 +90,8 @@ class BBLogFormatter(logging.Formatter):
        if self.color_enabled:
            record = self.colorize(record)
        msg = logging.Formatter.format(self, record)
        if hasattr(record, 'bb_exc_formatted'):
            msg += '\n' + ''.join(record.bb_exc_formatted)
        elif hasattr(record, 'bb_exc_info'):

        if hasattr(record, 'bb_exc_info'):
            etype, value, tb = record.bb_exc_info
            formatted = bb.exceptions.format_exception(etype, value, tb, limit=5)
            msg += '\n' + ''.join(formatted)
@@ -151,7 +150,7 @@ loggerDefaultVerbose = False
loggerVerboseLogs = False
loggerDefaultDomains = []

def init_msgconfig(verbose, debug, debug_domains=None):
def init_msgconfig(verbose, debug, debug_domains = []):
    """
    Set default verbosity and debug levels and configure the logger
    """
@@ -159,10 +158,7 @@ def init_msgconfig(verbose, debug, debug_domains=None):
    bb.msg.loggerDefaultVerbose = verbose
    if verbose:
        bb.msg.loggerVerboseLogs = True
    if debug_domains:
        bb.msg.loggerDefaultDomains = debug_domains
    else:
        bb.msg.loggerDefaultDomains = []
    bb.msg.loggerDefaultDomains = debug_domains

def constructLogOptions():
    debug = loggerDefaultDebugLevel
@@ -202,8 +202,8 @@ if __name__ == '__main__':
    print(rec5._replace(k=222)._my_custom_method()) # MyMixIn's
    print(rec5._replace(k=222).count(2)) # MyMixIn's

    # Note that behavior: the standard namedtuple methods cannot be
    # overridden by a foreign mix-in -- even if the mix-in is declared
    # None that behavior: the standard namedtuple methods cannot be
    # overriden by a foreign mix-in -- even if the mix-in is declared
    # as the leftmost base class (but, obviously, you can override them
    # in the defined class or its subclasses):
@@ -26,10 +26,9 @@ File parsers for the BitBake build tools.

handlers = []

import errno
import logging
import os
import stat
import logging
import bb
import bb.utils
import bb.siggen
@@ -50,11 +49,8 @@ class ParseError(Exception):
        else:
            return "ParseError in %s: %s" % (self.filename, self.msg)

class SkipRecipe(Exception):
    """Exception raised to skip this recipe"""

class SkipPackage(SkipRecipe):
    """Exception raised to skip this recipe (use SkipRecipe in new code)"""
class SkipPackage(Exception):
    """Exception raised to skip this package"""

__mtime_cache = {}
def cached_mtime(f):
@@ -71,23 +67,13 @@ def cached_mtime_noerror(f):
        return __mtime_cache[f]

def update_mtime(f):
    try:
        __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
    except OSError:
        if f in __mtime_cache:
            del __mtime_cache[f]
        return 0
    __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
    return __mtime_cache[f]

def update_cache(f):
    if f in __mtime_cache:
        logger.debug(1, "Updating mtime cache for %s" % f)
        update_mtime(f)

def mark_dependency(d, f):
    if f.startswith('./'):
        f = "%s/%s" % (os.getcwd(), f[2:])
    deps = (d.getVar('__depends', False) or [])
    deps = (d.getVar('__depends') or [])
    s = (f, cached_mtime_noerror(f))
    if s not in deps:
        deps.append(s)
@@ -95,7 +81,7 @@ def mark_dependency(d, f):

def check_dependency(d, f):
    s = (f, cached_mtime_noerror(f))
    deps = (d.getVar('__depends', False) or [])
    deps = (d.getVar('__depends') or [])
    return s in deps

def supports(fn, data):
@@ -128,13 +114,14 @@ def resolve_file(fn, d):
    for af in attempts:
        mark_dependency(d, af)
    if not newfn:
        raise IOError(errno.ENOENT, "file %s not found in %s" % (fn, bbpath))
        raise IOError("file %s not found in %s" % (fn, bbpath))
    fn = newfn

    mark_dependency(d, fn)
    if not os.path.isfile(fn):
        raise IOError(errno.ENOENT, "file %s not found" % fn)
        raise IOError("file %s not found" % fn)

    logger.debug(2, "LOAD %s", fn)
    return fn

# Used by OpenEmbedded metadata
@@ -161,8 +148,8 @@ def vars_from_file(mypkg, d):
def get_file_depends(d):
    '''Return the dependent files'''
    dep_files = []
    depends = d.getVar('__base_depends', False) or []
    depends = depends + (d.getVar('__depends', False) or [])
    depends = d.getVar('__base_depends', True) or []
    depends = depends + (d.getVar('__depends', True) or [])
    for (fn, _) in depends:
        dep_files.append(os.path.abspath(fn))
    return " ".join(dep_files)
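# A condensed standalone sketch of the mtime-cache pattern above, which both
# variants of the code revolve around (the function body here paraphrases
# cached_mtime_noerror for illustration):
#
#   import os, stat
#   __mtime_cache = {}
#   def cached_mtime_noerror(f):
#       if f not in __mtime_cache:
#           try:
#               __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
#           except OSError:
#               return 0
#       return __mtime_cache[f]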
@@ -83,9 +83,9 @@ class DataNode(AstNode):

    def getFunc(self, key, data):
        if 'flag' in self.groupd and self.groupd['flag'] != None:
            return data.getVarFlag(key, self.groupd['flag'], expand=False, noweakdefault=True)
            return data.getVarFlag(key, self.groupd['flag'], noweakdefault=True)
        else:
            return data.getVar(key, False, noweakdefault=True, parsing=True)
            return data.getVar(key, noweakdefault=True)

    def eval(self, data):
        groupd = self.groupd
@@ -128,7 +128,7 @@ class DataNode(AstNode):
        if 'flag' in groupd and groupd['flag'] != None:
            flag = groupd['flag']
        elif groupd["lazyques"]:
            flag = "_defaultval"
            flag = "defaultval"

        loginfo['op'] = op
        loginfo['detail'] = groupd["value"]
@@ -136,42 +136,29 @@ class DataNode(AstNode):
        if flag:
            data.setVarFlag(key, flag, val, **loginfo)
        else:
            data.setVar(key, val, parsing=True, **loginfo)
            data.setVar(key, val, **loginfo)

class MethodNode(AstNode):
    tr_tbl = string.maketrans('/.+-@%&', '_______')
    tr_tbl = string.maketrans('/.+-@%', '______')

    def __init__(self, filename, lineno, func_name, body, python, fakeroot):
    def __init__(self, filename, lineno, func_name, body):
        AstNode.__init__(self, filename, lineno)
        self.func_name = func_name
        self.body = body
        self.python = python
        self.fakeroot = fakeroot

    def eval(self, data):
        text = '\n'.join(self.body)
        funcname = self.func_name
        if self.func_name == "__anonymous":
            funcname = ("__anon_%s_%s" % (self.lineno, self.filename.translate(MethodNode.tr_tbl)))
            self.python = True
            text = "def %s(d):\n" % (funcname) + text
            bb.methodpool.insert_method(funcname, text, self.filename, self.lineno - len(self.body))
            anonfuncs = data.getVar('__BBANONFUNCS', False) or []
            bb.methodpool.insert_method(funcname, text, self.filename)
            anonfuncs = data.getVar('__BBANONFUNCS') or []
            anonfuncs.append(funcname)
            data.setVar('__BBANONFUNCS', anonfuncs)
        if data.getVar(funcname, False):
            # clean up old version of this piece of metadata, as its
            # flags could cause problems
            data.delVarFlag(funcname, 'python')
            data.delVarFlag(funcname, 'fakeroot')
        if self.python:
            data.setVarFlag(funcname, "python", "1")
        if self.fakeroot:
            data.setVarFlag(funcname, "fakeroot", "1")
        data.setVarFlag(funcname, "func", 1)
        data.setVar(funcname, text, parsing=True)
        data.setVarFlag(funcname, 'filename', self.filename)
        data.setVarFlag(funcname, 'lineno', str(self.lineno - len(self.body)))
            data.setVar(funcname, text)
        else:
            data.setVarFlag(self.func_name, "func", 1)
            data.setVar(self.func_name, text)

class PythonMethodNode(AstNode):
    def __init__(self, filename, lineno, function, modulename, body):
@@ -185,12 +172,31 @@ class PythonMethodNode(AstNode):
        # 'this' file. This means we will not parse methods from
        # bb classes twice
        text = '\n'.join(self.body)
        bb.methodpool.insert_method(self.modulename, text, self.filename, self.lineno - len(self.body) - 1)
        bb.methodpool.insert_method(self.modulename, text, self.filename)
        data.setVarFlag(self.function, "func", 1)
        data.setVarFlag(self.function, "python", 1)
        data.setVar(self.function, text, parsing=True)
        data.setVarFlag(self.function, 'filename', self.filename)
        data.setVarFlag(self.function, 'lineno', str(self.lineno - len(self.body) - 1))
        data.setVar(self.function, text)

class MethodFlagsNode(AstNode):
    def __init__(self, filename, lineno, key, m):
        AstNode.__init__(self, filename, lineno)
        self.key = key
        self.m = m

    def eval(self, data):
        if data.getVar(self.key):
            # clean up old version of this piece of metadata, as its
            # flags could cause problems
            data.setVarFlag(self.key, 'python', None)
            data.setVarFlag(self.key, 'fakeroot', None)
        if self.m.group("py") is not None:
            data.setVarFlag(self.key, "python", "1")
        else:
            data.delVarFlag(self.key, "python")
        if self.m.group("fr") is not None:
            data.setVarFlag(self.key, "fakeroot", "1")
        else:
            data.delVarFlag(self.key, "fakeroot")

class ExportFuncsNode(AstNode):
    def __init__(self, filename, lineno, fns, classname):
@@ -203,28 +209,24 @@ class ExportFuncsNode(AstNode):
        for func in self.n:
            calledfunc = self.classname + "_" + func

            if data.getVar(func, False) and not data.getVarFlag(func, 'export_func', False):
            if data.getVar(func) and not data.getVarFlag(func, 'export_func'):
                continue

            if data.getVar(func, False):
            if data.getVar(func):
                data.setVarFlag(func, 'python', None)
                data.setVarFlag(func, 'func', None)

            for flag in [ "func", "python" ]:
                if data.getVarFlag(calledfunc, flag, False):
                    data.setVarFlag(func, flag, data.getVarFlag(calledfunc, flag, False))
                if data.getVarFlag(calledfunc, flag):
                    data.setVarFlag(func, flag, data.getVarFlag(calledfunc, flag))
            for flag in [ "dirs" ]:
                if data.getVarFlag(func, flag, False):
                    data.setVarFlag(calledfunc, flag, data.getVarFlag(func, flag, False))
            data.setVarFlag(func, "filename", "autogenerated")
            data.setVarFlag(func, "lineno", 1)
                if data.getVarFlag(func, flag):
                    data.setVarFlag(calledfunc, flag, data.getVarFlag(func, flag))

            if data.getVarFlag(calledfunc, "python", False):
                data.setVar(func, "    bb.build.exec_func('" + calledfunc + "', d)\n", parsing=True)
            if data.getVarFlag(calledfunc, "python"):
                data.setVar(func, "    bb.build.exec_func('" + calledfunc + "', d)\n")
            else:
                if "-" in self.classname:
                    bb.fatal("The classname %s contains a dash character and is calling an sh function %s using EXPORT_FUNCTIONS. Since a dash is illegal in sh function names, this cannot work, please rename the class or don't use EXPORT_FUNCTIONS." % (self.classname, calledfunc))
                data.setVar(func, "    " + calledfunc + "\n", parsing=True)
                data.setVar(func, "    " + calledfunc + "\n")
            data.setVarFlag(func, 'export_func', '1')

class AddTaskNode(AstNode):
@@ -251,7 +253,7 @@ class BBHandlerNode(AstNode):
        self.hs = fns.split()

    def eval(self, data):
        bbhands = data.getVar('__BBHANDLERS', False) or []
        bbhands = data.getVar('__BBHANDLERS') or []
        for h in self.hs:
            bbhands.append(h)
            data.setVarFlag(h, "handler", 1)
@@ -274,12 +276,15 @@ def handleExport(statements, filename, lineno, m):
def handleData(statements, filename, lineno, groupd):
    statements.append(DataNode(filename, lineno, groupd))

def handleMethod(statements, filename, lineno, func_name, body, python, fakeroot):
    statements.append(MethodNode(filename, lineno, func_name, body, python, fakeroot))
def handleMethod(statements, filename, lineno, func_name, body):
    statements.append(MethodNode(filename, lineno, func_name, body))

def handlePythonMethod(statements, filename, lineno, funcname, modulename, body):
    statements.append(PythonMethodNode(filename, lineno, funcname, modulename, body))

def handleMethodFlags(statements, filename, lineno, key, m):
    statements.append(MethodFlagsNode(filename, lineno, key, m))

def handleExportFuncs(statements, filename, lineno, m, classname):
    statements.append(ExportFuncsNode(filename, lineno, m.group(1), classname))
@@ -308,24 +313,23 @@ def handleInherit(statements, filename, lineno, m):

def finalize(fn, d, variant = None):
    all_handlers = {}
    for var in d.getVar('__BBHANDLERS', False) or []:
    for var in d.getVar('__BBHANDLERS') or []:
        # try to add the handler
        handlerfn = d.getVarFlag(var, "filename", False)
        handlerln = int(d.getVarFlag(var, "lineno", False))
        bb.event.register(var, d.getVar(var, False), (d.getVarFlag(var, "eventmask", True) or "").split(), handlerfn, handlerln)
        bb.event.register(var, d.getVar(var), (d.getVarFlag(var, "eventmask", True) or "").split())

    bb.event.fire(bb.event.RecipePreFinalise(fn), d)

    bb.data.expandKeys(d)
    bb.data.update_data(d)
    code = []
    for funcname in d.getVar("__BBANONFUNCS", False) or []:
    for funcname in d.getVar("__BBANONFUNCS") or []:
        code.append("%s(d)" % funcname)
    bb.utils.better_exec("\n".join(code), {"d": d})
    bb.data.update_data(d)

    tasklist = d.getVar('__BBTASKS', False) or []
    bb.build.add_tasks(tasklist, d)
    tasklist = d.getVar('__BBTASKS') or []
    deltasklist = d.getVar('__BBDELTASKS') or []
    bb.build.add_tasks(tasklist, deltasklist, d)

    bb.parse.siggen.finalise(fn, d, variant)
@@ -333,10 +337,8 @@ def finalize(fn, d, variant = None):

    bb.event.fire(bb.event.RecipeParsed(fn), d)

def _create_variants(datastores, names, function, onlyfinalise):
def _create_variants(datastores, names, function):
    def create_variant(name, orig_d, arg = None):
        if onlyfinalise and name not in onlyfinalise:
            return
        new_d = bb.data.createCopy(orig_d)
        function(arg or name, new_d)
        datastores[name] = new_d
@@ -373,7 +375,7 @@ def _expand_versions(versions):
def multi_finalize(fn, d):
    appends = (d.getVar("__BBAPPEND", True) or "").split()
    for append in appends:
        logger.debug(1, "Appending .bbappend file %s to %s", append, fn)
        logger.debug(2, "Appending .bbappend file %s to %s", append, fn)
        bb.parse.BBHandler.handle(append, d, True)

    onlyfinalise = d.getVar("__ONLYFINALISE", False)
@@ -382,7 +384,7 @@ def multi_finalize(fn, d):
    d = bb.data.createCopy(safe_d)
    try:
        finalize(fn, d)
    except bb.parse.SkipRecipe as e:
    except bb.parse.SkipPackage as e:
        d.setVar("__SKIPPED", e.args[0])
    datastores = {"": safe_d}

@@ -425,10 +427,10 @@ def multi_finalize(fn, d):
            verfunc(pv, d, safe_d)
        try:
            finalize(fn, d)
        except bb.parse.SkipRecipe as e:
        except bb.parse.SkipPackage as e:
            d.setVar("__SKIPPED", e.args[0])

        _create_variants(datastores, versions, verfunc, onlyfinalise)
        _create_variants(datastores, versions, verfunc)

    extended = d.getVar("BBCLASSEXTEND", True) or ""
    if extended:
@@ -458,14 +460,14 @@ def multi_finalize(fn, d):
            bb.parse.BBHandler.inherit(extendedmap[name], fn, 0, d)

        safe_d.setVar("BBCLASSEXTEND", extended)
        _create_variants(datastores, extendedmap.keys(), extendfunc, onlyfinalise)
        _create_variants(datastores, extendedmap.keys(), extendfunc)

    for variant, variant_d in datastores.iteritems():
        if variant:
            try:
                if not onlyfinalise or variant in onlyfinalise:
                    finalize(fn, variant_d, variant)
            except bb.parse.SkipRecipe as e:
            except bb.parse.SkipPackage as e:
                variant_d.setVar("__SKIPPED", e.args[0])

    if len(datastores) > 1:
@@ -32,7 +32,7 @@ import bb.build, bb.utils
from bb import data

from . import ConfHandler
from .. import resolve_file, ast, logger, ParseError
from .. import resolve_file, ast, logger
from .ConfHandler import include, init

# For compatibility
@@ -47,26 +47,37 @@ __addhandler_regexp__ = re.compile( r"addhandler\s+(.+)" )
__def_regexp__ = re.compile( r"def\s+(\w+).*:" )
__python_func_regexp__ = re.compile( r"(\s+.*)|(^$)" )

__infunc__ = []
__infunc__ = ""
__inpython__ = False
__body__ = []
__classname__ = ""

cached_statements = {}

# We need to indicate EOF to the feeder. This code is so messy that
# factoring it out to a close_parse_file method is out of question.
# We will use the IN_PYTHON_EOF as an indicator to just close the method
#
# The two parts using it are tightly integrated anyway
IN_PYTHON_EOF = -9999999999999


def supports(fn, d):
    """Return True if fn has a supported extension"""
    return os.path.splitext(fn)[-1] in [".bb", ".bbclass", ".inc"]

def inherit(files, fn, lineno, d):
    __inherit_cache = d.getVar('__inherit_cache', False) or []
    __inherit_cache = d.getVar('__inherit_cache') or []
    files = d.expand(files).split()
    for file in files:
        if not os.path.isabs(file) and not file.endswith(".bbclass"):
            file = os.path.join('classes', '%s.bbclass' % file)

        if not os.path.isabs(file):
            bbpath = d.getVar("BBPATH", True)
            dname = os.path.dirname(fn)
            bbpath = "%s:%s" % (dname, d.getVar("BBPATH", True))
            abs_fn, attempts = bb.utils.which(bbpath, file, history=True)
            for af in attempts:
                if af != abs_fn:
@@ -75,11 +86,11 @@ def inherit(files, fn, lineno, d):
            file = abs_fn

        if not file in __inherit_cache:
            logger.debug(1, "Inheriting %s (from %s:%d)" % (file, fn, lineno))
            logger.log(logging.DEBUG -1, "BB %s:%d: inheriting %s", fn, lineno, file)
            __inherit_cache.append( file )
            d.setVar('__inherit_cache', __inherit_cache)
        include(fn, file, lineno, d, "inherit")
        __inherit_cache = d.getVar('__inherit_cache', False) or []
        __inherit_cache = d.getVar('__inherit_cache') or []

def get_statements(filename, absolute_filename, base_name):
    global cached_statements
@@ -100,7 +111,7 @@ def get_statements(filename, absolute_filename, base_name):
        file.close()
        if __inpython__:
            # add a blank line to close out any python definition
            feeder(lineno, "", filename, base_name, statements, eof=True)
            feeder(IN_PYTHON_EOF, "", filename, base_name, statements)

        if filename.endswith(".bbclass") or filename.endswith(".inc"):
            cached_statements[absolute_filename] = statements
@@ -109,23 +120,29 @@ def get_statements(filename, absolute_filename, base_name):
def handle(fn, d, include):
    global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __residue__, __classname__
    __body__ = []
    __infunc__ = []
    __infunc__ = ""
    __classname__ = ""
    __residue__ = []


    if include == 0:
        logger.debug(2, "BB %s: handle(data)", fn)
    else:
        logger.debug(2, "BB %s: handle(data, include)", fn)

    base_name = os.path.basename(fn)
    (root, ext) = os.path.splitext(base_name)
    init(d)

    if ext == ".bbclass":
        __classname__ = root
        __inherit_cache = d.getVar('__inherit_cache', False) or []
        __inherit_cache = d.getVar('__inherit_cache') or []
        if not fn in __inherit_cache:
            __inherit_cache.append(fn)
            d.setVar('__inherit_cache', __inherit_cache)

    if include != 0:
        oldfile = d.getVar('FILE', False)
        oldfile = d.getVar('FILE')
    else:
        oldfile = None

@@ -138,36 +155,31 @@ def handle(fn, d, include):
    statements = get_statements(fn, abs_fn, base_name)

    # DONE WITH PARSING... time to evaluate
    if ext != ".bbclass" and abs_fn != oldfile:
    if ext != ".bbclass":
        d.setVar('FILE', abs_fn)

    try:
        statements.eval(d)
    except bb.parse.SkipRecipe:
    except bb.parse.SkipPackage:
        bb.data.setVar("__SKIPPED", True, d)
        if include == 0:
            return { "" : d }

    if __infunc__:
        raise ParseError("Shell function %s is never closed" % __infunc__[0], __infunc__[1], __infunc__[2])
    if __residue__:
        raise ParseError("Leftover unparsed (incomplete?) data %s from %s" % __residue__, fn)

    if ext != ".bbclass" and include == 0:
        return ast.multi_finalize(fn, d)

    if ext != ".bbclass" and oldfile and abs_fn != oldfile:
    if oldfile:
        d.setVar("FILE", oldfile)

    return d

def feeder(lineno, s, fn, root, statements, eof=False):
def feeder(lineno, s, fn, root, statements):
    global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __def_regexp__, __python_func_regexp__, __inpython__, __infunc__, __body__, bb, __residue__, __classname__
    if __infunc__:
        if s == '}':
            __body__.append('')
            ast.handleMethod(statements, fn, lineno, __infunc__[0], __body__, __infunc__[3], __infunc__[4])
            __infunc__ = []
            ast.handleMethod(statements, fn, lineno, __infunc__, __body__)
            __infunc__ = ""
            __body__ = []
        else:
            __body__.append(s)
@@ -175,7 +187,7 @@ def feeder(lineno, s, fn, root, statements, eof=False):

    if __inpython__:
        m = __python_func_regexp__.match(s)
        if m and not eof:
        if m and lineno != IN_PYTHON_EOF:
            __body__.append(s)
            return
        else:
@@ -184,7 +196,7 @@ def feeder(lineno, s, fn, root, statements, eof=False):
            __body__ = []
            __inpython__ = False

            if eof:
            if lineno == IN_PYTHON_EOF:
                return

    if s and s[0] == '#':
@@ -211,7 +223,8 @@ def feeder(lineno, s, fn, root, statements, eof=False):

    m = __func_start_regexp__.match(s)
    if m:
        __infunc__ = [m.group("func") or "__anonymous", fn, lineno, m.group("py") is not None, m.group("fr") is not None]
        __infunc__ = m.group("func") or "__anonymous"
        ast.handleMethodFlags(statements, fn, lineno, __infunc__, m)
        return

    m = __def_regexp__.match(s)
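# Illustrative metadata of the kind feeder() above tokenizes (the recipe
# content is invented): a shell function is collected line by line until the
# closing "}", while "python" and "fakeroot" prefixes end up as the flags
# handled via ast.handleMethodFlags():
#
#   do_deploy() {
#       install -m 0644 ${B}/image ${DEPLOYDIR}/
#   }
#
#   python do_report() {
#       bb.note("build of %s finished" % d.getVar('PN', True))
#   }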
@@ -24,11 +24,10 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import errno
import re
import os
import re, os
import logging
import bb.utils
from bb.parse import ParseError, resolve_file, ast, logger, handle
from bb.parse import ParseError, resolve_file, ast, logger

__config_regexp__ = re.compile( r"""
    ^
@@ -59,7 +58,7 @@ __require_regexp__ = re.compile( r"require\s+(.+)" )
__export_regexp__ = re.compile( r"export\s+([a-zA-Z0-9\-_+.${}/]+)$" )

def init(data):
    topdir = data.getVar('TOPDIR', False)
    topdir = data.getVar('TOPDIR')
    if not topdir:
        data.setVar('TOPDIR', os.getcwd())

@@ -67,43 +66,40 @@ def init(data):
def supports(fn, d):
    return fn[-5:] == ".conf"

def include(parentfn, fn, lineno, data, error_out):
def include(oldfn, fn, lineno, data, error_out):
    """
    error_out: A string indicating the verb (e.g. "include", "inherit") to be
    used in a ParseError that will be raised if the file to be included could
    not be included. Specify False to avoid raising an error in this case.
    """
    if parentfn == fn: # prevent infinite recursion
    if oldfn == fn: # prevent infinite recursion
        return None

    import bb
    fn = data.expand(fn)
    parentfn = data.expand(parentfn)
    oldfn = data.expand(oldfn)

    if not os.path.isabs(fn):
        dname = os.path.dirname(parentfn)
        dname = os.path.dirname(oldfn)
        bbpath = "%s:%s" % (dname, data.getVar("BBPATH", True))
        abs_fn, attempts = bb.utils.which(bbpath, fn, history=True)
        if abs_fn and bb.parse.check_dependency(data, abs_fn):
            logger.warning("Duplicate inclusion for %s in %s" % (abs_fn, data.getVar('FILE', True)))
            bb.warn("Duplicate inclusion for %s in %s" % (abs_fn, data.getVar('FILE', True)))
        for af in attempts:
            bb.parse.mark_dependency(data, af)
        if abs_fn:
            fn = abs_fn
    elif bb.parse.check_dependency(data, fn):
        logger.warning("Duplicate inclusion for %s in %s" % (fn, data.getVar('FILE', True)))
        bb.warn("Duplicate inclusion for %s in %s" % (fn, data.getVar('FILE', True)))

    from bb.parse import handle
    try:
        bb.parse.handle(fn, data, True)
    except (IOError, OSError) as exc:
        if exc.errno == errno.ENOENT:
            if error_out:
                raise ParseError("Could not %s file %s" % (error_out, fn), parentfn, lineno)
            logger.debug(2, "CONF file '%s' not found", fn)
        else:
            if error_out:
                raise ParseError("Could not %s file %s: %s" % (error_out, fn, exc.strerror), parentfn, lineno)
            else:
                raise ParseError("Error parsing %s: %s" % (fn, exc.strerror), parentfn, lineno)
        ret = handle(fn, data, True)
    except (IOError, OSError):
        if error_out:
            raise ParseError("Could not %(error_out)s file %(fn)s" % vars(), oldfn, lineno)
        logger.debug(2, "CONF file '%s' not found", fn)
    bb.parse.mark_dependency(data, fn)

# We have an issue where a UI might want to enforce particular settings such as
# an empty DISTRO variable. If configuration files do something like assigning
@@ -119,7 +115,7 @@ def handle(fn, data, include):
    if include == 0:
        oldfile = None
    else:
        oldfile = data.getVar('FILE', False)
        oldfile = data.getVar('FILE')

    abs_fn = resolve_file(fn, data)
    f = open(abs_fn, 'r')

@@ -199,10 +199,7 @@ class PersistData(object):
            del self.data[domain][key]

def connect(database):
    connection = sqlite3.connect(database, timeout=5, isolation_level=None)
    connection.execute("pragma synchronous = off;")
    connection.text_factory = str
    return connection
    return sqlite3.connect(database, timeout=5, isolation_level=None)

def persist(domain, d):
    """Convenience factory for SQLTable objects based upon metadata"""
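# A standalone sketch of the newer connect() variant above; the database path
# is an arbitrary example:
#
#   import sqlite3
#   connection = sqlite3.connect("/tmp/demo.sqlite3", timeout=5, isolation_level=None)
#   connection.execute("pragma synchronous = off;")
#   connection.text_factory = str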
@@ -64,7 +64,7 @@ class Popen(subprocess.Popen):
        options.update(kwargs)
        subprocess.Popen.__init__(self, *args, **options)

def _logged_communicate(pipe, log, input, extrafiles):
def _logged_communicate(pipe, log, input):
    if pipe.stdin:
        if input is not None:
            pipe.stdin.write(input)
@@ -79,20 +79,6 @@ def _logged_communicate(pipe, log, input, extrafiles):
    if pipe.stderr is not None:
        bb.utils.nonblockingfd(pipe.stderr.fileno())
        rin.append(pipe.stderr)
    for fobj, _ in extrafiles:
        bb.utils.nonblockingfd(fobj.fileno())
        rin.append(fobj)

    def readextras(selected):
        for fobj, func in extrafiles:
            if fobj in selected:
                try:
                    data = fobj.read()
                except IOError as err:
                    if err.errno == errno.EAGAIN or err.errno == errno.EWOULDBLOCK:
                        data = None
                if data is not None:
                    func(data)

    try:
        while pipe.poll() is None:
@@ -114,27 +100,18 @@ def _logged_communicate(pipe, log, input, extrafiles):
                if data is not None:
                    errdata.append(data)
                    log.write(data)

            readextras(r)

    finally:
        log.flush()

        readextras([fobj for fobj, _ in extrafiles])

    if pipe.stdout is not None:
        pipe.stdout.close()
    if pipe.stderr is not None:
        pipe.stderr.close()
    return ''.join(outdata), ''.join(errdata)

def run(cmd, input=None, log=None, extrafiles=None, **options):
def run(cmd, input=None, log=None, **options):
    """Convenience function to run a command and return its output, raising an
    exception when the command fails"""

    if not extrafiles:
        extrafiles = []

    if isinstance(cmd, basestring) and not "shell" in options:
        options["shell"] = True

@@ -147,7 +124,7 @@ def run(cmd, input=None, log=None, extrafiles=None, **options):
        raise CmdError(cmd, exc)

    if log:
        stdout, stderr = _logged_communicate(pipe, log, input, extrafiles)
        stdout, stderr = _logged_communicate(pipe, log, input)
    else:
        stdout, stderr = pipe.communicate(input)
@@ -121,14 +121,11 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
preferred_file = None
preferred_ver = None

# pn can contain '_', e.g. gcc-cross-x86_64 and an override cannot
# hence we do this manually rather than use OVERRIDES
preferred_v = cfgData.getVar("PREFERRED_VERSION_pn-%s" % pn, True)
if not preferred_v:
preferred_v = cfgData.getVar("PREFERRED_VERSION_%s" % pn, True)
if not preferred_v:
preferred_v = cfgData.getVar("PREFERRED_VERSION", True)
localdata = data.createCopy(cfgData)
localdata.setVar('OVERRIDES', "%s:pn-%s:%s" % (data.getVar('OVERRIDES', localdata), pn, pn))
bb.data.update_data(localdata)

preferred_v = localdata.getVar('PREFERRED_VERSION', True)
if preferred_v:
m = re.match('(\d+:)*(.*)(_.*)*', preferred_v)
if m:
@@ -226,7 +223,7 @@ def findBestProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
def _filterProviders(providers, item, cfgData, dataCache):
"""
Take a list of providers and filter/reorder according to the
environment variables
environment variables and previous build results
"""
eligible = []
preferred_versions = {}
@@ -283,7 +280,7 @@ def _filterProviders(providers, item, cfgData, dataCache):
def filterProviders(providers, item, cfgData, dataCache):
"""
Take a list of providers and filter/reorder according to the
environment variables
environment variables and previous build results
Takes a "normal" target item
"""

@@ -311,56 +308,38 @@ def filterProviders(providers, item, cfgData, dataCache):
def filterProvidersRunTime(providers, item, cfgData, dataCache):
"""
Take a list of providers and filter/reorder according to the
environment variables
environment variables and previous build results
Takes a "runtime" target item
"""

eligible = _filterProviders(providers, item, cfgData, dataCache)

# First try and match any PREFERRED_RPROVIDER entry
prefervar = cfgData.getVar('PREFERRED_RPROVIDER_%s' % item, True)
foundUnique = False
if prefervar:
for p in eligible:
pn = dataCache.pkg_fn[p]
if prefervar == pn:
logger.verbose("selecting %s to satisfy %s due to PREFERRED_RPROVIDER", pn, item)
eligible.remove(p)
eligible = [p] + eligible
foundUnique = True
numberPreferred = 1
# Should use dataCache.preferred here?
preferred = []
preferred_vars = []
pns = {}
for p in eligible:
pns[dataCache.pkg_fn[p]] = p
for p in eligible:
pn = dataCache.pkg_fn[p]
provides = dataCache.pn_provides[pn]
for provide in provides:
prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % provide, True)
#logger.debug(1, "checking PREFERRED_PROVIDER_%s (value %s) against %s", provide, prefervar, pns.keys())
if prefervar in pns and pns[prefervar] not in preferred:
var = "PREFERRED_PROVIDER_%s = %s" % (provide, prefervar)
logger.verbose("selecting %s to satisfy runtime %s due to %s", prefervar, item, var)
preferred_vars.append(var)
pref = pns[prefervar]
eligible.remove(pref)
eligible = [pref] + eligible
preferred.append(pref)
break

# If we didn't find an RPROVIDER entry, try and infer the provider from PREFERRED_PROVIDER entries
# by looking through the provides of each eligible recipe and seeing if a PREFERRED_PROVIDER was set.
# This is most useful for virtual/ entries rather than having a RPROVIDER per entry.
if not foundUnique:
# Should use dataCache.preferred here?
preferred = []
preferred_vars = []
pns = {}
for p in eligible:
pns[dataCache.pkg_fn[p]] = p
for p in eligible:
pn = dataCache.pkg_fn[p]
provides = dataCache.pn_provides[pn]
for provide in provides:
prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % provide, True)
#logger.debug(1, "checking PREFERRED_PROVIDER_%s (value %s) against %s", provide, prefervar, pns.keys())
if prefervar in pns and pns[prefervar] not in preferred:
var = "PREFERRED_PROVIDER_%s = %s" % (provide, prefervar)
logger.verbose("selecting %s to satisfy runtime %s due to %s", prefervar, item, var)
preferred_vars.append(var)
pref = pns[prefervar]
eligible.remove(pref)
eligible = [pref] + eligible
preferred.append(pref)
break

numberPreferred = len(preferred)
numberPreferred = len(preferred)

if numberPreferred > 1:
logger.error("Trying to resolve runtime dependency %s resulted in conflicting PREFERRED_PROVIDER entries being found.\nThe providers found were: %s\nThe PREFERRED_PROVIDER entries resulting in this conflict were: %s. You could set PREFERRED_RPROVIDER_%s" % (item, preferred, preferred_vars, item))
logger.error("Trying to resolve runtime dependency %s resulted in conflicting PREFERRED_PROVIDER entries being found.\nThe providers found were: %s\nThe PREFERRED_PROVIDER entries resulting in this conflict were: %s", item, preferred, preferred_vars)

logger.debug(1, "sorted runtime providers for %s are: %s", item, eligible)

@@ -400,29 +379,3 @@ def getRuntimeProviders(dataCache, rdepend):
logger.debug(1, "Assuming %s is a dynamic package, but it may not exist" % rdepend)

return rproviders


def buildWorldTargetList(dataCache):
"""
Build package list for "bitbake world"
"""
if dataCache.world_target:
return

logger.debug(1, "collating packages for \"world\"")
for f in dataCache.possible_world:
terminal = True
pn = dataCache.pkg_fn[f]

for p in dataCache.pn_provides[pn]:
if p.startswith('virtual/'):
logger.debug(2, "World build skipping %s due to %s provider starting with virtual/", f, p)
terminal = False
break
for pf in dataCache.providers[p]:
if dataCache.pkg_fn[pf] != pn:
logger.debug(2, "World build skipping %s due to both us and %s providing %s", f, pf, p)
terminal = False
break
if terminal:
dataCache.world_target.add(pn)
@@ -38,4 +38,4 @@ class ExitSignal(ShellError):

class ReturnSignal(ShellError):
"""Exit signal."""
pass
pass
@@ -97,7 +97,7 @@ class RunQueueScheduler(object):
def __init__(self, runqueue, rqdata):
"""
The default scheduler just returns the first buildable task (the
priority map is sorted by task number)
priority map is sorted by task numer)
"""
self.rq = runqueue
self.rqdata = rqdata
@@ -186,7 +186,7 @@ class RunQueueSchedulerCompletion(RunQueueSchedulerSpeed):
"""
A scheduler optimised to complete .bb files are quickly as possible. The
priority map is sorted by task weight, but then reordered so once a given
.bb file starts to build, it's completed as quickly as possible. This works
.bb file starts to build, its completed as quickly as possible. This works
well where disk space is at a premium and classes like OE's rm_work are in
force.
"""
@@ -261,13 +261,6 @@ class RunQueueData:
taskname = self.runq_task[task] + task_name_suffix
return "%s, %s" % (fn, taskname)

def get_short_user_idstring(self, task, task_name_suffix = ""):
fn = self.taskData.fn_index[self.runq_fnid[task]]
pn = self.dataCache.pkg_fn[fn]
taskname = self.runq_task[task] + task_name_suffix
return "%s:%s" % (pn, taskname)


def get_task_id(self, fnid, taskname):
for listid in xrange(len(self.runq_fnid)):
if self.runq_fnid[listid] == fnid and self.runq_task[listid] == taskname:
@@ -437,7 +430,7 @@ class RunQueueData:
# Nothing to do
return 0

logger.info("Preparing RunQueue")
logger.info("Preparing runqueue")

# Step A - Work out a list of tasks to run
#
@@ -641,33 +634,23 @@ class RunQueueData:

fnid = taskData.build_targets[targetid][0]
fn = taskData.fn_index[fnid]
task = target[1]
parents = False
if task.endswith('-'):
parents = True
task = task[:-1]

self.target_pairs.append((fn, task))
self.target_pairs.append((fn, target[1]))

if fnid in taskData.failed_fnids:
continue

if task not in taskData.tasks_lookup[fnid]:
if target[1] not in taskData.tasks_lookup[fnid]:
import difflib
close_matches = difflib.get_close_matches(task, taskData.tasks_lookup[fnid], cutoff=0.7)
close_matches = difflib.get_close_matches(target[1], taskData.tasks_lookup[fnid], cutoff=0.7)
if close_matches:
extra = ". Close matches:\n %s" % "\n ".join(close_matches)
else:
extra = ""
bb.msg.fatal("RunQueue", "Task %s does not exist for target %s%s" % (task, target[0], extra))

# For tasks called "XXXX-", ony run their dependencies
listid = taskData.tasks_lookup[fnid][task]
if parents:
for i in self.runq_depends[listid]:
mark_active(i, 1)
else:
mark_active(listid, 1)
bb.msg.fatal("RunQueue", "Task %s does not exist for target %s%s" % (target[1], target[0], extra))

listid = taskData.tasks_lookup[fnid][target[1]]

mark_active(listid, 1)

# Step C - Prune all inactive tasks
#
@@ -760,72 +743,11 @@ class RunQueueData:
seen_pn.append(pn)
else:
bb.fatal("Multiple versions of %s are due to be built (%s). Only one version of a given PN should be built in any given build. You likely need to set PREFERRED_VERSION_%s to select the correct version or don't depend on multiple versions." % (pn, " ".join(prov_list[prov]), pn))
msg = "Multiple .bb files are due to be built which each provide %s:\n %s" % (prov, "\n  ".join(prov_list[prov]))
#
# Construct a list of things which uniquely depend on each provider
# since this may help the user figure out which dependency is triggering this warning
#
msg += "\nA list of tasks depending on these providers is shown and may help explain where the dependency comes from."
deplist = {}
commondeps = None
for provfn in prov_list[prov]:
deps = set()
for task, fnid in enumerate(self.runq_fnid):
fn = taskData.fn_index[fnid]
if fn != provfn:
continue
for dep in self.runq_revdeps[task]:
fn = taskData.fn_index[self.runq_fnid[dep]]
if fn == provfn:
continue
deps.add(self.get_short_user_idstring(dep))
if not commondeps:
commondeps = set(deps)
else:
commondeps &= deps
deplist[provfn] = deps
for provfn in deplist:
msg += "\n%s has unique dependees:\n  %s" % (provfn, "\n  ".join(deplist[provfn] - commondeps))
#
# Construct a list of provides and runtime providers for each recipe
# (rprovides has to cover RPROVIDES, PACKAGES, PACKAGES_DYNAMIC)
#
msg += "\nIt could be that one recipe provides something the other doesn't and should. The following provider and runtime provider differences may be helpful."
provide_results = {}
rprovide_results = {}
commonprovs = None
commonrprovs = None
for provfn in prov_list[prov]:
provides = set(self.dataCache.fn_provides[provfn])
rprovides = set()
for rprovide in self.dataCache.rproviders:
if provfn in self.dataCache.rproviders[rprovide]:
rprovides.add(rprovide)
for package in self.dataCache.packages:
if provfn in self.dataCache.packages[package]:
rprovides.add(package)
for package in self.dataCache.packages_dynamic:
if provfn in self.dataCache.packages_dynamic[package]:
rprovides.add(package)
if not commonprovs:
commonprovs = set(provides)
else:
commonprovs &= provides
provide_results[provfn] = provides
if not commonrprovs:
commonrprovs = set(rprovides)
else:
commonrprovs &= rprovides
rprovide_results[provfn] = rprovides
#msg += "\nCommon provides:\n  %s" % ("\n  ".join(commonprovs))
#msg += "\nCommon rprovides:\n  %s" % ("\n  ".join(commonrprovs))
for provfn in prov_list[prov]:
msg += "\n%s has unique provides:\n  %s" % (provfn, "\n  ".join(provide_results[provfn] - commonprovs))
msg += "\n%s has unique rprovides:\n  %s" % (provfn, "\n  ".join(rprovide_results[provfn] - commonrprovs))

msg = "Multiple .bb files are due to be built which each provide %s (%s)." % (prov, " ".join(prov_list[prov]))
if self.warn_multi_bb:
logger.warning(msg)
logger.warn(msg)
else:
msg += "\n This usually means one provides something the other doesn't and should."
logger.error(msg)

# Create a whitelist usable by the stamp checks
@@ -852,7 +774,7 @@ class RunQueueData:
taskdep = self.dataCache.task_deps[fn]
fnid = self.taskData.getfn_id(fn)
if taskname not in taskData.tasks_lookup[fnid]:
logger.warning("Task %s does not exist, invalidating this task will have no effect" % taskname)
logger.warn("Task %s does not exist, invalidating this task will have no effect" % taskname)
if 'nostamp' in taskdep and taskname in taskdep['nostamp']:
if error_nostamp:
bb.fatal("Task %s is marked nostamp, cannot invalidate this task" % taskname)
@@ -871,20 +793,9 @@ class RunQueueData:
if self.cooker.configuration.invalidate_stamp:
for (fn, target) in self.target_pairs:
for st in self.cooker.configuration.invalidate_stamp.split(','):
if not st.startswith("do_"):
st = "do_%s" % st
invalidate_task(fn, st, True)
invalidate_task(fn, "do_%s" % st, True)

# Create and print to the logs a virtual/xxxx -> PN (fn) table
virtmap = taskData.get_providermap(prefix="virtual/")
virtpnmap = {}
for v in virtmap:
virtpnmap[v] = self.dataCache.pkg_fn[virtmap[v]]
bb.debug(2, "%s resolved to: %s (%s)" % (v, virtpnmap[v], virtmap[v]))
if hasattr(bb.parse.siggen, "tasks_resolved"):
bb.parse.siggen.tasks_resolved(virtmap, virtpnmap, self.dataCache)

# Iterate over the task list and call into the siggen code
# Interate over the task list and call into the siggen code
dealtwith = set()
todeal = set(range(len(self.runq_fnid)))
while len(todeal) > 0:
@@ -897,7 +808,6 @@ class RunQueueData:
procdep.append(self.taskData.fn_index[self.runq_fnid[dep]] + "." + self.runq_task[dep])
self.runq_hash[task] = bb.parse.siggen.get_taskhash(self.taskData.fn_index[self.runq_fnid[task]], self.runq_task[task], procdep, self.dataCache)

bb.parse.siggen.writeout_file_checksum_cache()
return len(self.runq_fnid)

def dump_data(self, taskQueue):
@@ -949,19 +859,15 @@ class RunQueue:

def _start_worker(self, fakeroot = False, rqexec = None):
logger.debug(1, "Starting bitbake-worker")
magic = "decafbad"
if self.cooker.configuration.profile:
magic = "decafbadbad"
if fakeroot:
magic = magic + "beef"
fakerootcmd = self.cfgData.getVar("FAKEROOTCMD", True)
fakerootenv = (self.cfgData.getVar("FAKEROOTBASEENV", True) or "").split()
env = os.environ.copy()
for key, value in (var.split('=') for var in fakerootenv):
env[key] = value
worker = subprocess.Popen([fakerootcmd, "bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE, env=env)
worker = subprocess.Popen([fakerootcmd, "bitbake-worker", "decafbad"], stdout=subprocess.PIPE, stdin=subprocess.PIPE, env=env)
else:
worker = subprocess.Popen(["bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
worker = subprocess.Popen(["bitbake-worker", "decafbad"], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
bb.utils.nonblockingfd(worker.stdout)
workerpipe = runQueuePipe(worker.stdout, None, self.cfgData, self, rqexec)

@@ -970,7 +876,9 @@ class RunQueue:
"fakerootenv" : self.rqdata.dataCache.fakerootenv,
"fakerootdirs" : self.rqdata.dataCache.fakerootdirs,
"fakerootnoenv" : self.rqdata.dataCache.fakerootnoenv,
"sigdata" : bb.parse.siggen.get_taskdata(),
"hashes" : bb.parse.siggen.taskhash,
"hash_deps" : bb.parse.siggen.runtaskdeps,
"sigchecksums" : bb.parse.siggen.file_checksum_values,
"runq_hash" : self.rqdata.runq_hash,
"logdefaultdebug" : bb.msg.loggerDefaultDebugLevel,
"logdefaultverbose" : bb.msg.loggerDefaultVerbose,
@@ -1059,11 +967,11 @@ class RunQueue:

stampfile = bb.build.stampfile(taskname, self.rqdata.dataCache, fn)

# If the stamp is missing, it's not current
# If the stamp is missing its not current
if not os.access(stampfile, os.F_OK):
logger.debug(2, "Stampfile %s not available", stampfile)
return False
# If it's a 'nostamp' task, it's not current
# If its a 'nostamp' task, it's not current
taskdep = self.rqdata.dataCache.task_deps[fn]
if 'nostamp' in taskdep and taskname in taskdep['nostamp']:
logger.debug(2, "%s.%s is nostamp\n", fn, taskname)
@@ -1085,19 +993,15 @@ class RunQueue:
stampfile3 = bb.build.stampfile(taskname2 + "_setscene", self.rqdata.dataCache, fn2)
t2 = get_timestamp(stampfile2)
t3 = get_timestamp(stampfile3)
if t3 and not t2:
continue
if t3 and t3 > t2:
continue
continue
if fn == fn2 or (fulldeptree and fn2 not in stampwhitelist):
if not t2:
logger.debug(2, 'Stampfile %s does not exist', stampfile2)
iscurrent = False
break
if t1 < t2:
logger.debug(2, 'Stampfile %s < %s', stampfile, stampfile2)
iscurrent = False
break
if recurse and iscurrent:
if dep in cache:
iscurrent = cache[dep]
@@ -1126,10 +1030,10 @@ class RunQueue:
else:
self.state = runQueueSceneInit

# we are ready to run, emit dependency info to any UI or class which
# needs it
depgraph = self.cooker.buildDependTree(self, self.rqdata.taskData)
bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.cooker.data)
# we are ready to run, see if any UI client needs the dependency info
if bb.cooker.CookerFeatures.SEND_DEPENDS_TREE in self.cooker.featureset:
depgraph = self.cooker.buildDependTree(self, self.rqdata.taskData)
bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.cooker.data)

if self.state is runQueueSceneInit:
dump = self.cooker.configuration.dump_signatures
@@ -1151,20 +1055,17 @@ class RunQueue:
retval = self.rqexe.execute()

if self.state is runQueueRunInit:
if self.cooker.configuration.setsceneonly:
self.state = runQueueComplete
else:
logger.info("Executing RunQueue Tasks")
self.rqexe = RunQueueExecuteTasks(self)
self.state = runQueueRunning
logger.info("Executing RunQueue Tasks")
self.rqexe = RunQueueExecuteTasks(self)
self.state = runQueueRunning

if self.state is runQueueRunning:
retval = self.rqexe.execute()

if self.state is runQueueCleanUp:
retval = self.rqexe.finish()
self.rqexe.finish()

if (self.state is runQueueComplete or self.state is runQueueFailed) and self.rqexe:
if self.state is runQueueComplete or self.state is runQueueFailed:
self.teardown_workers()
if self.rqexe.stats.failed:
logger.info("Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and %d failed.", self.rqexe.stats.completed + self.rqexe.stats.failed, self.rqexe.stats.skipped, self.rqexe.stats.failed)
@@ -1194,13 +1095,6 @@ class RunQueue:
raise
except SystemExit:
raise
except bb.BBHandledException:
try:
self.teardown_workers()
except:
pass
self.state = runQueueComplete
raise
except:
logger.error("An uncaught exception occured in runqueue, please see the failure below:")
try:
@@ -1212,7 +1106,6 @@ class RunQueue:

def finish_runqueue(self, now = False):
if not self.rqexe:
self.state = runQueueComplete
return

if now:
@@ -1259,14 +1152,9 @@ class RunQueue:
sq_hash.append(self.rqdata.runq_hash[task])
sq_taskname.append(taskname)
sq_task.append(task)
locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.expanded_data }
try:
call = self.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=True)"
valid = bb.utils.better_eval(call, locs)
# Handle version with no siginfo parameter
except TypeError:
call = self.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d)"
valid = bb.utils.better_eval(call, locs)
call = self.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d)"
locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.data }
valid = bb.utils.better_eval(call, locs)
for v in valid:
valid_new.add(sq_task[v])
@@ -1378,9 +1266,6 @@ class RunQueueExecute:
if rq.fakeworkerpipe:
rq.fakeworkerpipe.setrunqueueexec(self)

if self.number_tasks <= 0:
bb.fatal("Invalid BB_NUMBER_THREADS %s" % self.number_tasks)

def runqueue_process_waitpid(self, task, status):

# self.build_stamps[pid] may not exist when use shared work directory.
@@ -1419,14 +1304,15 @@ class RunQueueExecute:
if self.stats.active > 0:
bb.event.fire(runQueueExitWait(self.stats.active), self.cfgData)
self.rq.read_workers()
return self.rq.active_fds()

return

if len(self.failed_fnids) != 0:
self.rq.state = runQueueFailed
return True
return

self.rq.state = runQueueComplete
return True
return

def check_dependencies(self, task, taskdeps, setscene = False):
if not self.rq.depvalidate:
@@ -1444,7 +1330,7 @@ class RunQueueExecute:
taskname = self.rqdata.runq_task[depid]
taskdata[dep] = [pn, taskname, fn]
call = self.rq.depvalidate + "(task, taskdata, notneeded, d)"
locs = { "task" : task, "taskdata" : taskdata, "notneeded" : self.scenequeue_notneeded, "d" : self.cooker.expanded_data }
locs = { "task" : task, "taskdata" : taskdata, "notneeded" : self.scenequeue_notneeded, "d" : self.cooker.data }
valid = bb.utils.better_eval(call, locs)
return valid

@@ -1475,7 +1361,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
self.runq_buildable.append(1)
else:
self.runq_buildable.append(0)
if len(self.rqdata.runq_revdeps[task]) > 0 and self.rqdata.runq_revdeps[task].issubset(self.rq.scenequeue_covered):
if len(self.rqdata.runq_revdeps[task]) > 0 and self.rqdata.runq_revdeps[task].issubset(self.rq.scenequeue_covered) and task not in self.rq.scenequeue_notcovered:
self.rq.scenequeue_covered.add(task)

found = True
@@ -1486,7 +1372,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
continue
logger.debug(1, 'Considering %s (%s): %s' % (task, self.rqdata.get_user_idstring(task), str(self.rqdata.runq_revdeps[task])))

if len(self.rqdata.runq_revdeps[task]) > 0 and self.rqdata.runq_revdeps[task].issubset(self.rq.scenequeue_covered):
if len(self.rqdata.runq_revdeps[task]) > 0 and self.rqdata.runq_revdeps[task].issubset(self.rq.scenequeue_covered) and task not in self.rq.scenequeue_notcovered:
found = True
self.rq.scenequeue_covered.add(task)

@@ -1513,7 +1399,7 @@ class RunQueueExecuteTasks(RunQueueExecute):

call = self.rq.setsceneverify + "(covered, tasknames, fnids, fns, d, invalidtasks=invalidtasks)"
call2 = self.rq.setsceneverify + "(covered, tasknames, fnids, fns, d)"
locs = { "covered" : self.rq.scenequeue_covered, "tasknames" : self.rqdata.runq_task, "fnids" : self.rqdata.runq_fnid, "fns" : self.rqdata.taskData.fn_index, "d" : self.cooker.expanded_data, "invalidtasks" : invalidtasks }
locs = { "covered" : self.rq.scenequeue_covered, "tasknames" : self.rqdata.runq_task, "fnids" : self.rqdata.runq_fnid, "fns" : self.rqdata.taskData.fn_index, "d" : self.cooker.data, "invalidtasks" : invalidtasks }
# Backwards compatibility with older versions without invalidtasks
try:
covered_remove = bb.utils.better_eval(call, locs)
@@ -1665,8 +1551,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
bb.event.fire(startevent, self.cfgData)
self.runq_running[task] = 1
self.stats.taskActive()
if not self.cooker.configuration.dry_run:
bb.build.make_stamp(taskname, self.rqdata.dataCache, fn)
bb.build.make_stamp(taskname, self.rqdata.dataCache, fn)
self.task_complete(task)
return True
else:
@@ -1678,12 +1563,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
taskdep = self.rqdata.dataCache.task_deps[fn]
if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not self.cooker.configuration.dry_run:
if not self.rq.fakeworker:
try:
self.rq.start_fakeworker(self)
except OSError as exc:
logger.critical("Failed to spawn fakeroot worker to run %s:%s: %s" % (fn, taskname, str(exc)))
self.rq.state = runQueueFailed
return True
self.rq.start_fakeworker(self)
self.rq.fakeworker.stdin.write("<runtask>" + pickle.dumps((fn, task, taskname, False, self.cooker.collection.get_file_appends(fn), taskdepdata)) + "</runtask>")
self.rq.fakeworker.stdin.flush()
else:
@@ -1728,8 +1608,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
pn = self.rqdata.dataCache.pkg_fn[fn]
taskname = self.rqdata.runq_task[revdep]
deps = self.rqdata.runq_depends[revdep]
provides = self.rqdata.dataCache.fn_provides[fn]
taskdepdata[revdep] = [pn, taskname, fn, deps, provides]
taskdepdata[revdep] = [pn, taskname, fn, deps]
for revdep2 in deps:
if revdep2 not in taskdepdata:
additional.append(revdep2)
@@ -1795,7 +1674,6 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
sq_revdeps_new[point] = set()
if point in self.rqdata.runq_setscene:
sq_revdeps_new[point] = tasks
tasks = set()
for dep in self.rqdata.runq_depends[point]:
if point in sq_revdeps[dep]:
sq_revdeps[dep].remove(point)
@@ -1808,7 +1686,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute):

process_endpoints(endpoints)

# Build a list of setscene tasks which are "unskippable"
# Build a list of setscene tasks which as "unskippable"
# These are direct endpoints referenced by the build
endpoints2 = {}
sq_revdeps2 = []
@@ -1940,7 +1818,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
sq_taskname.append(taskname)
sq_task.append(task)
call = self.rq.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d)"
locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.expanded_data }
locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.data }
valid = bb.utils.better_eval(call, locs)

valid_new = stamppresent
@@ -1964,10 +1842,6 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
realtask = self.rqdata.runq_setscene[task]
realdep = self.rqdata.runq_setscene[dep]
logger.debug(2, "%s was unavailable and is a hard dependency of %s so skipping" % (self.rqdata.get_user_idstring(realtask), self.rqdata.get_user_idstring(realdep)))
self.scenequeue_updatecounters(dep, fail)
continue
if task not in self.sq_revdeps2[dep]:
# May already have been removed by the fail case above
continue
self.sq_revdeps2[dep].remove(task)
if len(self.sq_revdeps2[dep]) == 0:
@@ -2073,7 +1947,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
bb.event.fire(startevent, self.cfgData)

taskdep = self.rqdata.dataCache.task_deps[fn]
if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not self.cooker.configuration.dry_run:
if 'fakeroot' in taskdep and taskname in taskdep['fakeroot']:
if not self.rq.fakeworker:
self.rq.start_fakeworker(self)
self.rq.fakeworker.stdin.write("<runtask>" + pickle.dumps((fn, realtask, taskname, True, self.cooker.collection.get_file_appends(fn), None)) + "</runtask>")
@@ -2102,14 +1976,13 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
self.rq.scenequeue_covered = set()
for task in oldcovered:
self.rq.scenequeue_covered.add(self.rqdata.runq_setscene[task])
self.rq.scenequeue_notcovered = set()
for task in self.scenequeue_notcovered:
self.rq.scenequeue_notcovered.add(self.rqdata.runq_setscene[task])

logger.debug(1, 'We can skip tasks %s', sorted(self.rq.scenequeue_covered))

self.rq.state = runQueueRunInit

completeevent = sceneQueueComplete(self.stats, self.rq)
bb.event.fire(completeevent, self.cfgData)

return True

def runqueue_process_waitpid(self, task, status):
@@ -2162,7 +2035,7 @@ class sceneQueueEvent(runQueueEvent):

class runQueueTaskStarted(runQueueEvent):
"""
Event notifying a task was started
Event notifing a task was started
"""
def __init__(self, task, stats, rq, noexec=False):
runQueueEvent.__init__(self, task, stats, rq)
@@ -2170,7 +2043,7 @@ class runQueueTaskStarted(runQueueEvent):

class sceneQueueTaskStarted(sceneQueueEvent):
"""
Event notifying a setscene task was started
Event notifing a setscene task was started
"""
def __init__(self, task, stats, rq, noexec=False):
sceneQueueEvent.__init__(self, task, stats, rq)
@@ -2178,7 +2051,7 @@ class sceneQueueTaskStarted(sceneQueueEvent):

class runQueueTaskFailed(runQueueEvent):
"""
Event notifying a task failed
Event notifing a task failed
"""
def __init__(self, task, stats, exitcode, rq):
runQueueEvent.__init__(self, task, stats, rq)
@@ -2186,33 +2059,25 @@ class runQueueTaskFailed(runQueueEvent):

class sceneQueueTaskFailed(sceneQueueEvent):
"""
Event notifying a setscene task failed
Event notifing a setscene task failed
"""
def __init__(self, task, stats, exitcode, rq):
sceneQueueEvent.__init__(self, task, stats, rq)
self.exitcode = exitcode

class sceneQueueComplete(sceneQueueEvent):
"""
Event when all the sceneQueue tasks are complete
"""
def __init__(self, stats, rq):
self.stats = stats.copy()
bb.event.Event.__init__(self)

class runQueueTaskCompleted(runQueueEvent):
"""
Event notifying a task completed
Event notifing a task completed
"""

class sceneQueueTaskCompleted(sceneQueueEvent):
"""
Event notifying a setscene task completed
Event notifing a setscene task completed
"""

class runQueueTaskSkipped(runQueueEvent):
"""
Event notifying a task was skipped
Event notifing a task was skipped
"""
def __init__(self, task, stats, rq, reason):
runQueueEvent.__init__(self, task, stats, rq)
@@ -63,9 +63,6 @@ class BitBakeBaseServerConnection():
def terminate(self):
pass

def setupEventQueue(self):
pass


""" BitBakeBaseServer class is the common ancestor to all Bitbake servers


@@ -38,27 +38,21 @@ from . import BitBakeBaseServer, BitBakeBaseServerConnection, BaseImplServer
logger = logging.getLogger('BitBake')

class ServerCommunicator():
def __init__(self, connection, event_handle, server):
def __init__(self, connection, event_handle):
self.connection = connection
self.event_handle = event_handle
self.server = server

def runCommand(self, command):
# @todo try/except
self.connection.send(command)

if not self.server.is_alive():
raise SystemExit

while True:
# don't let the user ctrl-c while we're waiting for a response
try:
for idx in range(0,4): # 0, 1, 2, 3
if self.connection.poll(5):
return self.connection.recv()
else:
bb.warn("Timeout while attempting to communicate with bitbake server")
bb.fatal("Gave up; Too many tries: timeout while attempting to communicate with bitbake server")
if self.connection.poll(20):
return self.connection.recv()
else:
bb.fatal("Timeout while attempting to communicate with bitbake server")
except KeyboardInterrupt:
pass

@@ -99,7 +93,7 @@ class ProcessServer(Process, BaseImplServer):
def run(self):
for event in bb.event.ui_queue:
self.event_queue.put(event)
self.event_handle.value = bb.event.register_UIHhandler(self, True)
self.event_handle.value = bb.event.register_UIHhandler(self)

bb.cooker.server_main(self.cooker, self.main)

@@ -108,7 +102,6 @@ class ProcessServer(Process, BaseImplServer):
# the UI and communicated to us
self.quitin.close()
signal.signal(signal.SIGINT, signal.SIG_IGN)
bb.utils.set_process_name("Cooker")
while not self.quit:
try:
if self.command_channel.poll():
@@ -117,12 +110,8 @@ class ProcessServer(Process, BaseImplServer):
if self.quitout.poll():
self.quitout.recv()
self.quit = True
try:
self.runCommand(["stateForceShutdown"])
except:
pass

self.idle_commands(.1, [self.command_channel, self.quitout])
self.idle_commands(.1, [self.event_queue._reader, self.command_channel, self.quitout])
except Exception:
logger.exception('Running command %s', command)

@@ -130,12 +119,9 @@ class ProcessServer(Process, BaseImplServer):
bb.event.unregister_UIHhandler(self.event_handle.value)
self.command_channel.close()
self.cooker.shutdown(True)
self.quitout.close()

def idle_commands(self, delay, fds=None):
def idle_commands(self, delay, fds = []):
nextsleep = delay
if not fds:
fds = []

for function, data in self._idlefuns.items():
try:
@@ -145,20 +131,14 @@ class ProcessServer(Process, BaseImplServer):
nextsleep = None
elif retval is True:
nextsleep = None
elif isinstance(retval, float):
if (retval < nextsleep):
nextsleep = retval
elif nextsleep is None:
continue
else:
fds = fds + retval
except SystemExit:
raise
except Exception as exc:
if not isinstance(exc, bb.BBHandledException):
logger.exception('Running idle function')
del self._idlefuns[function]
self.quit = True
except Exception:
logger.exception('Running idle function')

if nextsleep is not None:
select.select(fds,[],[],nextsleep)
@@ -178,18 +158,14 @@ class BitBakeProcessServerConnection(BitBakeBaseServerConnection):
self.procserver = serverImpl
self.ui_channel = ui_channel
self.event_queue = event_queue
self.connection = ServerCommunicator(self.ui_channel, self.procserver.event_handle, self.procserver)
self.connection = ServerCommunicator(self.ui_channel, self.procserver.event_handle)
self.events = self.event_queue
self.terminated = False

def sigterm_terminate(self):
bb.error("UI received SIGTERM")
self.terminate()

def terminate(self):
if self.terminated:
return
self.terminated = True
def flushevents():
while True:
try:
@@ -215,34 +191,27 @@ class ProcessEventQueue(multiprocessing.queues.Queue):
def __init__(self, maxsize):
multiprocessing.queues.Queue.__init__(self, maxsize)
self.exit = False
bb.utils.set_process_name("ProcessEQueue")

def setexit(self):
self.exit = True

def waitEvent(self, timeout):
if self.exit:
return self.getEvent()
raise KeyboardInterrupt()
try:
if not self.server.is_alive():
return self.getEvent()
return self.get(True, timeout)
except Empty:
return None

def getEvent(self):
try:
if not self.server.is_alive():
self.setexit()
return self.get(False)
except Empty:
if self.exit:
sys.exit(1)
return None


class BitBakeServer(BitBakeBaseServer):
def initServer(self, single_use=True):
def initServer(self):
# establish communication channels. We use bidirectional pipes for
# ui <--> server command/response pairs
# and a queue for server -> ui event notifications
@@ -250,7 +219,6 @@ class BitBakeServer(BitBakeBaseServer):
self.ui_channel, self.server_channel = Pipe()
self.event_queue = ProcessEventQueue(0)
self.serverImpl = ProcessServer(self.server_channel, self.event_queue, None)
self.event_queue.server = self.serverImpl

def detach(self):
self.serverImpl.start()
@@ -80,7 +80,7 @@ class BBTransport(xmlrpclib.Transport):

def _create_server(host, port, timeout = 60):
t = BBTransport(timeout)
s = xmlrpclib.ServerProxy("http://%s:%d/" % (host, port), transport=t, allow_none=True)
s = xmlrpclib.Server("http://%s:%d/" % (host, port), transport=t, allow_none=True)
return s, t

class BitBakeServerCommands():
@@ -97,10 +97,10 @@ class BitBakeServerCommands():

# we don't allow connections if the cooker is running
if (self.cooker.state in [bb.cooker.state.parsing, bb.cooker.state.running]):
return None, "Cooker is busy: %s" % bb.cooker.state.get_name(self.cooker.state)
return None

self.event_handle = bb.event.register_UIHhandler(s, True)
return self.event_handle, 'OK'
self.event_handle = bb.event.register_UIHhandler(s)
return self.event_handle

def unregisterEventHandler(self, handlerNum):
"""
@@ -186,12 +186,13 @@ class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer):
# remove this when you're done with debugging
# allow_reuse_address = True

def __init__(self, interface, single_use=False):
def __init__(self, interface):
"""
Constructor
"""
BaseImplServer.__init__(self)
self.single_use = single_use
if (interface[1] == 0): # anonymous port, not getting reused
self.single_use = True
# Use auto port configuration
if (interface[1] == -1):
interface = (interface[0], 0)
@@ -204,6 +205,7 @@ class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer):
self.commands = BitBakeServerCommands(self)
self.autoregister_all_functions(self.commands, "")
self.interface = interface
self.single_use = False

def addcooker(self, cooker):
BaseImplServer.addcooker(self, cooker)
@@ -233,16 +235,12 @@ class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer):
fds = [self]
nextsleep = 0.1
for function, data in self._idlefuns.items():
retval = None
try:
retval = function(self, data, False)
if retval is False:
del self._idlefuns[function]
elif retval is True:
nextsleep = 0
elif isinstance(retval, float):
if (retval < nextsleep):
nextsleep = retval
else:
fds = fds + retval
except SystemExit:
@@ -250,21 +248,14 @@ class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer):
except:
import traceback
traceback.print_exc()
if retval == None:
# the function execute failed; delete it
del self._idlefuns[function]
pass

socktimeout = self.socket.gettimeout() or nextsleep
socktimeout = min(socktimeout, nextsleep)
# Mirror what BaseServer handle_request would do
try:
fd_sets = select.select(fds, [], [], socktimeout)
if fd_sets[0] and self in fd_sets[0]:
self._handle_request_noblock()
except IOError:
# we ignore interrupted calls
pass
fd_sets = select.select(fds, [], [], socktimeout)
if fd_sets[0] and self in fd_sets[0]:
self._handle_request_noblock()

# Tell idle functions we're exiting
for function, data in self._idlefuns.items():
@@ -279,41 +270,33 @@ class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer):
self.connection_token = token

class BitBakeXMLRPCServerConnection(BitBakeBaseServerConnection):
def __init__(self, serverImpl, clientinfo=("localhost", 0), observer_only = False, featureset = None):
def __init__(self, serverImpl, clientinfo=("localhost", 0), observer_only = False, featureset = []):
self.connection, self.transport = _create_server(serverImpl.host, serverImpl.port)
self.clientinfo = clientinfo
self.serverImpl = serverImpl
self.observer_only = observer_only
if featureset:
self.featureset = featureset
self.featureset = featureset

def connect(self):
if not self.observer_only:
token = self.connection.addClient()
else:
self.featureset = []

def connect(self, token = None):
if token is None:
if self.observer_only:
token = "observer"
else:
token = self.connection.addClient()

token = "observer"
if token is None:
return None

self.transport.set_connection_token(token)
return self

def setupEventQueue(self):
self.events = uievent.BBUIEventQueue(self.connection, self.clientinfo)
for event in bb.event.ui_queue:
self.events.queue_event(event)

_, error = self.connection.runCommand(["setFeatures", self.featureset])
if error:
# disconnect the client, we can't make the setFeature work
self.connection.removeClient()
# no need to log it here, the error shall be sent to the client
raise BaseException(error)

return self

def removeClient(self):
if not self.observer_only:
self.connection.removeClient()
@@ -332,9 +315,9 @@ class BitBakeXMLRPCServerConnection(BitBakeBaseServerConnection):
pass

class BitBakeServer(BitBakeBaseServer):
def initServer(self, interface = ("localhost", 0), single_use = False):
def initServer(self, interface = ("localhost", 0)):
self.interface = interface
self.serverImpl = XMLRPCServer(interface, single_use)
self.serverImpl = XMLRPCServer(interface)

def detach(self):
daemonize.createDaemon(self.serverImpl.serve_forever, "bitbake-cookerdaemon.log")
@@ -349,9 +332,7 @@ class BitBakeServer(BitBakeBaseServer):

class BitBakeXMLRPCClient(BitBakeBaseServer):

def __init__(self, observer_only = False, token = None):
self.token = token

def __init__(self, observer_only = False):
self.observer_only = observer_only
# if we need extra caches, just tell the server to load them all
pass
@@ -359,14 +340,37 @@ class BitBakeXMLRPCClient(BitBakeBaseServer):
def saveConnectionDetails(self, remote):
self.remote = remote

def saveConnectionConfigParams(self, configParams):
self.configParams = configParams

def establishConnection(self, featureset):
# The format of "remote" must be "server:port"
try:
[host, port] = self.remote.split(":")
port = int(port)
except Exception as e:
bb.warn("Failed to read remote definition (%s)" % str(e))
raise e
bb.fatal("Failed to read remote definition (%s)" % str(e))

# use automatic port if port set to -1, meaning read it from
# the bitbake.lock file
if port == -1:
lock_location = "%s/bitbake.lock" % self.configParams.environment.get('BUILDDIR')
lock = bb.utils.lockfile(lock_location, False, False)
if lock:
# This means there is no server running which we can
# connect to on the local system.
bb.utils.unlockfile(lock)
return None

try:
lf = open(lock_location, 'r')
remotedef = lf.readline()
[host, port] = remotedef.split(":")
port = int(port)
lf.close()
self.remote = remotedef
except Exception as e:
bb.fatal("Failed to read bitbake.lock (%s)" % str(e))

# We need our IP for the server connection. We get the IP
# by trying to connect with the server
@@ -376,15 +380,13 @@ class BitBakeXMLRPCClient(BitBakeBaseServer):
ip = s.getsockname()[0]
s.close()
except Exception as e:
bb.warn("Could not create socket for %s:%s (%s)" % (host, port, str(e)))
raise e
bb.fatal("Could not create socket for %s:%s (%s)" % (host, port, str(e)))
try:
self.serverImpl = XMLRPCProxyServer(host, port)
self.connection = BitBakeXMLRPCServerConnection(self.serverImpl, (ip, 0), self.observer_only, featureset)
return self.connection.connect(self.token)
return self.connection.connect()
except Exception as e:
bb.warn("Could not connect to server at %s:%s (%s)" % (host, port, str(e)))
raise e
bb.fatal("Could not connect to server at %s:%s (%s)" % (host, port, str(e)))

def endSession(self):
self.connection.removeClient()
@@ -4,7 +4,6 @@ import os
import re
import tempfile
import bb.data
from bb.checksum import FileChecksumCache

logger = logging.getLogger('BitBake.SigGen')

@@ -38,7 +37,6 @@ class SignatureGenerator(object):
self.taskhash = {}
self.runtaskdeps = {}
self.file_checksum_values = {}
self.taints = {}

def finalise(self, fn, d, varient):
return
@@ -46,8 +44,7 @@ class SignatureGenerator(object):
def get_taskhash(self, fn, task, deps, dataCache):
return "0"

def writeout_file_checksum_cache(self):
"""Write/update the file checksum cache onto disk"""
def set_taskdata(self, hashes, deps, checksum):
return

def stampfile(self, stampbase, file_name, taskname, extrainfo):
@@ -65,13 +62,6 @@ class SignatureGenerator(object):
def dump_sigs(self, dataCache, options):
return

def get_taskdata(self):
return (self.runtaskdeps, self.taskhash, self.file_checksum_values, self.taints)

def set_taskdata(self, data):
self.runtaskdeps, self.taskhash, self.file_checksum_values, self.taints = data


class SignatureGeneratorBasic(SignatureGenerator):
"""
"""
@@ -83,19 +73,12 @@ class SignatureGeneratorBasic(SignatureGenerator):
self.taskdeps = {}
self.runtaskdeps = {}
self.file_checksum_values = {}
self.taints = {}
self.gendeps = {}
self.lookupcache = {}
self.pkgnameextract = re.compile("(?P<fn>.*)\..*")
self.basewhitelist = set((data.getVar("BB_HASHBASE_WHITELIST", True) or "").split())
self.taskwhitelist = None
self.init_rundepcheck(data)
checksum_cache_file = data.getVar("BB_HASH_CHECKSUM_CACHE_FILE", True)
if checksum_cache_file:
self.checksum_cache = FileChecksumCache()
self.checksum_cache.init_cache(data, checksum_cache_file)
else:
self.checksum_cache = None

def init_rundepcheck(self, data):
self.taskwhitelist = data.getVar("BB_HASHTASK_WHITELIST", True) or None
@@ -155,7 +138,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
try:
taskdeps = self._build_data(fn, d)
except:
bb.warn("Error during finalise of %s" % fn)
bb.note("Error during finalise of %s" % fn)
raise

#Slow but can be useful for debugging mismatched basehashes
@@ -187,9 +170,8 @@ class SignatureGeneratorBasic(SignatureGenerator):
k = fn + "." + task
data = dataCache.basetaskhash[k]
self.runtaskdeps[k] = []
self.file_checksum_values[k] = []
self.file_checksum_values[k] = {}
recipename = dataCache.pkg_fn[fn]

for dep in sorted(deps, key=clean_basepath):
depname = dataCache.pkg_fn[self.pkgnameextract.search(dep).group('fn')]
if not self.rundep_check(fn, recipename, task, dep, depname, dataCache):
@@ -200,50 +182,29 @@ class SignatureGeneratorBasic(SignatureGenerator):
self.runtaskdeps[k].append(dep)

if task in dataCache.file_checksums[fn]:
if self.checksum_cache:
checksums = self.checksum_cache.get_checksums(dataCache.file_checksums[fn][task], recipename)
else:
checksums = bb.fetch2.get_file_checksums(dataCache.file_checksums[fn][task], recipename)
checksums = bb.fetch2.get_file_checksums(dataCache.file_checksums[fn][task], recipename)
for (f,cs) in checksums:
self.file_checksum_values[k].append((f,cs))
if cs:
data = data + cs

taskdep = dataCache.task_deps[fn]
if 'nostamp' in taskdep and task in taskdep['nostamp']:
# Nostamp tasks need an implicit taint so that they force any dependent tasks to run
import uuid
taint = str(uuid.uuid4())
data = data + taint
self.taints[k] = "nostamp:" + taint
self.file_checksum_values[k][f] = cs
data = data + cs

taint = self.read_taint(fn, task, dataCache.stamp[fn])
if taint:
data = data + taint
self.taints[k] = taint
logger.warning("%s is tainted from a forced run" % k)

h = hashlib.md5(data).hexdigest()
self.taskhash[k] = h
#d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task])
return h

def writeout_file_checksum_cache(self):
"""Write/update the file checksum cache onto disk"""
if self.checksum_cache:
self.checksum_cache.save_extras()
self.checksum_cache.save_merge()
else:
bb.fetch2.fetcher_parse_save()
bb.fetch2.fetcher_parse_done()
def set_taskdata(self, hashes, deps, checksums):
self.runtaskdeps = deps
self.taskhash = hashes
self.file_checksum_values = checksums

def dump_sigtask(self, fn, task, stampbase, runtime):

k = fn + "." + task
referencestamp = stampbase
if isinstance(runtime, str) and runtime.startswith("customfile"):
if runtime == "customfile":
sigfile = stampbase
referencestamp = runtime[11:]
elif runtime and k in self.taskhash:
sigfile = stampbase + "." + task + ".sigdata" + "." + self.taskhash[k]
else:
@@ -252,7 +213,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
bb.utils.mkdirhier(os.path.dirname(sigfile))

data = {}
data['task'] = task
data['basewhitelist'] = self.basewhitelist
data['taskwhitelist'] = self.taskwhitelist
data['taskdeps'] = self.taskdeps[fn][task]
@@ -268,20 +228,15 @@ class SignatureGeneratorBasic(SignatureGenerator):

if runtime and k in self.taskhash:
data['runtaskdeps'] = self.runtaskdeps[k]
data['file_checksum_values'] = [(os.path.basename(f), cs) for f,cs in self.file_checksum_values[k]]
data['file_checksum_values'] = [(os.path.basename(f), cs) for f,cs in self.file_checksum_values[k].items()]
data['runtaskhashes'] = {}
for dep in data['runtaskdeps']:
data['runtaskhashes'][dep] = self.taskhash[dep]
data['taskhash'] = self.taskhash[k]

taint = self.read_taint(fn, task, referencestamp)
taint = self.read_taint(fn, task, stampbase)
if taint:
data['taint'] = taint

if runtime and k in self.taints:
if 'nostamp:' in self.taints[k]:
data['taint'] = self.taints[k]

fd, tmpfile = tempfile.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.")
try:
with os.fdopen(fd, "wb") as stream:
@@ -296,15 +251,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
pass
raise err

computed_basehash = calc_basehash(data)
if computed_basehash != self.basehash[k]:
bb.error("Basehash mismatch %s verses %s for %s" % (computed_basehash, self.basehash[k], k))
if k in self.taskhash:
computed_taskhash = calc_taskhash(data)
if computed_taskhash != self.taskhash[k]:
bb.error("Taskhash mismatch %s verses %s for %s" % (computed_taskhash, self.taskhash[k], k))


def dump_sigs(self, dataCache, options):
for fn in self.taskdeps:
for task in self.taskdeps[fn]:
@@ -344,13 +290,13 @@ def dump_this_task(outfile, d):
import bb.parse
fn = d.getVar("BB_FILENAME", True)
task = "do_" + d.getVar("BB_CURRENTTASK", True)
referencestamp = bb.build.stamp_internal(task, d, None, True)
bb.parse.siggen.dump_sigtask(fn, task, outfile, "customfile:" + referencestamp)
bb.parse.siggen.dump_sigtask(fn, task, outfile, "customfile")
def clean_basepath(a):
    b = a.rsplit("/", 2)[1] + a.rsplit("/", 2)[2]
    if a.startswith("virtual:"):
        b = b + ":" + a.rsplit(":", 1)[0]
        b = a.rsplit(":", 1)[0] + ":" + a.rsplit("/", 1)[1]
    else:
        b = a.rsplit("/", 1)[1]
    return b

def clean_basepaths(a):
@@ -359,12 +305,6 @@ def clean_basepaths(a):
        b[clean_basepath(x)] = a[x]
    return b

def clean_basepaths_list(a):
    b = []
    for x in a:
        b.append(clean_basepath(x))
    return b

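# A standalone sketch of the daisy-side clean_basepath above: strip the
# directory from a recipe path while keeping any "virtual:" prefix intact.
def clean_basepath_sketch(a):
    if a.startswith("virtual:"):
        return a.rsplit(":", 1)[0] + ":" + a.rsplit("/", 1)[1]
    return a.rsplit("/", 1)[1]

assert clean_basepath_sketch("/meta/recipes/foo/foo_1.0.bb") == "foo_1.0.bb"
assert clean_basepath_sketch("virtual:native:/meta/foo/foo_1.0.bb") == "virtual:native:foo_1.0.bb"
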
def compare_sigfiles(a, b, recursecb = None):
    output = []

@@ -465,21 +405,6 @@ def compare_sigfiles(a, b, recursecb = None):
            output.append("Dependency on checksum of file %s was removed" % (f))


    if len(a_data['runtaskdeps']) != len(b_data['runtaskdeps']):
        changed = ["Number of task dependencies changed"]
    else:
        changed = []
        for idx, task in enumerate(a_data['runtaskdeps']):
            a = a_data['runtaskdeps'][idx]
            b = b_data['runtaskdeps'][idx]
            if a_data['runtaskhashes'][a] != b_data['runtaskhashes'][b]:
                changed.append("%s with hash %s\n changed to\n%s with hash %s" % (a, a_data['runtaskhashes'][a], b, b_data['runtaskhashes'][b]))

    if changed:
        output.append("runtaskdeps changed from %s to %s" % (clean_basepaths_list(a_data['runtaskdeps']), clean_basepaths_list(b_data['runtaskdeps'])))
        output.append("\n".join(changed))


    if 'runtaskhashes' in a_data and 'runtaskhashes' in b_data:
        a = a_data['runtaskhashes']
        b = b_data['runtaskhashes']
@@ -522,40 +447,6 @@ def compare_sigfiles(a, b, recursecb = None):
    return output


def calc_basehash(sigdata):
    task = sigdata['task']
    basedata = sigdata['varvals'][task]

    if basedata is None:
        basedata = ''

    alldeps = sigdata['taskdeps']
    for dep in alldeps:
        basedata = basedata + dep
        val = sigdata['varvals'][dep]
        if val is not None:
            basedata = basedata + str(val)

    return hashlib.md5(basedata).hexdigest()

def calc_taskhash(sigdata):
    data = sigdata['basehash']

    for dep in sigdata['runtaskdeps']:
        data = data + sigdata['runtaskhashes'][dep]

    for c in sigdata['file_checksum_values']:
        data = data + c[1]

    if 'taint' in sigdata:
        if 'nostamp:' in sigdata['taint']:
            data = data + sigdata['taint'][8:]
        else:
            data = data + sigdata['taint']

    return hashlib.md5(data).hexdigest()

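# A worked sketch of the calc_taskhash scheme above with made-up hash
# values; on Python 3 the concatenated string must be encoded before md5.
import hashlib

sigdata = {
    'basehash': 'aa11',
    'runtaskdeps': ['/meta/foo/foo_1.0.bb.do_compile'],
    'runtaskhashes': {'/meta/foo/foo_1.0.bb.do_compile': 'bb22'},
    'file_checksum_values': [('foo.patch', 'cc33')],
    'taint': 'nostamp:1234',
}

data = sigdata['basehash']
for dep in sigdata['runtaskdeps']:
    data += sigdata['runtaskhashes'][dep]
for _, checksum in sigdata['file_checksum_values']:
    data += checksum
# 'nostamp:' taints contribute only what follows the eight-char prefix
data += sigdata['taint'][8:] if 'nostamp:' in sigdata['taint'] else sigdata['taint']

print(hashlib.md5(data.encode()).hexdigest())
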
def dump_sigfile(a):
    output = []

@@ -589,13 +480,4 @@ def dump_sigfile(a):
    if 'taint' in a_data:
        output.append("Tainted (by forced/invalidated task): %s" % a_data['taint'])

    if 'task' in a_data:
        computed_basehash = calc_basehash(a_data)
        output.append("Computed base hash is %s and from file %s" % (computed_basehash, a_data['basehash']))
    else:
        output.append("Unable to compute base hash")

    computed_taskhash = calc_taskhash(a_data)
    output.append("Computed task hash is %s" % computed_taskhash)

    return output

@@ -41,7 +41,7 @@ class TaskData:
    """
    BitBake Task Data implementation
    """
    def __init__(self, abort = True, tryaltconfigs = False, skiplist = None, allowincomplete = False):
    def __init__(self, abort = True, tryaltconfigs = False, skiplist = None):
        self.build_names_index = []
        self.run_names_index = []
        self.fn_index = []
@@ -70,7 +70,6 @@ class TaskData:

        self.abort = abort
        self.tryaltconfigs = tryaltconfigs
        self.allowincomplete = allowincomplete

        self.skiplist = skiplist

@@ -172,8 +171,6 @@ class TaskData:
        if fnid in self.tasks_fnid:
            return

        self.add_extra_deps(fn, dataCache)

        for task in task_deps['tasks']:

            # Work out task dependencies
@@ -244,21 +241,6 @@ class TaskData:
            self.fail_fnid(fnid)
            return

    def add_extra_deps(self, fn, dataCache):
        func = dataCache.extradepsfunc.get(fn, None)
        if func:
            bb.providers.buildWorldTargetList(dataCache)
            pn = dataCache.pkg_fn[fn]
            params = {'deps': dataCache.deps[fn],
                      'world_target': dataCache.world_target,
                      'pkg_pn': dataCache.pkg_pn,
                      'self_pn': pn}
            funcname = '_%s_calculate_extra_depends' % pn.replace('-', '_')
            paramlist = ','.join(params.keys())
            func = 'def %s(%s):\n%s\n\n%s(%s)' % (funcname, paramlist, func, funcname, paramlist)
            bb.utils.better_exec(func, params)


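# A stripped-down sketch of the string-assembly trick add_extra_deps uses
# above, with plain exec() standing in for bb.utils.better_exec and
# made-up parameters; the generated function is defined and then called once.
params = {'deps': ['libfoo'], 'self_pn': 'bar-native'}
body = "    deps.append(self_pn + '-extras')"  # stand-in for the metadata function

funcname = '_%s_calculate_extra_depends' % params['self_pn'].replace('-', '_')
paramlist = ','.join(params.keys())
src = 'def %s(%s):\n%s\n\n%s(%s)' % (funcname, paramlist, body, funcname, paramlist)

exec(src, {}, params)
print(params['deps'])  # ['libfoo', 'bar-native-extras']
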
    def have_build_target(self, target):
        """
        Have we a build target matching this name?
@@ -446,14 +428,7 @@ class TaskData:
            return

        if not item in dataCache.providers:
            close_matches = self.get_close_matches(item, dataCache.providers.keys())
            # Is it in RuntimeProviders ?
            all_p = bb.providers.getRuntimeProviders(dataCache, item)
            for fn in all_p:
                new = dataCache.pkg_fn[fn] + " RPROVIDES " + item
                if new not in close_matches:
                    close_matches.append(new)
            bb.event.fire(bb.event.NoProvider(item, dependees=self.get_dependees_str(item), reasons=self.get_reasons(item), close_matches=close_matches), cfgData)
            bb.event.fire(bb.event.NoProvider(item, dependees=self.get_dependees_str(item), reasons=self.get_reasons(item), close_matches=self.get_close_matches(item, dataCache.providers.keys())), cfgData)
            raise bb.providers.NoProvider(item)

        if self.have_build_target(item):
@@ -538,7 +513,7 @@ class TaskData:
            self.add_runtime_target(fn, item)
            self.add_tasks(fn, dataCache)

    def fail_fnid(self, fnid, missing_list=None):
    def fail_fnid(self, fnid, missing_list = []):
        """
        Mark a file as failed (unbuildable)
        Remove any references from build and runtime provider lists
@@ -547,8 +522,6 @@ class TaskData:
        """
        if fnid in self.failed_fnids:
            return
        if not missing_list:
            missing_list = []
        logger.debug(1, "File '%s' is unbuildable, removing...", self.fn_index[fnid])
        self.failed_fnids.append(fnid)
        for target in self.build_targets:
@@ -562,7 +535,7 @@ class TaskData:
            if len(self.run_targets[target]) == 0:
                self.remove_runtarget(target, missing_list)

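# Why the signatures here move from "missing_list = []" to a None default
# (see fail_fnid above and remove_buildtarget/remove_runtarget below):
# a mutable default is created once and silently shared across calls.
def buggy(item, acc=[]):
    acc.append(item)
    return acc

def fixed(item, acc=None):
    if acc is None:
        acc = []
    acc.append(item)
    return acc

print(buggy(1), buggy(2))  # [1, 2] [1, 2] -- the same list both times
print(fixed(1), fixed(2))  # [1] [2]
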
    def remove_buildtarget(self, targetid, missing_list=None):
    def remove_buildtarget(self, targetid, missing_list = []):
        """
        Mark a build target as failed (unbuildable)
        Trigger removal of any files that have this as a dependency
@@ -587,7 +560,7 @@ class TaskData:
            logger.error("Required build target '%s' has no buildable providers.\nMissing or unbuildable dependency chain was: %s", target, missing_list)
            raise bb.providers.NoProvider(target)

    def remove_runtarget(self, targetid, missing_list=None):
    def remove_runtarget(self, targetid, missing_list = []):
        """
        Mark a run target as failed (unbuildable)
        Trigger removal of any files that have this as a dependency
@@ -621,10 +594,9 @@ class TaskData:
                added = added + 1
            except bb.providers.NoProvider:
                targetid = self.getbuild_id(target)
                if self.abort and targetid in self.external_targets and not self.allowincomplete:
                if self.abort and targetid in self.external_targets:
                    raise
                if not self.allowincomplete:
                    self.remove_buildtarget(targetid)
                self.remove_buildtarget(targetid)
        for target in self.get_unresolved_run_targets(dataCache):
            try:
                self.add_rprovider(cfgData, dataCache, target)
@@ -636,17 +608,6 @@ class TaskData:
                break
        # self.dump_data()

    def get_providermap(self, prefix=None):
        provmap = {}
        for name in self.build_names_index:
            if prefix and not name.startswith(prefix):
                continue
            if self.have_build_target(name):
                provider = self.get_provider(name)
                if provider:
                    provmap[name] = self.fn_index[provider[0]]
        return provmap

    def dump_data(self):
        """
        Dump some debug information on the internal data structures

@@ -293,16 +293,11 @@ bb.data.getVar(a(), d, False)
    def test_python(self):
        self.d.setVar("FOO", self.pydata)
        self.setEmptyVars(["inexpand", "a", "test2", "test"])
        self.d.setVarFlags("FOO", {
            "func": True,
            "python": True,
            "lineno": 1,
            "filename": "example.bb",
        })
        self.d.setVarFlags("FOO", {"func": True, "python": True})

        deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)

        self.assertEqual(deps, set(["somevar", "bar", "something", "inexpand", "test", "test2", "a"]))
        self.assertEquals(deps, set(["somevar", "bar", "something", "inexpand", "test", "test2", "a"]))


    shelldata = """
@@ -349,7 +344,7 @@ esac

        deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)

        self.assertEqual(deps, set(["somevar", "inverted"] + execs))
        self.assertEquals(deps, set(["somevar", "inverted"] + execs))


    def test_vardeps(self):
@@ -359,7 +354,7 @@ esac

        deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)

        self.assertEqual(deps, set(["oe_libinstall"]))
        self.assertEquals(deps, set(["oe_libinstall"]))

    def test_vardeps_expand(self):
        self.d.setVar("oe_libinstall", "echo test")
@@ -368,7 +363,7 @@ esac

        deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)

        self.assertEqual(deps, set(["oe_libinstall"]))
        self.assertEquals(deps, set(["oe_libinstall"]))

    #Currently no wildcard support
    #def test_vardeps_wildcards(self):

@@ -34,14 +34,14 @@ class COWTestCase(unittest.TestCase):
        from bb.COW import COWDictBase
        a = COWDictBase.copy()

        self.assertEqual(False, 'a' in a)
        self.assertEquals(False, a.has_key('a'))

        a['a'] = 'a'
        a['b'] = 'b'
        self.assertEqual(True, 'a' in a)
        self.assertEqual(True, 'b' in a)
        self.assertEqual('a', a['a'] )
        self.assertEqual('b', a['b'] )
        self.assertEquals(True, a.has_key('a'))
        self.assertEquals(True, a.has_key('b'))
        self.assertEquals('a', a['a'] )
        self.assertEquals('b', a['b'] )

    def testCopyCopy(self):
        """
@@ -60,31 +60,31 @@ class COWTestCase(unittest.TestCase):
        c['a'] = 30

        # test separation of the two instances
        self.assertEqual(False, 'c' in c)
        self.assertEqual(30, c['a'])
        self.assertEqual(10, b['a'])
        self.assertEquals(False, c.has_key('c'))
        self.assertEquals(30, c['a'])
        self.assertEquals(10, b['a'])

        # test copy
        b_2 = b.copy()
        c_2 = c.copy()

        self.assertEqual(False, 'c' in c_2)
        self.assertEqual(10, b_2['a'])
        self.assertEquals(False, c_2.has_key('c'))
        self.assertEquals(10, b_2['a'])

        b_2['d'] = 40
        self.assertEqual(False, 'd' in c_2)
        self.assertEqual(True, 'd' in b_2)
        self.assertEqual(40, b_2['d'])
        self.assertEqual(False, 'd' in b)
        self.assertEqual(False, 'd' in c)
        self.assertEquals(False, c_2.has_key('d'))
        self.assertEquals(True, b_2.has_key('d'))
        self.assertEquals(40, b_2['d'])
        self.assertEquals(False, b.has_key('d'))
        self.assertEquals(False, c.has_key('d'))

        c_2['d'] = 30
        self.assertEqual(True, 'd' in c_2)
        self.assertEqual(True, 'd' in b_2)
        self.assertEqual(30, c_2['d'])
        self.assertEqual(40, b_2['d'])
        self.assertEqual(False, 'd' in b)
        self.assertEqual(False, 'd' in c)
        self.assertEquals(True, c_2.has_key('d'))
        self.assertEquals(True, b_2.has_key('d'))
        self.assertEquals(30, c_2['d'])
        self.assertEquals(40, b_2['d'])
        self.assertEquals(False, b.has_key('d'))
        self.assertEquals(False, c.has_key('d'))

        # test copy of the copy
        c_3 = c_2.copy()
@@ -92,19 +92,19 @@ class COWTestCase(unittest.TestCase):
        b_3_2 = b_2.copy()

        c_3['e'] = 4711
        self.assertEqual(4711, c_3['e'])
        self.assertEqual(False, 'e' in c_2)
        self.assertEqual(False, 'e' in b_3)
        self.assertEqual(False, 'e' in b_3_2)
        self.assertEqual(False, 'e' in b_2)
        self.assertEquals(4711, c_3['e'])
        self.assertEquals(False, c_2.has_key('e'))
        self.assertEquals(False, b_3.has_key('e'))
        self.assertEquals(False, b_3_2.has_key('e'))
        self.assertEquals(False, b_2.has_key('e'))

        b_3['e'] = 'viel'
        self.assertEqual('viel', b_3['e'])
        self.assertEqual(4711, c_3['e'])
        self.assertEqual(False, 'e' in c_2)
        self.assertEqual(True, 'e' in b_3)
        self.assertEqual(False, 'e' in b_3_2)
        self.assertEqual(False, 'e' in b_2)
        self.assertEquals('viel', b_3['e'])
        self.assertEquals(4711, c_3['e'])
        self.assertEquals(False, c_2.has_key('e'))
        self.assertEquals(True, b_3.has_key('e'))
        self.assertEquals(False, b_3_2.has_key('e'))
        self.assertEquals(False, b_2.has_key('e'))

    def testCow(self):
        from bb.COW import COWDictBase
@@ -115,12 +115,12 @@ class COWTestCase(unittest.TestCase):

        copy = c.copy()

        self.assertEqual(1027, c['123'])
        self.assertEqual(4711, c['other'])
        self.assertEqual({'abc':10, 'bcd':20}, c['d'])
        self.assertEqual(1027, copy['123'])
        self.assertEqual(4711, copy['other'])
        self.assertEqual({'abc':10, 'bcd':20}, copy['d'])
        self.assertEquals(1027, c['123'])
        self.assertEquals(4711, c['other'])
        self.assertEquals({'abc':10, 'bcd':20}, c['d'])
        self.assertEquals(1027, copy['123'])
        self.assertEquals(4711, copy['other'])
        self.assertEquals({'abc':10, 'bcd':20}, copy['d'])

        # cow it now
        copy['123'] = 1028
@@ -128,9 +128,9 @@ class COWTestCase(unittest.TestCase):
        copy['d']['abc'] = 20


        self.assertEqual(1027, c['123'])
        self.assertEqual(4711, c['other'])
        self.assertEqual({'abc':10, 'bcd':20}, c['d'])
        self.assertEqual(1028, copy['123'])
        self.assertEqual(4712, copy['other'])
        self.assertEqual({'abc':20, 'bcd':20}, copy['d'])
        self.assertEquals(1027, c['123'])
        self.assertEquals(4711, c['other'])
        self.assertEquals({'abc':10, 'bcd':20}, c['d'])
        self.assertEquals(1028, copy['123'])
        self.assertEquals(4712, copy['other'])
        self.assertEquals({'abc':20, 'bcd':20}, copy['d'])

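# A tiny stand-in for the copy-on-write behaviour the COW tests above
# exercise: a copy reads through to its parent until it writes, and the
# write then stays local. collections.ChainMap shows the same effect.
from collections import ChainMap

parent = {'a': 10}
child = ChainMap({}, parent)  # reads fall through, writes hit the first map

child['a'] = 30
print(parent['a'], child['a'])  # 10 30 -- the parent is untouched
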
@@ -24,30 +24,6 @@ import unittest
import bb
import bb.data
import bb.parse
import logging

class LogRecord():
    def __enter__(self):
        logs = []
        class LogHandler(logging.Handler):
            def emit(self, record):
                logs.append(record)
        logger = logging.getLogger("BitBake")
        handler = LogHandler()
        self.handler = handler
        logger.addHandler(handler)
        return logs
    def __exit__(self, type, value, traceback):
        logger = logging.getLogger("BitBake")
        logger.removeHandler(self.handler)
        return

def logContains(item, logs):
    for l in logs:
        m = l.getMessage()
        if item in m:
            return True
    return False

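# How the LogRecord/logContains helpers above are used: capture every
# record emitted on the "BitBake" logger while the block runs, then
# search the formatted messages. Pure stdlib logging, nothing bb-specific.
import logging

with LogRecord() as logs:
    logging.getLogger("BitBake").warning("something %s happened", "odd")

assert logContains("something odd happened", logs)
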
class DataExpansions(unittest.TestCase):
    def setUp(self):
@@ -80,11 +56,6 @@ class DataExpansions(unittest.TestCase):
        val = self.d.expand("${@d.getVar('foo', True) + ' ${bar}'}")
        self.assertEqual(str(val), "value_of_foo value_of_bar")

    def test_python_unexpanded(self):
        self.d.setVar("bar", "${unsetvar}")
        val = self.d.expand("${@d.getVar('foo', True) + ' ${bar}'}")
        self.assertEqual(str(val), "${@d.getVar('foo', True) + ' ${unsetvar}'}")

    def test_python_snippet_syntax_error(self):
        self.d.setVar("FOO", "${@foo = 5}")
        self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)
@@ -139,23 +110,17 @@ class DataExpansions(unittest.TestCase):

    def test_rename(self):
        self.d.renameVar("foo", "newfoo")
        self.assertEqual(self.d.getVar("newfoo", False), "value_of_foo")
        self.assertEqual(self.d.getVar("foo", False), None)
        self.assertEqual(self.d.getVar("newfoo"), "value_of_foo")
        self.assertEqual(self.d.getVar("foo"), None)

    def test_deletion(self):
        self.d.delVar("foo")
        self.assertEqual(self.d.getVar("foo", False), None)
        self.assertEqual(self.d.getVar("foo"), None)

    def test_keys(self):
        keys = self.d.keys()
        self.assertEqual(keys, ['value_of_foo', 'foo', 'bar'])

    def test_keys_deletion(self):
        newd = bb.data.createCopy(self.d)
        newd.delVar("bar")
        keys = newd.keys()
        self.assertEqual(keys, ['value_of_foo', 'foo'])

class TestNestedExpansions(unittest.TestCase):
    def setUp(self):
        self.d = bb.data.init()
@@ -201,28 +166,28 @@ class TestMemoize(unittest.TestCase):
    def test_memoized(self):
        d = bb.data.init()
        d.setVar("FOO", "bar")
        self.assertTrue(d.getVar("FOO", False) is d.getVar("FOO", False))
        self.assertTrue(d.getVar("FOO") is d.getVar("FOO"))

    def test_not_memoized(self):
        d1 = bb.data.init()
        d2 = bb.data.init()
        d1.setVar("FOO", "bar")
        d2.setVar("FOO", "bar2")
        self.assertTrue(d1.getVar("FOO", False) is not d2.getVar("FOO", False))
        self.assertTrue(d1.getVar("FOO") is not d2.getVar("FOO"))

    def test_changed_after_memoized(self):
        d = bb.data.init()
        d.setVar("foo", "value of foo")
        self.assertEqual(str(d.getVar("foo", False)), "value of foo")
        self.assertEqual(str(d.getVar("foo")), "value of foo")
        d.setVar("foo", "second value of foo")
        self.assertEqual(str(d.getVar("foo", False)), "second value of foo")
        self.assertEqual(str(d.getVar("foo")), "second value of foo")

    def test_same_value(self):
        d = bb.data.init()
        d.setVar("foo", "value of")
        d.setVar("bar", "value of")
        self.assertEqual(d.getVar("foo", False),
                         d.getVar("bar", False))
        self.assertEqual(d.getVar("foo"),
                         d.getVar("bar"))

class TestConcat(unittest.TestCase):
    def setUp(self):
@@ -275,13 +240,6 @@ class TestConcatOverride(unittest.TestCase):
        bb.data.update_data(self.d)
        self.assertEqual(self.d.getVar("TEST", True), "foo:val:val2:bar")

    def test_append_unset(self):
        self.d.setVar("TEST_prepend", "${FOO}:")
        self.d.setVar("TEST_append", ":val2")
        self.d.setVar("TEST_append", ":${BAR}")
        bb.data.update_data(self.d)
        self.assertEqual(self.d.getVar("TEST", True), "foo::val2:bar")

    def test_remove(self):
        self.d.setVar("TEST", "${VAL} ${BAR}")
        self.d.setVar("TEST_remove", "val")
@@ -301,20 +259,6 @@ class TestConcatOverride(unittest.TestCase):
        bb.data.update_data(self.d)
        self.assertEqual(self.d.getVar("TEST", True), "")

    def test_remove_expansion(self):
        self.d.setVar("BAR", "Z")
        self.d.setVar("TEST", "${BAR}/X Y")
        self.d.setVar("TEST_remove", "${BAR}/X")
        bb.data.update_data(self.d)
        self.assertEqual(self.d.getVar("TEST", True), "Y")

    def test_remove_expansion_items(self):
        self.d.setVar("TEST", "A B C D")
        self.d.setVar("BAR", "B D")
        self.d.setVar("TEST_remove", "${BAR}")
        bb.data.update_data(self.d)
        self.assertEqual(self.d.getVar("TEST", True), "A C")

class TestOverrides(unittest.TestCase):
    def setUp(self):
        self.d = bb.data.init()
@@ -330,66 +274,13 @@ class TestOverrides(unittest.TestCase):
        bb.data.update_data(self.d)
        self.assertEqual(self.d.getVar("TEST", True), "testvalue2")

    def test_one_override_unset(self):
        self.d.setVar("TEST2_bar", "testvalue2")
        bb.data.update_data(self.d)
        self.assertEqual(self.d.getVar("TEST2", True), "testvalue2")
        self.assertItemsEqual(self.d.keys(), ['TEST', 'TEST2', 'OVERRIDES', 'TEST2_bar'])

    def test_multiple_override(self):
        self.d.setVar("TEST_bar", "testvalue2")
        self.d.setVar("TEST_local", "testvalue3")
        self.d.setVar("TEST_foo", "testvalue4")
        bb.data.update_data(self.d)
        self.assertEqual(self.d.getVar("TEST", True), "testvalue3")
        self.assertItemsEqual(self.d.keys(), ['TEST', 'TEST_foo', 'OVERRIDES', 'TEST_bar', 'TEST_local'])

    def test_multiple_combined_overrides(self):
        self.d.setVar("TEST_local_foo_bar", "testvalue3")
        bb.data.update_data(self.d)
        self.assertEqual(self.d.getVar("TEST", True), "testvalue3")

    def test_multiple_overrides_unset(self):
        self.d.setVar("TEST2_local_foo_bar", "testvalue3")
        bb.data.update_data(self.d)
        self.assertEqual(self.d.getVar("TEST2", True), "testvalue3")

    def test_keyexpansion_override(self):
        self.d.setVar("LOCAL", "local")
        self.d.setVar("TEST_bar", "testvalue2")
        self.d.setVar("TEST_${LOCAL}", "testvalue3")
        self.d.setVar("TEST_foo", "testvalue4")
        bb.data.update_data(self.d)
        bb.data.expandKeys(self.d)
        self.assertEqual(self.d.getVar("TEST", True), "testvalue3")

    def test_rename_override(self):
        self.d.setVar("ALTERNATIVE_ncurses-tools_class-target", "a")
        self.d.setVar("OVERRIDES", "class-target")
        bb.data.update_data(self.d)
        self.d.renameVar("ALTERNATIVE_ncurses-tools", "ALTERNATIVE_lib32-ncurses-tools")
        self.assertEqual(self.d.getVar("ALTERNATIVE_lib32-ncurses-tools", True), "a")

    def test_underscore_override(self):
        self.d.setVar("TEST_bar", "testvalue2")
        self.d.setVar("TEST_some_val", "testvalue3")
        self.d.setVar("TEST_foo", "testvalue4")
        self.d.setVar("OVERRIDES", "foo:bar:some_val")
        self.assertEqual(self.d.getVar("TEST", True), "testvalue3")

class TestKeyExpansion(unittest.TestCase):
    def setUp(self):
        self.d = bb.data.init()
        self.d.setVar("FOO", "foo")
        self.d.setVar("BAR", "foo")

    def test_keyexpand(self):
        self.d.setVar("VAL_${FOO}", "A")
        self.d.setVar("VAL_${BAR}", "B")
        with LogRecord() as logs:
            bb.data.expandKeys(self.d)
            self.assertTrue(logContains("Variable key VAL_${FOO} (A) replaces original key VAL_foo (B)", logs))
        self.assertEqual(self.d.getVar("VAL_foo", True), "A")

class TestFlags(unittest.TestCase):
    def setUp(self):
@@ -399,48 +290,12 @@ class TestFlags(unittest.TestCase):
        self.d.setVarFlag("foo", "flag2", "value of flag2")

    def test_setflag(self):
        self.assertEqual(self.d.getVarFlag("foo", "flag1", False), "value of flag1")
        self.assertEqual(self.d.getVarFlag("foo", "flag2", False), "value of flag2")
        self.assertEqual(self.d.getVarFlag("foo", "flag1"), "value of flag1")
        self.assertEqual(self.d.getVarFlag("foo", "flag2"), "value of flag2")

    def test_delflag(self):
        self.d.delVarFlag("foo", "flag2")
        self.assertEqual(self.d.getVarFlag("foo", "flag1", False), "value of flag1")
        self.assertEqual(self.d.getVarFlag("foo", "flag2", False), None)
        self.assertEqual(self.d.getVarFlag("foo", "flag1"), "value of flag1")
        self.assertEqual(self.d.getVarFlag("foo", "flag2"), None)


class Contains(unittest.TestCase):
    def setUp(self):
        self.d = bb.data.init()
        self.d.setVar("SOMEFLAG", "a b c")

    def test_contains(self):
        self.assertTrue(bb.utils.contains("SOMEFLAG", "a", True, False, self.d))
        self.assertTrue(bb.utils.contains("SOMEFLAG", "b", True, False, self.d))
        self.assertTrue(bb.utils.contains("SOMEFLAG", "c", True, False, self.d))

        self.assertTrue(bb.utils.contains("SOMEFLAG", "a b", True, False, self.d))
        self.assertTrue(bb.utils.contains("SOMEFLAG", "b c", True, False, self.d))
        self.assertTrue(bb.utils.contains("SOMEFLAG", "c a", True, False, self.d))

        self.assertTrue(bb.utils.contains("SOMEFLAG", "a b c", True, False, self.d))
        self.assertTrue(bb.utils.contains("SOMEFLAG", "c b a", True, False, self.d))

        self.assertFalse(bb.utils.contains("SOMEFLAG", "x", True, False, self.d))
        self.assertFalse(bb.utils.contains("SOMEFLAG", "a x", True, False, self.d))
        self.assertFalse(bb.utils.contains("SOMEFLAG", "x c b", True, False, self.d))
        self.assertFalse(bb.utils.contains("SOMEFLAG", "x c b a", True, False, self.d))

    def test_contains_any(self):
        self.assertTrue(bb.utils.contains_any("SOMEFLAG", "a", True, False, self.d))
        self.assertTrue(bb.utils.contains_any("SOMEFLAG", "b", True, False, self.d))
        self.assertTrue(bb.utils.contains_any("SOMEFLAG", "c", True, False, self.d))

        self.assertTrue(bb.utils.contains_any("SOMEFLAG", "a b", True, False, self.d))
        self.assertTrue(bb.utils.contains_any("SOMEFLAG", "b c", True, False, self.d))
        self.assertTrue(bb.utils.contains_any("SOMEFLAG", "c a", True, False, self.d))

        self.assertTrue(bb.utils.contains_any("SOMEFLAG", "a x", True, False, self.d))
        self.assertTrue(bb.utils.contains_any("SOMEFLAG", "x c", True, False, self.d))

        self.assertFalse(bb.utils.contains_any("SOMEFLAG", "x", True, False, self.d))
        self.assertFalse(bb.utils.contains_any("SOMEFLAG", "x y z", True, False, self.d))

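# A standalone sketch of the bb.utils.contains/contains_any semantics the
# tests above rely on (not the bb implementation): contains needs every
# word of the check string present, contains_any needs at least one.
def contains_sketch(value, checkvalues):
    return set(checkvalues.split()).issubset(set(value.split()))

def contains_any_sketch(value, checkvalues):
    return bool(set(value.split()) & set(checkvalues.split()))

assert contains_sketch("a b c", "c a")
assert not contains_sketch("a b c", "a x")
assert contains_any_sketch("a b c", "x c")
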
@@ -22,10 +22,8 @@
import unittest
import tempfile
import subprocess
import collections
import os
from bb.fetch2 import URI
from bb.fetch2 import FetchMethod
import bb

class URITest(unittest.TestCase):
@@ -134,10 +132,10 @@ class URITest(unittest.TestCase):
            'userinfo': 'anoncvs:anonymous',
            'username': 'anoncvs',
            'password': 'anonymous',
            'params': collections.OrderedDict([
                ('tag', 'V0-99-81'),
                ('module', 'familiar/dist/ipkg')
            ]),
            'params': {
                'tag': 'V0-99-81',
                'module': 'familiar/dist/ipkg'
            },
            'query': {},
            'relative': False
        },
@@ -229,38 +227,7 @@ class URITest(unittest.TestCase):
            'params': {},
            'query': {},
            'relative': False
        },
        "http://somesite.net;someparam=1": {
            'uri': 'http://somesite.net;someparam=1',
            'scheme': 'http',
            'hostname': 'somesite.net',
            'port': None,
            'hostport': 'somesite.net',
            'path': '',
            'userinfo': '',
            'username': '',
            'password': '',
            'params': {"someparam" : "1"},
            'query': {},
            'relative': False
        },
        "file://somelocation;someparam=1": {
            'uri': 'file:somelocation;someparam=1',
            'scheme': 'file',
            'hostname': '',
            'port': None,
            'hostport': '',
            'path': 'somelocation',
            'userinfo': '',
            'username': '',
            'password': '',
            'params': {"someparam" : "1"},
            'query': {},
            'relative': True
        }

    }

    def test_uri(self):
@@ -347,7 +314,6 @@ class URITest(unittest.TestCase):
class FetcherTest(unittest.TestCase):

    def setUp(self):
        self.origdir = os.getcwd()
        self.d = bb.data.init()
        self.tempdir = tempfile.mkdtemp()
        self.dldir = os.path.join(self.tempdir, "download")
@@ -359,7 +325,6 @@ class FetcherTest(unittest.TestCase):
        self.d.setVar("PERSISTENT_DIR", persistdir)

    def tearDown(self):
        os.chdir(self.origdir)
        bb.utils.prunedir(self.tempdir)

class MirrorUriTest(FetcherTest):
@@ -425,33 +390,11 @@ class MirrorUriTest(FetcherTest):
        uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
        self.assertEqual(uris, ['file:///someotherpath/downloads/bitbake-1.0.tar.gz'])

    def test_mirror_of_mirror(self):
        # Test if mirror of a mirror works
        mirrorvar = self.mirrorvar + " http://.*/.* http://otherdownloads.yoctoproject.org/downloads/ \n"
        mirrorvar = mirrorvar + " http://otherdownloads.yoctoproject.org/.* http://downloads2.yoctoproject.org/downloads/ \n"
        fetcher = bb.fetch.FetchData("http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d)
        mirrors = bb.fetch2.mirror_from_string(mirrorvar)
        uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
        self.assertEqual(uris, ['file:///somepath/downloads/bitbake-1.0.tar.gz',
                                'file:///someotherpath/downloads/bitbake-1.0.tar.gz',
                                'http://otherdownloads.yoctoproject.org/downloads/bitbake-1.0.tar.gz',
                                'http://downloads2.yoctoproject.org/downloads/bitbake-1.0.tar.gz'])

    recmirrorvar = "https://.*/[^/]* http://AAAA/A/A/A/ \n" \
                   "https://.*/[^/]* https://BBBB/B/B/B/ \n"

    def test_recursive(self):
        fetcher = bb.fetch.FetchData("https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d)
        mirrors = bb.fetch2.mirror_from_string(self.recmirrorvar)
        uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
        self.assertEqual(uris, ['http://AAAA/A/A/A/bitbake/bitbake-1.0.tar.gz',
                                'https://BBBB/B/B/B/bitbake/bitbake-1.0.tar.gz',
                                'http://AAAA/A/A/A/B/B/bitbake/bitbake-1.0.tar.gz'])

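# A sketch of the MIRRORS string format the tests above build: whitespace-
# separated (source-regex, replacement) pairs, one rule per pair. This is
# a simplified reading, not bb.fetch2.mirror_from_string itself.
mirrorvar = "http://.*/.* http://otherdownloads.yoctoproject.org/downloads/ \n" \
            "http://otherdownloads.yoctoproject.org/.* http://downloads2.yoctoproject.org/downloads/ \n"

tokens = mirrorvar.split()
rules = list(zip(tokens[::2], tokens[1::2]))
for pattern, replacement in rules:
    print(pattern, "->", replacement)
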
class FetcherLocalTest(FetcherTest):
    def setUp(self):
        def touch(fn):
            with open(fn, 'a'):
            with file(fn, 'a'):
                os.utime(fn, None)

        super(FetcherLocalTest, self).setUp()
@@ -483,7 +426,9 @@ class FetcherLocalTest(FetcherTest):

    def test_local_wildcard(self):
        tree = self.fetchUnpack(['file://a', 'file://dir/*'])
        self.assertEqual(tree, ['a', 'dir/c', 'dir/d', 'dir/subdir/e'])
        # FIXME: this is broken - it should return ['a', 'dir/c', 'dir/d', 'dir/subdir/e']
        # see https://bugzilla.yoctoproject.org/show_bug.cgi?id=6128
        self.assertEqual(tree, ['a', 'b', 'dir/c', 'dir/d', 'dir/subdir/e'])

    def test_local_dir(self):
        tree = self.fetchUnpack(['file://a', 'file://dir'])
@@ -491,19 +436,14 @@ class FetcherLocalTest(FetcherTest):

    def test_local_subdir(self):
        tree = self.fetchUnpack(['file://dir/subdir'])
        self.assertEqual(tree, ['dir/subdir/e'])
        # FIXME: this is broken - it should return ['dir/subdir/e']
        # see https://bugzilla.yoctoproject.org/show_bug.cgi?id=6129
        self.assertEqual(tree, ['subdir/e'])

    def test_local_subdir_file(self):
        tree = self.fetchUnpack(['file://dir/subdir/e'])
        self.assertEqual(tree, ['dir/subdir/e'])

    def test_local_subdirparam(self):
        tree = self.fetchUnpack(['file://a;subdir=bar', 'file://dir;subdir=foo/moo'])
        self.assertEqual(tree, ['bar/a', 'foo/moo/dir/c', 'foo/moo/dir/d', 'foo/moo/dir/subdir/e'])

    def test_local_deepsubdirparam(self):
        tree = self.fetchUnpack(['file://dir/subdir/e;subdir=bar'])
        self.assertEqual(tree, ['bar/dir/subdir/e'])

class FetcherNetworkTest(FetcherTest):

@@ -528,19 +468,6 @@ class FetcherNetworkTest(FetcherTest):
        fetcher.download()
        self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)

    def test_fetch_mirror_of_mirror(self):
        self.d.setVar("MIRRORS", "http://.*/.* http://invalid2.yoctoproject.org/ \n http://invalid2.yoctoproject.org/.* http://downloads.yoctoproject.org/releases/bitbake")
        fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
        fetcher.download()
        self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)

    def test_fetch_file_mirror_of_mirror(self):
        self.d.setVar("MIRRORS", "http://.*/.* file:///some1where/ \n file:///some1where/.* file://some2where/ \n file://some2where/.* http://downloads.yoctoproject.org/releases/bitbake")
        fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
        os.mkdir(self.dldir + "/some2where")
        fetcher.download()
        self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)

    def test_fetch_premirror(self):
        self.d.setVar("PREMIRRORS", "http://.*/.* http://downloads.yoctoproject.org/releases/bitbake")
        fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
@@ -612,68 +539,17 @@ class FetcherNetworkTest(FetcherTest):
        os.chdir(os.path.dirname(self.unpackdir))
        fetcher.unpack(self.unpackdir)


class TrustedNetworksTest(FetcherTest):
    def test_trusted_network(self):
        # Ensure trusted_network returns True when the host IS in the list.
        url = "git://Someserver.org/foo;rev=1"
        self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org someserver.org server2.org server3.org")
        self.assertTrue(bb.fetch.trusted_network(self.d, url))

    def test_wild_trusted_network(self):
        # Ensure trusted_network returns true when the *.host IS in the list.
        url = "git://Someserver.org/foo;rev=1"
        self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org")
        self.assertTrue(bb.fetch.trusted_network(self.d, url))

    def test_prefix_wild_trusted_network(self):
        # Ensure trusted_network returns true when the prefix matches *.host.
        url = "git://git.Someserver.org/foo;rev=1"
        self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org")
        self.assertTrue(bb.fetch.trusted_network(self.d, url))

    def test_two_prefix_wild_trusted_network(self):
        # Ensure trusted_network returns true when the prefix matches *.host.
        url = "git://something.git.Someserver.org/foo;rev=1"
        self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org")
        self.assertTrue(bb.fetch.trusted_network(self.d, url))

    def test_port_trusted_network(self):
        # Ensure trusted_network returns True, even if the url specifies a port.
        url = "git://someserver.org:8080/foo;rev=1"
        self.d.setVar("BB_ALLOWED_NETWORKS", "someserver.org")
        self.assertTrue(bb.fetch.trusted_network(self.d, url))

    def test_untrusted_network(self):
        # Ensure trusted_network returns False when the host is NOT in the list.
        url = "git://someserver.org/foo;rev=1"
        self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org server2.org server3.org")
        self.assertFalse(bb.fetch.trusted_network(self.d, url))

    def test_wild_untrusted_network(self):
        # Ensure trusted_network returns False when the host is NOT in the list.
        url = "git://*.someserver.org/foo;rev=1"
        self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org server2.org server3.org")
        self.assertFalse(bb.fetch.trusted_network(self.d, url))

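# A standalone sketch of the BB_ALLOWED_NETWORKS matching the tests above
# exercise (case-insensitive host, optional port, "*." wildcard prefixes);
# this is not the bb.fetch.trusted_network implementation.
def host_trusted(host, allowed):
    host = host.lower().rsplit(":", 1)[0]  # drop an explicit port
    for entry in allowed.lower().split():
        if entry.startswith("*.") and host.endswith(entry[1:]):
            return True
        if host == entry:
            return True
    return False

assert host_trusted("git.Someserver.org", "server1.org *.someserver.org")
assert not host_trusted("someserver.org:8080", "server1.org server2.org")
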
class URLHandle(unittest.TestCase):

    datatable = {
        "http://www.google.com/index.html" : ('http', 'www.google.com', '/index.html', '', '', {}),
        "cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'}),
        "cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', collections.OrderedDict([('tag', 'V0-99-81'), ('module', 'familiar/dist/ipkg')])),
        "git://git.openembedded.org/bitbake;branch=@foo" : ('git', 'git.openembedded.org', '/bitbake', '', '', {'branch': '@foo'}),
        "file://somelocation;someparam=1": ('file', '', 'somelocation', '', '', {'someparam': '1'}),
        "cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', {'tag': 'V0-99-81', 'module': 'familiar/dist/ipkg'}),
        "git://git.openembedded.org/bitbake;branch=@foo" : ('git', 'git.openembedded.org', '/bitbake', '', '', {'branch': '@foo'})
    }
    # we require a pathname to encodeurl but users can still pass such urls to
    # decodeurl and we need to handle them
    decodedata = datatable.copy()
    decodedata.update({
        "http://somesite.net;someparam=1": ('http', 'somesite.net', '', '', '', {'someparam': '1'}),
    })

    def test_decodeurl(self):
        for k, v in self.decodedata.items():
        for k, v in self.datatable.items():
            result = bb.fetch.decodeurl(k)
            self.assertEqual(result, v)

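# A usage sketch of the decode/encode pair the datatable above is testing;
# the values mirror one of the tested entries.
import bb.fetch

url = "cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg"
scheme, host, path, user, password, params = bb.fetch.decodeurl(url)
assert (scheme, host, params['module']) == ('cvs', 'cvs.handhelds.org', 'familiar/dist/ipkg')

# encodeurl takes the same tuple shape back to the original URL string
assert bb.fetch.encodeurl((scheme, host, path, user, password, params)) == url
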
@@ -682,132 +558,5 @@ class URLHandle(unittest.TestCase):
            result = bb.fetch.encodeurl(v)
            self.assertEqual(result, k)

class FetchLatestVersionTest(FetcherTest):

    test_git_uris = {
        # version pattern "X.Y.Z"
        ("mx-1.0", "git://github.com/clutter-project/mx.git;branch=mx-1.4", "9b1db6b8060bd00b121a692f942404a24ae2960f", "")
            : "1.99.4",
        # version pattern "vX.Y"
        ("mtd-utils", "git://git.infradead.org/mtd-utils.git", "ca39eb1d98e736109c64ff9c1aa2a6ecca222d8f", "")
            : "1.5.0",
        # version pattern "pkg_name-X.Y"
        ("presentproto", "git://anongit.freedesktop.org/git/xorg/proto/presentproto", "24f3a56e541b0a9e6c6ee76081f441221a120ef9", "")
            : "1.0",
        # version pattern "pkg_name-vX.Y.Z"
        ("dtc", "git://git.qemu.org/dtc.git", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "")
            : "1.4.0",
        # combination version pattern
        ("sysprof", "git://git.gnome.org/sysprof", "cd44ee6644c3641507fb53b8a2a69137f2971219", "")
            : "1.2.0",
        ("u-boot-mkimage", "git://git.denx.de/u-boot.git;branch=master;protocol=git", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "")
            : "2014.01",
        # version pattern "yyyymmdd"
        ("mobile-broadband-provider-info", "git://git.gnome.org/mobile-broadband-provider-info", "4ed19e11c2975105b71b956440acdb25d46a347d", "")
            : "20120614",
        # packages with a valid UPSTREAM_CHECK_GITTAGREGEX
        ("xf86-video-omap", "git://anongit.freedesktop.org/xorg/driver/xf86-video-omap", "ae0394e687f1a77e966cf72f895da91840dffb8f", "(?P<pver>(\d+\.(\d\.?)*))")
            : "0.4.3",
        ("build-appliance-image", "git://git.yoctoproject.org/poky", "b37dd451a52622d5b570183a81583cc34c2ff555", "(?P<pver>(([0-9][\.|_]?)+[0-9]))")
            : "11.0.0",
        ("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot", "cd437ecbd8986c894442f8fce1e0061e20f04dee", "chkconfig\-(?P<pver>((\d+[\.\-_]*)+))")
            : "1.3.59",
        ("remake", "git://github.com/rocky/remake.git", "f05508e521987c8494c92d9c2871aec46307d51d", "(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))")
            : "3.82+dbg0.9",
    }

    test_wget_uris = {
        # packages with versions inside directory name
        ("util-linux", "http://kernel.org/pub/linux/utils/util-linux/v2.23/util-linux-2.24.2.tar.bz2", "", "")
            : "2.24.2",
        ("enchant", "http://www.abisource.com/downloads/enchant/1.6.0/enchant-1.6.0.tar.gz", "", "")
            : "1.6.0",
        ("cmake", "http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz", "", "")
            : "2.8.12.1",
        # packages with versions only in current directory
        ("eglic", "http://downloads.yoctoproject.org/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2", "", "")
            : "2.19",
        ("gnu-config", "http://downloads.yoctoproject.org/releases/gnu-config/gnu-config-20120814.tar.bz2", "", "")
            : "20120814",
        # packages with "99" in the name of possible version
        ("pulseaudio", "http://freedesktop.org/software/pulseaudio/releases/pulseaudio-4.0.tar.xz", "", "")
            : "5.0",
        ("xserver-xorg", "http://xorg.freedesktop.org/releases/individual/xserver/xorg-server-1.15.1.tar.bz2", "", "")
            : "1.15.1",
        # packages with valid UPSTREAM_CHECK_URI and UPSTREAM_CHECK_REGEX
        ("cups", "http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2", "https://github.com/apple/cups/releases", "(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz")
            : "2.0.0",
        ("db", "http://download.oracle.com/berkeley-db/db-5.3.21.tar.gz", "http://www.oracle.com/technetwork/products/berkeleydb/downloads/index-082944.html", "http://download.oracle.com/otn/berkeley-db/(?P<name>db-)(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz")
            : "6.1.19",
    }
    if os.environ.get("BB_SKIP_NETTESTS") == "yes":
        print("Unset BB_SKIP_NETTESTS to run network tests")
    else:
        def test_git_latest_versionstring(self):
            for k, v in self.test_git_uris.items():
                self.d.setVar("PN", k[0])
                self.d.setVar("SRCREV", k[2])
                self.d.setVar("UPSTREAM_CHECK_GITTAGREGEX", k[3])
                ud = bb.fetch2.FetchData(k[1], self.d)
                pupver = ud.method.latest_versionstring(ud, self.d)
                verstring = pupver[0]
                r = bb.utils.vercmp_string(v, verstring)
                self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))

        def test_wget_latest_versionstring(self):
            for k, v in self.test_wget_uris.items():
                self.d.setVar("PN", k[0])
                self.d.setVar("UPSTREAM_CHECK_URI", k[2])
                self.d.setVar("UPSTREAM_CHECK_REGEX", k[3])
                ud = bb.fetch2.FetchData(k[1], self.d)
                pupver = ud.method.latest_versionstring(ud, self.d)
                verstring = pupver[0]
                r = bb.utils.vercmp_string(v, verstring)
                self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))


class FetchCheckStatusTest(FetcherTest):
    test_wget_uris = ["http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2",
                      "http://www.cups.org/software/ipptool/ipptool-20130731-linux-ubuntu-i686.tar.gz",
                      "http://www.cups.org/",
                      "http://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz",
                      "http://downloads.yoctoproject.org/releases/sato/sato-engine-0.2.tar.gz",
                      "http://downloads.yoctoproject.org/releases/sato/sato-engine-0.3.tar.gz",
                      "https://yoctoproject.org/",
                      "https://yoctoproject.org/documentation",
                      "http://downloads.yoctoproject.org/releases/opkg/opkg-0.1.7.tar.gz",
                      "http://downloads.yoctoproject.org/releases/opkg/opkg-0.3.0.tar.gz",
                      "ftp://ftp.gnu.org/gnu/autoconf/autoconf-2.60.tar.gz",
                      "ftp://ftp.gnu.org/gnu/chess/gnuchess-5.08.tar.gz",
                      "ftp://ftp.gnu.org/gnu/gmp/gmp-4.0.tar.gz",
                      # GitHub releases are hosted on Amazon S3, which doesn't support HEAD
                      "https://github.com/kergoth/tslib/releases/download/1.1/tslib-1.1.tar.xz"
                      ]

    if os.environ.get("BB_SKIP_NETTESTS") == "yes":
        print("Unset BB_SKIP_NETTESTS to run network tests")
    else:

        def test_wget_checkstatus(self):
            fetch = bb.fetch2.Fetch(self.test_wget_uris, self.d)
            for u in self.test_wget_uris:
                ud = fetch.ud[u]
                m = ud.method
                ret = m.checkstatus(fetch, ud, self.d)
                self.assertTrue(ret, msg="URI %s, can't check status" % (u))


        def test_wget_checkstatus_connection_cache(self):
            from bb.fetch2 import FetchConnectionCache

            connection_cache = FetchConnectionCache()
            fetch = bb.fetch2.Fetch(self.test_wget_uris, self.d,
                        connection_cache = connection_cache)

            for u in self.test_wget_uris:
                ud = fetch.ud[u]
                m = ud.method
                ret = m.checkstatus(fetch, ud, self.d)
                self.assertTrue(ret, msg="URI %s, can't check status" % (u))

            connection_cache.close_connections()

@@ -1,147 +0,0 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# BitBake Test for lib/bb/parse/
#
# Copyright (C) 2015 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#

import unittest
import tempfile
import logging
import bb
import os

logger = logging.getLogger('BitBake.TestParse')

import bb.parse
import bb.data
import bb.siggen

class ParseTest(unittest.TestCase):

    testfile = """
A = "1"
B = "2"
do_install() {
echo "hello"
}

C = "3"
"""

    def setUp(self):
        self.d = bb.data.init()
        bb.parse.siggen = bb.siggen.init(self.d)

    def parsehelper(self, content, suffix = ".bb"):

        f = tempfile.NamedTemporaryFile(suffix = suffix)
        f.write(content)
        f.flush()
        os.chdir(os.path.dirname(f.name))
        return f

    def test_parse_simple(self):
        f = self.parsehelper(self.testfile)
        d = bb.parse.handle(f.name, self.d)['']
        self.assertEqual(d.getVar("A", True), "1")
        self.assertEqual(d.getVar("B", True), "2")
        self.assertEqual(d.getVar("C", True), "3")

    def test_parse_incomplete_function(self):
        testfileB = self.testfile.replace("}", "")
        f = self.parsehelper(testfileB)
        with self.assertRaises(bb.parse.ParseError):
            d = bb.parse.handle(f.name, self.d)['']

    overridetest = """
RRECOMMENDS_${PN} = "a"
RRECOMMENDS_${PN}_libc = "b"
OVERRIDES = "libc:${PN}"
PN = "gtk+"
"""

    def test_parse_overrides(self):
        f = self.parsehelper(self.overridetest)
        d = bb.parse.handle(f.name, self.d)['']
        self.assertEqual(d.getVar("RRECOMMENDS", True), "b")
        bb.data.expandKeys(d)
        self.assertEqual(d.getVar("RRECOMMENDS", True), "b")
        d.setVar("RRECOMMENDS_gtk+", "c")
        self.assertEqual(d.getVar("RRECOMMENDS", True), "c")

    overridetest2 = """
EXTRA_OECONF = ""
EXTRA_OECONF_class-target = "b"
EXTRA_OECONF_append = " c"
"""

    def test_parse_overrides(self):
        f = self.parsehelper(self.overridetest2)
        d = bb.parse.handle(f.name, self.d)['']
        d.appendVar("EXTRA_OECONF", " d")
        d.setVar("OVERRIDES", "class-target")
        self.assertEqual(d.getVar("EXTRA_OECONF", True), "b c d")

    overridetest3 = """
DESCRIPTION = "A"
DESCRIPTION_${PN}-dev = "${DESCRIPTION} B"
PN = "bc"
"""

    def test_parse_combinations(self):
        f = self.parsehelper(self.overridetest3)
        d = bb.parse.handle(f.name, self.d)['']
        bb.data.expandKeys(d)
        self.assertEqual(d.getVar("DESCRIPTION_bc-dev", True), "A B")
        d.setVar("DESCRIPTION", "E")
        d.setVar("DESCRIPTION_bc-dev", "C D")
        d.setVar("OVERRIDES", "bc-dev")
        self.assertEqual(d.getVar("DESCRIPTION", True), "C D")


    classextend = """
VAR_var_override1 = "B"
EXTRA = ":override1"
OVERRIDES = "nothing${EXTRA}"

BBCLASSEXTEND = "###CLASS###"
"""
    classextend_bbclass = """
EXTRA = ""
python () {
    d.renameVar("VAR_var", "VAR_var2")
}
"""

    #
    # Test based upon a real world data corruption issue. One
    # data store changing a variable poked through into a different data
    # store. This test case replicates that issue where the value 'B' would
    # become unset/disappear.
    #
    def test_parse_classextend_contamination(self):
        cls = self.parsehelper(self.classextend_bbclass, suffix=".bbclass")
        #clsname = os.path.basename(cls.name).replace(".bbclass", "")
        self.classextend = self.classextend.replace("###CLASS###", cls.name)
        f = self.parsehelper(self.classextend)
        alldata = bb.parse.handle(f.name, self.d)
        d1 = alldata['']
        d2 = alldata[cls.name]
        self.assertEqual(d1.getVar("VAR_var", True), "B")
        self.assertEqual(d2.getVar("VAR_var", True), None)

@@ -22,8 +22,6 @@
import unittest
import bb
import os
import tempfile
import re

class VerCmpString(unittest.TestCase):

@@ -38,10 +36,6 @@ class VerCmpString(unittest.TestCase):
        self.assertTrue(result < 0)
        result = bb.utils.vercmp_string('1.1', '1_p2')
        self.assertTrue(result < 0)
        result = bb.utils.vercmp_string('1.0', '1.0+1.1-beta1')
        self.assertTrue(result < 0)
        result = bb.utils.vercmp_string('1.1', '1.0+1.1-beta1')
        self.assertTrue(result > 0)

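# bb.utils.vercmp_string compares like strcmp: negative when the first
# version is older, zero when equal, positive when newer -- the pattern
# the assertions above and the latest_versionstring tests depend on.
import bb.utils

assert bb.utils.vercmp_string('1.0', '1.1') < 0
assert bb.utils.vercmp_string('1.1', '1.1') == 0
assert bb.utils.vercmp_string('2014.01', '1.99.4') > 0
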
def test_explode_dep_versions(self):
|
||||
correctresult = {"foo" : ["= 1.10"]}
|
||||
@@ -107,497 +101,3 @@ class Path(unittest.TestCase):
|
||||
for arg1, correctresult in checkitems:
|
||||
result = bb.utils._check_unsafe_delete_path(arg1)
|
||||
self.assertEqual(result, correctresult, '_check_unsafe_delete_path("%s") != %s' % (arg1, correctresult))
|
||||
|
||||
|
||||
class EditMetadataFile(unittest.TestCase):
|
||||
_origfile = """
|
||||
# A comment
|
||||
HELLO = "oldvalue"
|
||||
|
||||
THIS = "that"
|
||||
|
||||
# Another comment
|
||||
NOCHANGE = "samevalue"
|
||||
OTHER = 'anothervalue'
|
||||
|
||||
MULTILINE = "a1 \\
|
||||
a2 \\
|
||||
a3"
|
||||
|
||||
MULTILINE2 := " \\
|
||||
b1 \\
|
||||
b2 \\
|
||||
b3 \\
|
||||
"
|
||||
|
||||
|
||||
MULTILINE3 = " \\
|
||||
c1 \\
|
||||
c2 \\
|
||||
c3 \\
|
||||
"
|
||||
|
||||
do_functionname() {
|
||||
command1 ${VAL1} ${VAL2}
|
||||
command2 ${VAL3} ${VAL4}
|
||||
}
|
||||
"""
|
||||
def _testeditfile(self, varvalues, compareto, dummyvars=None):
|
||||
if dummyvars is None:
|
||||
dummyvars = []
|
||||
with tempfile.NamedTemporaryFile('w', delete=False) as tf:
|
||||
tf.write(self._origfile)
|
||||
tf.close()
|
||||
try:
|
||||
varcalls = []
|
||||
def handle_file(varname, origvalue, op, newlines):
|
||||
self.assertIn(varname, varvalues, 'Callback called for variable %s not in the list!' % varname)
|
||||
self.assertNotIn(varname, dummyvars, 'Callback called for variable %s in dummy list!' % varname)
|
||||
varcalls.append(varname)
|
||||
return varvalues[varname]
|
||||
|
||||
bb.utils.edit_metadata_file(tf.name, varvalues.keys(), handle_file)
|
||||
with open(tf.name) as f:
|
||||
modfile = f.readlines()
|
||||
# Ensure the output matches the expected output
|
||||
self.assertEqual(compareto.splitlines(True), modfile)
|
||||
# Ensure the callback function was called for every variable we asked for
|
||||
# (plus allow testing behaviour when a requested variable is not present)
|
||||
self.assertEqual(sorted(varvalues.keys()), sorted(varcalls + dummyvars))
|
||||
finally:
|
||||
os.remove(tf.name)
|
||||
|
||||
|
||||
def test_edit_metadata_file_nochange(self):
|
||||
# Test file doesn't get modified with nothing to do
|
||||
self._testeditfile({}, self._origfile)
|
||||
# Test file doesn't get modified with only dummy variables
|
||||
self._testeditfile({'DUMMY1': ('should_not_set', None, 0, True),
|
||||
'DUMMY2': ('should_not_set_again', None, 0, True)}, self._origfile, dummyvars=['DUMMY1', 'DUMMY2'])
|
||||
# Test file doesn't get modified with some the same values
|
||||
self._testeditfile({'THIS': ('that', None, 0, True),
|
||||
'OTHER': ('anothervalue', None, 0, True),
|
||||
'MULTILINE3': (' c1 c2 c3 ', None, 4, False)}, self._origfile)
|
||||
|
||||
def test_edit_metadata_file_1(self):
|
||||
|
||||
newfile1 = """
|
||||
# A comment
|
||||
HELLO = "newvalue"
|
||||
|
||||
THIS = "that"
|
||||
|
||||
# Another comment
|
||||
NOCHANGE = "samevalue"
|
||||
OTHER = 'anothervalue'
|
||||
|
||||
MULTILINE = "a1 \\
|
||||
a2 \\
|
||||
a3"
|
||||
|
||||
MULTILINE2 := " \\
|
||||
b1 \\
|
||||
b2 \\
|
||||
b3 \\
|
||||
"
|
||||
|
||||
|
||||
MULTILINE3 = " \\
|
||||
c1 \\
|
||||
c2 \\
|
||||
c3 \\
"

do_functionname() {
command1 ${VAL1} ${VAL2}
command2 ${VAL3} ${VAL4}
}
"""
        self._testeditfile({'HELLO': ('newvalue', None, 4, True)}, newfile1)

    def test_edit_metadata_file_2(self):
        newfile2 = """
# A comment
HELLO = "oldvalue"

THIS = "that"

# Another comment
NOCHANGE = "samevalue"
OTHER = 'anothervalue'

MULTILINE = " \\
d1 \\
d2 \\
d3 \\
"

MULTILINE2 := " \\
b1 \\
b2 \\
b3 \\
"


MULTILINE3 = "nowsingle"

do_functionname() {
command1 ${VAL1} ${VAL2}
command2 ${VAL3} ${VAL4}
}
"""
        self._testeditfile({'MULTILINE': (['d1','d2','d3'], None, 4, False),
                            'MULTILINE3': ('nowsingle', None, 4, True),
                            'NOTPRESENT': (['a', 'b'], None, 4, False)}, newfile2, dummyvars=['NOTPRESENT'])

    def test_edit_metadata_file_3(self):
        newfile3 = """
# A comment
HELLO = "oldvalue"

# Another comment
NOCHANGE = "samevalue"
OTHER = "yetanothervalue"

MULTILINE = "e1 \\
e2 \\
e3 \\
"

MULTILINE2 := "f1 \\
\tf2 \\
\t"


MULTILINE3 = " \\
c1 \\
c2 \\
c3 \\
"

do_functionname() {
othercommand_one a b c
othercommand_two d e f
}
"""

        self._testeditfile({'do_functionname()': (['othercommand_one a b c', 'othercommand_two d e f'], None, 4, False),
                            'MULTILINE2': (['f1', 'f2'], None, '\t', True),
                            'MULTILINE': (['e1', 'e2', 'e3'], None, -1, True),
                            'THIS': (None, None, 0, False),
                            'OTHER': ('yetanothervalue', None, 0, True)}, newfile3)

    def test_edit_metadata_file_4(self):
        newfile4 = """
# A comment
HELLO = "oldvalue"

THIS = "that"

# Another comment
OTHER = 'anothervalue'

MULTILINE = "a1 \\
a2 \\
a3"

MULTILINE2 := " \\
b1 \\
b2 \\
b3 \\
"


"""

        self._testeditfile({'NOCHANGE': (None, None, 0, False),
                            'MULTILINE3': (None, None, 0, False),
                            'THIS': ('that', None, 0, False),
                            'do_functionname()': (None, None, 0, False)}, newfile4)

    def test_edit_metadata(self):
        newfile5 = """
# A comment
HELLO = "hithere"

# A new comment
THIS += "that"

# Another comment
NOCHANGE = "samevalue"
OTHER = 'anothervalue'

MULTILINE = "a1 \\
a2 \\
a3"

MULTILINE2 := " \\
b1 \\
b2 \\
b3 \\
"


MULTILINE3 = " \\
c1 \\
c2 \\
c3 \\
"

NEWVAR = "value"

do_functionname() {
command1 ${VAL1} ${VAL2}
command2 ${VAL3} ${VAL4}
}
"""

        def handle_var(varname, origvalue, op, newlines):
            if varname == 'THIS':
                newlines.append('# A new comment\n')
            elif varname == 'do_functionname()':
                newlines.append('NEWVAR = "value"\n')
                newlines.append('\n')
            valueitem = varvalues.get(varname, None)
            if valueitem:
                return valueitem
            else:
                return (origvalue, op, 0, True)

        varvalues = {'HELLO': ('hithere', None, 0, True), 'THIS': ('that', '+=', 0, True)}
        varlist = ['HELLO', 'THIS', 'do_functionname()']
        (updated, newlines) = bb.utils.edit_metadata(self._origfile.splitlines(True), varlist, handle_var)
        self.assertTrue(updated, 'List should be updated but isn\'t')
        self.assertEqual(newlines, newfile5.splitlines(True))

    # Make sure the orig value matches what we expect it to be
    def test_edit_metadata_origvalue(self):
        origfile = """
MULTILINE = " stuff \\
morestuff"
"""
        expected_value = "stuff morestuff"
        global value_in_callback
        value_in_callback = ""

        def handle_var(varname, origvalue, op, newlines):
            global value_in_callback
            value_in_callback = origvalue
            return (origvalue, op, -1, False)

        bb.utils.edit_metadata(origfile.splitlines(True),
                               ['MULTILINE'],
                               handle_var)

        testvalue = re.sub('\s+', ' ', value_in_callback.strip())
        self.assertEqual(expected_value, testvalue)
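For reference, a minimal sketch of driving bb.utils.edit_metadata(), inferred from the tests above rather than from any bitbake documentation; the input lines are made up for illustration. The callback returns a tuple of (new value, or None to drop the assignment; operator; indent; minbreak):

import bb.utils

conf_lines = ['HELLO = "oldvalue"\n', 'KEEP = "this"\n']   # made-up input

def handle_var(varname, origvalue, op, newlines):
    if varname == 'HELLO':
        return ('newvalue', op, 0, True)   # rewrite the value in place
    return (origvalue, op, 0, True)        # leave anything else untouched

updated, newlines = bb.utils.edit_metadata(conf_lines, ['HELLO'], handle_var)
# expected: updated == True and newlines[0] == 'HELLO = "newvalue"\n'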
class EditBbLayersConf(unittest.TestCase):

    def _test_bblayers_edit(self, before, after, add, remove, notadded, notremoved):
        with tempfile.NamedTemporaryFile('w', delete=False) as tf:
            tf.write(before)
            tf.close()
            try:
                actual_notadded, actual_notremoved = bb.utils.edit_bblayers_conf(tf.name, add, remove)
                with open(tf.name) as f:
                    actual_after = f.readlines()
                self.assertEqual(after.splitlines(True), actual_after)
                self.assertEqual(notadded, actual_notadded)
                self.assertEqual(notremoved, actual_notremoved)
            finally:
                os.remove(tf.name)

    def test_bblayers_remove(self):
        before = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS = " \
/home/user/path/layer1 \
/home/user/path/layer2 \
/home/user/path/subpath/layer3 \
/home/user/path/layer4 \
"
"""
        after = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS = " \
/home/user/path/layer1 \
/home/user/path/subpath/layer3 \
/home/user/path/layer4 \
"
"""
        self._test_bblayers_edit(before, after,
                                 None,
                                 '/home/user/path/layer2',
                                 [],
                                 [])

    def test_bblayers_add(self):
        before = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS = " \
/home/user/path/layer1 \
/home/user/path/layer2 \
/home/user/path/subpath/layer3 \
/home/user/path/layer4 \
"
"""
        after = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS = " \
/home/user/path/layer1 \
/home/user/path/layer2 \
/home/user/path/subpath/layer3 \
/home/user/path/layer4 \
/other/path/to/layer5 \
"
"""
        self._test_bblayers_edit(before, after,
                                 '/other/path/to/layer5/',
                                 None,
                                 [],
                                 [])

    def test_bblayers_add_remove(self):
        before = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS = " \
/home/user/path/layer1 \
/home/user/path/layer2 \
/home/user/path/subpath/layer3 \
/home/user/path/layer4 \
"
"""
        after = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS = " \
/home/user/path/layer1 \
/home/user/path/layer2 \
/home/user/path/layer4 \
/other/path/to/layer5 \
"
"""
        self._test_bblayers_edit(before, after,
                                 ['/other/path/to/layer5', '/home/user/path/layer2/'], '/home/user/path/subpath/layer3/',
                                 ['/home/user/path/layer2'],
                                 [])

    def test_bblayers_add_remove_home(self):
        before = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS = " \
~/path/layer1 \
~/path/layer2 \
~/otherpath/layer3 \
~/path/layer4 \
"
"""
        after = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS = " \
~/path/layer2 \
~/path/layer4 \
~/path2/layer5 \
"
"""
        self._test_bblayers_edit(before, after,
                                 [os.environ['HOME'] + '/path/layer4', '~/path2/layer5'],
                                 [os.environ['HOME'] + '/otherpath/layer3', '~/path/layer1', '~/path/notinlist'],
                                 [os.environ['HOME'] + '/path/layer4'],
                                 ['~/path/notinlist'])

    def test_bblayers_add_remove_plusequals(self):
        before = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS += " \
/home/user/path/layer1 \
/home/user/path/layer2 \
"
"""
        after = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS += " \
/home/user/path/layer2 \
/home/user/path/layer3 \
"
"""
        self._test_bblayers_edit(before, after,
                                 '/home/user/path/layer3',
                                 '/home/user/path/layer1',
                                 [],
                                 [])

    def test_bblayers_add_remove_plusequals2(self):
        before = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS += " \
/home/user/path/layer1 \
/home/user/path/layer2 \
/home/user/path/layer3 \
"
BBLAYERS += "/home/user/path/layer4"
BBLAYERS += "/home/user/path/layer5"
"""
        after = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS += " \
/home/user/path/layer2 \
/home/user/path/layer3 \
"
BBLAYERS += "/home/user/path/layer5"
BBLAYERS += "/home/user/otherpath/layer6"
"""
        self._test_bblayers_edit(before, after,
                                 ['/home/user/otherpath/layer6', '/home/user/path/layer3'], ['/home/user/path/layer1', '/home/user/path/layer4', '/home/user/path/layer7'],
                                 ['/home/user/path/layer3'],
                                 ['/home/user/path/layer7'])
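Likewise, a minimal sketch of calling bb.utils.edit_bblayers_conf() as exercised by the harness above; the paths below are hypothetical:

import bb.utils

# The second and third arguments accept a single path or a list of paths;
# the return value is (notadded, notremoved): layers that were already
# present, and layers that were not found for removal.
notadded, notremoved = bb.utils.edit_bblayers_conf(
    '/path/to/build/conf/bblayers.conf',   # hypothetical bblayers.conf
    '/home/user/path/meta-custom',         # layer to add to BBLAYERS
    None)                                  # nothing to remove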
@@ -25,33 +25,30 @@ import bb.cache
 import bb.cooker
 import bb.providers
 import bb.utils
-from bb.cooker import state, BBCooker, CookerFeatures
+from bb.cooker import state, BBCooker
 from bb.cookerdata import CookerConfiguration, ConfigParameters
 import bb.fetch2

 class Tinfoil:
-    def __init__(self, output=sys.stdout, tracking=False):
+    def __init__(self, output=sys.stdout):
         # Needed to avoid deprecation warnings with python 2.6
         warnings.filterwarnings("ignore", category=DeprecationWarning)

         # Set up logging
         self.logger = logging.getLogger('BitBake')
-        self._log_hdlr = logging.StreamHandler(output)
-        bb.msg.addDefaultlogFilter(self._log_hdlr)
+        console = logging.StreamHandler(output)
+        bb.msg.addDefaultlogFilter(console)
         format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
         if output.isatty():
             format.enable_color()
-        self._log_hdlr.setFormatter(format)
-        self.logger.addHandler(self._log_hdlr)
+        console.setFormatter(format)
+        self.logger.addHandler(console)

         self.config = CookerConfiguration()
         configparams = TinfoilConfigParameters(parse_only=True)
         self.config.setConfigParameters(configparams)
         self.config.setServerRegIdleCallback(self.register_idle_function)
-        features = []
-        if tracking:
-            features.append(CookerFeatures.BASEDATASTORE_TRACKING)
-        self.cooker = BBCooker(self.config, features)
+        self.cooker = BBCooker(self.config)
         self.config_data = self.cooker.data
         bb.providers.logger.setLevel(logging.ERROR)
         self.cooker_data = None
@@ -84,19 +81,13 @@ class Tinfoil:
         else:
             self.parseRecipes()

     def shutdown(self):
         self.cooker.shutdown(force=True)
-        self.cooker.post_serve()
-        self.cooker.unlockBitbake()
-        self.logger.removeHandler(self._log_hdlr)

 class TinfoilConfigParameters(ConfigParameters):

     def __init__(self, **options):
         self.initial_options = options
         super(TinfoilConfigParameters, self).__init__()

-    def parseCommandLine(self, argv=sys.argv):
+    def parseCommandLine(self):
         class DummyOptions:
             def __init__(self, initial_options):
                 for key, val in initial_options.items():
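For orientation, a minimal usage sketch of the Tinfoil class as it looks on the daisy side of the hunks above; the module path bb.tinfoil and the BBPATH query are assumptions for illustration, not taken from the diff:

import sys
from bb.tinfoil import Tinfoil   # assumed module path (bitbake/lib/bb/tinfoil.py)

tinfoil = Tinfoil(output=sys.stdout)   # daisy signature: no 'tracking' argument
try:
    # config_data is the cooker's global datastore, set up in __init__ above
    print(tinfoil.config_data.getVar('BBPATH', True))
finally:
    tinfoil.shutdown()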
File diff suppressed because it is too large
437  bitbake/lib/bb/ui/crumbs/builddetailspage.py (executable file)
@@ -0,0 +1,437 @@
#!/usr/bin/env python
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2012 Intel Corporation
#
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
# Authored by Shane Wang <shane.wang@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os   # needed by update_build_status() below
import gtk
import pango
import gobject
import bb.process
from bb.ui.crumbs.progressbar import HobProgressBar
from bb.ui.crumbs.hobwidget import hic, HobNotebook, HobAltButton, HobWarpCellRendererText, HobButton, HobInfoButton
from bb.ui.crumbs.runningbuild import RunningBuildTreeView
from bb.ui.crumbs.runningbuild import BuildFailureTreeView
from bb.ui.crumbs.hobpages import HobPage
from bb.ui.crumbs.hobcolor import HobColors

class BuildConfigurationTreeView(gtk.TreeView):
    def __init__ (self):
        gtk.TreeView.__init__(self)
        self.set_rules_hint(False)
        self.set_headers_visible(False)
        self.set_property("hover-expand", True)
        self.get_selection().set_mode(gtk.SELECTION_SINGLE)

        # The icon that indicates whether we're building or failed.
        renderer0 = gtk.CellRendererText()
        renderer0.set_property('font-desc', pango.FontDescription('courier bold 12'))
        col0 = gtk.TreeViewColumn ("Name", renderer0, text=0)
        self.append_column (col0)

        # The message of configuration.
        renderer1 = HobWarpCellRendererText(col_number=1)
        col1 = gtk.TreeViewColumn ("Values", renderer1, text=1)
        self.append_column (col1)

    def set_vars(self, key="", var=[""]):
        d = {}
        if type(var) == str:
            d = {key: [var]}
        elif type(var) == list and len(var) > 1:
            # create the sub item line
            l = []
            text = ""
            for item in var:
                text = " - " + item
                l.append(text)
            d = {key: var}

        return d

    def set_config_model(self, show_vars):
        listmodel = gtk.TreeStore(gobject.TYPE_STRING, gobject.TYPE_STRING)
        parent = None
        for var in show_vars:
            for subitem in var.items():
                name = subitem[0]
                is_parent = True
                for value in subitem[1]:
                    if is_parent:
                        parent = listmodel.append(parent, (name, value))
                        is_parent = False
                    else:
                        listmodel.append(parent, (None, value))
                name = " - "
                parent = None
        # renew the tree model after get the configuration messages
        self.set_model(listmodel)

    def show(self, src_config_info, src_params):
        vars = []
        vars.append(self.set_vars("BB version:", src_params.bb_version))
        vars.append(self.set_vars("Target arch:", src_params.target_arch))
        vars.append(self.set_vars("Target OS:", src_params.target_os))
        vars.append(self.set_vars("Machine:", src_config_info.curr_mach))
        vars.append(self.set_vars("Distro:", src_config_info.curr_distro))
        vars.append(self.set_vars("Distro version:", src_params.distro_version))
        vars.append(self.set_vars("SDK machine:", src_config_info.curr_sdk_machine))
        vars.append(self.set_vars("Tune features:", src_params.tune_pkgarch))
        vars.append(self.set_vars("Layers:", src_config_info.layers))

        for path in src_config_info.layers:
            import os, os.path
            if os.path.exists(path):
                branch = bb.process.run('cd %s; git branch | grep "^* " | tr -d "* "' % path)[0]
                if branch.startswith("fatal:"):
                    branch = "(unknown)"
                if branch:
                    branch = branch.strip('\n')
                    vars.append(self.set_vars("Branch:", branch))
                break

        self.set_config_model(vars)

    def reset(self):
        self.set_model(None)

#
# BuildDetailsPage
#

class BuildDetailsPage (HobPage):

    def __init__(self, builder):
        super(BuildDetailsPage, self).__init__(builder, "Building ...")

        self.num_of_issues = 0
        self.endpath = (0,)
        # create visual elements
        self.create_visual_elements()

    def create_visual_elements(self):
        # create visual elements
        self.vbox = gtk.VBox(False, 12)

        self.progress_box = gtk.VBox(False, 12)
        self.task_status = gtk.Label("\n") # to ensure layout is correct
        self.task_status.set_alignment(0.0, 0.5)
        self.progress_box.pack_start(self.task_status, expand=False, fill=False)
        self.progress_hbox = gtk.HBox(False, 6)
        self.progress_box.pack_end(self.progress_hbox, expand=True, fill=True)
        self.progress_bar = HobProgressBar()
        self.progress_hbox.pack_start(self.progress_bar, expand=True, fill=True)
        self.stop_button = HobAltButton("Stop")
        self.stop_button.connect("clicked", self.stop_button_clicked_cb)
        self.stop_button.set_sensitive(False)
        self.progress_hbox.pack_end(self.stop_button, expand=False, fill=False)

        self.notebook = HobNotebook()
        self.config_tv = BuildConfigurationTreeView()
        self.scrolled_view_config = gtk.ScrolledWindow ()
        self.scrolled_view_config.set_policy(gtk.POLICY_NEVER, gtk.POLICY_ALWAYS)
        self.scrolled_view_config.add(self.config_tv)
        self.notebook.append_page(self.scrolled_view_config, "Build configuration")

        self.failure_tv = BuildFailureTreeView()
        self.failure_model = self.builder.handler.build.model.failure_model()
        self.failure_tv.set_model(self.failure_model)
        self.scrolled_view_failure = gtk.ScrolledWindow ()
        self.scrolled_view_failure.set_policy(gtk.POLICY_NEVER, gtk.POLICY_ALWAYS)
        self.scrolled_view_failure.add(self.failure_tv)
        self.notebook.append_page(self.scrolled_view_failure, "Issues")

        self.build_tv = RunningBuildTreeView(readonly=True, hob=True)
        self.build_tv.set_model(self.builder.handler.build.model)
        self.scrolled_view_build = gtk.ScrolledWindow ()
        self.scrolled_view_build.set_policy(gtk.POLICY_NEVER, gtk.POLICY_ALWAYS)
        self.scrolled_view_build.add(self.build_tv)
        self.notebook.append_page(self.scrolled_view_build, "Log")

        self.builder.handler.build.model.connect_after("row-changed", self.scroll_to_present_row, self.scrolled_view_build.get_vadjustment(), self.build_tv)

        self.button_box = gtk.HBox(False, 6)
        self.back_button = HobAltButton('<< Back')
        self.back_button.connect("clicked", self.back_button_clicked_cb)
        self.button_box.pack_start(self.back_button, expand=False, fill=False)

    def update_build_status(self, current, total, task):
        recipe_path, recipe_task = task.split(", ")
        recipe = os.path.basename(recipe_path).rstrip(".bb")
        tsk_msg = "<b>Running task %s of %s:</b> %s\n<b>Recipe:</b> %s" % (current, total, recipe_task, recipe)
        self.task_status.set_markup(tsk_msg)
        self.stop_button.set_sensitive(True)

    def reset_build_status(self):
        self.task_status.set_markup("\n") # to ensure layout is correct
        self.endpath = (0,)

    def show_issues(self):
        self.num_of_issues += 1
        self.notebook.show_indicator_icon("Issues", self.num_of_issues)
        self.notebook.queue_draw()

    def reset_issues(self):
        self.num_of_issues = 0
        self.notebook.hide_indicator_icon("Issues")

    def _remove_all_widget(self):
        children = self.vbox.get_children() or []
        for child in children:
            self.vbox.remove(child)
        children = self.box_group_area.get_children() or []
        for child in children:
            self.box_group_area.remove(child)
        children = self.get_children() or []
        for child in children:
            self.remove(child)

    def add_build_fail_top_bar(self, actions, log_file=None):
        primary_action = "Edit %s" % actions

        color = HobColors.ERROR
        build_fail_top = gtk.EventBox()
        #build_fail_top.set_size_request(-1, 200)
        build_fail_top.modify_bg(gtk.STATE_NORMAL, gtk.gdk.color_parse(color))

        build_fail_tab = gtk.Table(14, 46, True)
        build_fail_top.add(build_fail_tab)

        icon = gtk.Image()
        icon_pix_buffer = gtk.gdk.pixbuf_new_from_file(hic.ICON_INDI_ERROR_FILE)
        icon.set_from_pixbuf(icon_pix_buffer)
        build_fail_tab.attach(icon, 1, 4, 0, 6)

        label = gtk.Label()
        label.set_alignment(0.0, 0.5)
        label.set_markup("<span size='x-large'><b>%s</b></span>" % self.title)
        build_fail_tab.attach(label, 4, 26, 0, 6)

        label = gtk.Label()
        label.set_alignment(0.0, 0.5)
        # Ensure variable disk_full is defined
        if not hasattr(self.builder, 'disk_full'):
            self.builder.disk_full = False

        if self.builder.disk_full:
            markup = "<span size='medium'>There is no disk space left, so Hob cannot finish building your image. Free up some disk space\n"
            markup += "and restart the build. Check the \"Issues\" tab for more details</span>"
            label.set_markup(markup)
        else:
            label.set_markup("<span size='medium'>Check the \"Issues\" information for more details</span>")
        build_fail_tab.attach(label, 4, 40, 4, 9)

        # create button 'Edit packages'
        action_button = HobButton(primary_action)
        #action_button.set_size_request(-1, 40)
        action_button.set_tooltip_text("Edit the %s parameters" % actions)
        action_button.connect('clicked', self.failure_primary_action_button_clicked_cb, primary_action)

        if log_file:
            open_log_button = HobAltButton("Open log")
            open_log_button.set_relief(gtk.RELIEF_HALF)
            open_log_button.set_tooltip_text("Open the build's log file")
            open_log_button.connect('clicked', self.open_log_button_clicked_cb, log_file)

        attach_pos = (24 if log_file else 14)
        file_bug_button = HobAltButton('File a bug')
        file_bug_button.set_relief(gtk.RELIEF_HALF)
        file_bug_button.set_tooltip_text("Open the Yocto Project bug tracking website")
        file_bug_button.connect('clicked', self.failure_activate_file_bug_link_cb)

        if not self.builder.disk_full:
            build_fail_tab.attach(action_button, 4, 13, 9, 12)
            if log_file:
                build_fail_tab.attach(open_log_button, 14, 23, 9, 12)
            build_fail_tab.attach(file_bug_button, attach_pos, attach_pos + 9, 9, 12)

        else:
            restart_build = HobButton("Restart the build")
            restart_build.set_tooltip_text("Restart the build")
            restart_build.connect('clicked', self.restart_build_button_clicked_cb)

            build_fail_tab.attach(restart_build, 4, 13, 9, 12)
            build_fail_tab.attach(action_button, 14, 23, 9, 12)
            if log_file:
                build_fail_tab.attach(open_log_button, attach_pos, attach_pos + 9, 9, 12)

        self.builder.disk_full = False
        return build_fail_top

    def show_fail_page(self, title):
        self._remove_all_widget()
        self.title = "Hob cannot build your %s" % title

        self.build_fail_bar = self.add_build_fail_top_bar(title, self.builder.current_logfile)

        self.pack_start(self.group_align, expand=True, fill=True)
        self.box_group_area.pack_start(self.build_fail_bar, expand=False, fill=False)
        self.box_group_area.pack_start(self.vbox, expand=True, fill=True)

        self.vbox.pack_start(self.notebook, expand=True, fill=True)
        self.show_all()
        self.notebook.set_page("Issues")
        self.back_button.hide()

    def add_build_stop_top_bar(self, action, log_file=None):
        color = HobColors.LIGHT_GRAY
        build_stop_top = gtk.EventBox()
        #build_stop_top.set_size_request(-1, 200)
        build_stop_top.modify_bg(gtk.STATE_NORMAL, gtk.gdk.color_parse(color))
        build_stop_top.set_flags(gtk.CAN_DEFAULT)
        build_stop_top.grab_default()

        build_stop_tab = gtk.Table(11, 46, True)
        build_stop_top.add(build_stop_tab)

        icon = gtk.Image()
        icon_pix_buffer = gtk.gdk.pixbuf_new_from_file(hic.ICON_INFO_HOVER_FILE)
        icon.set_from_pixbuf(icon_pix_buffer)
        build_stop_tab.attach(icon, 1, 4, 0, 6)

        label = gtk.Label()
        label.set_alignment(0.0, 0.5)
        label.set_markup("<span size='x-large'><b>%s</b></span>" % self.title)
        build_stop_tab.attach(label, 4, 26, 0, 6)

        action_button = HobButton("Edit %s" % action)
        action_button.set_size_request(-1, 40)
        if action == "image":
            action_button.set_tooltip_text("Edit the image parameters")
        elif action == "recipes":
            action_button.set_tooltip_text("Edit the included recipes")
        elif action == "packages":
            action_button.set_tooltip_text("Edit the included packages")
        action_button.connect('clicked', self.stop_primary_action_button_clicked_cb, action)
        build_stop_tab.attach(action_button, 4, 13, 6, 9)

        if log_file:
            open_log_button = HobAltButton("Open log")
            open_log_button.set_relief(gtk.RELIEF_HALF)
            open_log_button.set_tooltip_text("Open the build's log file")
            open_log_button.connect('clicked', self.open_log_button_clicked_cb, log_file)
            build_stop_tab.attach(open_log_button, 14, 23, 6, 9)

        attach_pos = (24 if log_file else 14)
        build_button = HobAltButton("Build new image")
        #build_button.set_size_request(-1, 40)
        build_button.set_tooltip_text("Create a new image from scratch")
        build_button.connect('clicked', self.new_image_button_clicked_cb)
        build_stop_tab.attach(build_button, attach_pos, attach_pos + 9, 6, 9)

        return build_stop_top, action_button

    def show_stop_page(self, action):
        self._remove_all_widget()
        self.title = "Build stopped"
        self.build_stop_bar, action_button = self.add_build_stop_top_bar(action, self.builder.current_logfile)

        self.pack_start(self.group_align, expand=True, fill=True)
        self.box_group_area.pack_start(self.build_stop_bar, expand=False, fill=False)
        self.box_group_area.pack_start(self.vbox, expand=True, fill=True)

        self.vbox.pack_start(self.notebook, expand=True, fill=True)
        self.show_all()
        self.back_button.hide()
        return action_button

    def show_page(self, step):
        self._remove_all_widget()
        if step == self.builder.PACKAGE_GENERATING or step == self.builder.FAST_IMAGE_GENERATING:
            self.title = "Building packages ..."
        else:
            self.title = "Building image ..."
        self.build_details_top = self.add_onto_top_bar(None)
        self.pack_start(self.build_details_top, expand=False, fill=False)
        self.pack_start(self.group_align, expand=True, fill=True)

        self.box_group_area.pack_start(self.vbox, expand=True, fill=True)

        self.progress_bar.reset()
        self.config_tv.reset()
        self.vbox.pack_start(self.progress_box, expand=False, fill=False)

        self.vbox.pack_start(self.notebook, expand=True, fill=True)

        self.box_group_area.pack_end(self.button_box, expand=False, fill=False)
        self.show_all()
        self.notebook.set_page("Log")
        self.back_button.hide()

        self.reset_build_status()
        self.reset_issues()

    def update_progress_bar(self, title, fraction, status=None):
        self.progress_bar.update(fraction)
        self.progress_bar.set_title(title)
        self.progress_bar.set_rcstyle(status)

    def back_button_clicked_cb(self, button):
        self.builder.show_configuration()

    def new_image_button_clicked_cb(self, button):
        self.builder.reset()

    def show_back_button(self):
        self.back_button.show()

    def stop_button_clicked_cb(self, button):
        self.builder.stop_build()

    def hide_stop_button(self):
        self.stop_button.set_sensitive(False)
        self.stop_button.hide()

    def scroll_to_present_row(self, model, path, iter, v_adj, treeview):
        if treeview and v_adj:
            if path[0] > self.endpath[0]: # check the event is a new row append or not
                self.endpath = path
                # check the gtk.adjustment position is at end boundary or not
                if (v_adj.upper <= v_adj.page_size) or (v_adj.value == v_adj.upper - v_adj.page_size):
                    treeview.scroll_to_cell(path)

    def show_configurations(self, configurations, params):
        self.config_tv.show(configurations, params)

    def failure_primary_action_button_clicked_cb(self, button, action):
        if "Edit recipes" in action:
            self.builder.show_recipes()
        elif "Edit packages" in action:
            self.builder.show_packages()
        elif "Edit image" in action:
            self.builder.show_configuration()

    def restart_build_button_clicked_cb(self, button):
        self.builder.just_bake()

    def stop_primary_action_button_clicked_cb(self, button, action):
        if "recipes" in action:
            self.builder.show_recipes()
        elif "packages" in action:
            self.builder.show_packages()
        elif "image" in action:
            self.builder.show_configuration()

    def open_log_button_clicked_cb(self, button, log_file):
        if log_file:
            log_file = "file:///" + log_file
            gtk.show_uri(screen=button.get_screen(), uri=log_file, timestamp=0)

    def failure_activate_file_bug_link_cb(self, button):
        button.child.emit('activate-link', "http://bugzilla.yoctoproject.org")
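As a side note on update_build_status() above, the task argument is a comma-separated "recipe path, task name" pair; a small illustration with hypothetical values:

import os

# Hypothetical input for BuildDetailsPage.update_build_status() above:
task = "/home/user/poky/meta/recipes-core/busybox/busybox_1.22.1.bb, do_compile"
recipe_path, recipe_task = task.split(", ")
# Note: str.rstrip(".bb") strips a *character set*, not a suffix, so recipe
# names ending in 'b' or '.' would be over-trimmed; a suffix check or
# os.path.splitext() would be the safer idiom.
recipe = os.path.basename(recipe_path).rstrip(".bb")   # -> "busybox_1.22.1"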
1475  bitbake/lib/bb/ui/crumbs/builder.py (executable file)
File diff suppressed because it is too large
455  bitbake/lib/bb/ui/crumbs/buildmanager.py (normal file)
@@ -0,0 +1,455 @@
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2008 Intel Corporation
#
# Authored by Rob Bradford <rob@linux.intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import gtk
import gobject
import threading
import os
import datetime
import time

class BuildConfiguration:
    """ Represents a potential *or* historic *or* concrete build. It
    encompasses all the things that we need to tell bitbake to do to make it
    build what we want it to build.

    It also stores the metadata URL and the set of possible machines (and the
    distros / images / uris for these). Apart from the metadata URL these are
    not serialised to file (since they may be transient). In some ways this
    functionality might be shifted to the loader class."""

    def __init__ (self):
        self.metadata_url = None

        # Tuple of (distros, image, urls)
        self.machine_options = {}

        self.machine = None
        self.distro = None
        self.image = None
        self.urls = []
        self.extra_urls = []
        self.extra_pkgs = []

    def get_machines_model (self):
        model = gtk.ListStore (gobject.TYPE_STRING)
        for machine in self.machine_options.keys():
            model.append ([machine])

        return model

    def get_distro_and_images_models (self, machine):
        distro_model = gtk.ListStore (gobject.TYPE_STRING)

        for distro in self.machine_options[machine][0]:
            distro_model.append ([distro])

        image_model = gtk.ListStore (gobject.TYPE_STRING)

        for image in self.machine_options[machine][1]:
            image_model.append ([image])

        return (distro_model, image_model)

    def get_repos (self):
        self.urls = self.machine_options[self.machine][2]
        return self.urls

    # It might be a lot lot better if we stored these in like, bitbake conf
    # file format.
    @staticmethod
    def load_from_file (filename):
        conf = BuildConfiguration()
        with open(filename, "r") as f:
            for line in f:
                data = line.split (";")[1]
                if (line.startswith ("metadata-url;")):
                    conf.metadata_url = data.strip()
                    continue
                if (line.startswith ("url;")):
                    conf.urls += [data.strip()]
                    continue
                if (line.startswith ("extra-url;")):
                    conf.extra_urls += [data.strip()]
                    continue
                if (line.startswith ("machine;")):
                    conf.machine = data.strip()
                    continue
                if (line.startswith ("distribution;")):
                    conf.distro = data.strip()
                    continue
                if (line.startswith ("image;")):
                    conf.image = data.strip()
                    continue

        return conf

    # Serialise to a file. This is part of the build process and we use this
    # to be able to repeat a given build (using the same set of parameters)
    # but also so that we can include the details of the image / machine /
    # distro in the build manager tree view.
    def write_to_file (self, filename):
        f = open (filename, "w")

        lines = []

        if (self.metadata_url):
            lines += ["metadata-url;%s\n" % (self.metadata_url)]

        for url in self.urls:
            lines += ["url;%s\n" % (url)]

        for url in self.extra_urls:
            lines += ["extra-url;%s\n" % (url)]

        if (self.machine):
            lines += ["machine;%s\n" % (self.machine)]

        if (self.distro):
            lines += ["distribution;%s\n" % (self.distro)]

        if (self.image):
            lines += ["image;%s\n" % (self.image)]

        f.writelines (lines)
        f.close ()

class BuildResult(gobject.GObject):
    """ Represents an historic build. Perhaps not successful. But it includes
    things such as the files that are in the directory (the output from the
    build) as well as a deserialised BuildConfiguration file that is stored in
    ".conf" in the directory for the build.

    This is GObject so that it can be included in the TreeStore."""

    (STATE_COMPLETE, STATE_FAILED, STATE_ONGOING) = \
        (0, 1, 2)

    def __init__ (self, parent, identifier):
        gobject.GObject.__init__ (self)
        self.date = None

        self.files = []
        self.status = None
        self.identifier = identifier
        self.path = os.path.join (parent, identifier)

        # Extract the date, since the directory name is of the
        # format build-<year><month><day>-<ordinal> we can easily
        # pull it out.
        # TODO: Better to stat a file?
        (_, date, revision) = identifier.split ("-")
        print(date)

        year = int (date[0:4])
        month = int (date[4:6])
        day = int (date[6:8])

        self.date = datetime.date (year, month, day)

        self.conf = None

        # By default builds are STATE_FAILED unless we find a "complete" file
        # in which case they are STATE_COMPLETE
        self.state = BuildResult.STATE_FAILED
        for file in os.listdir (self.path):
            if (file.startswith (".conf")):
                conffile = os.path.join (self.path, file)
                self.conf = BuildConfiguration.load_from_file (conffile)
            elif (file.startswith ("complete")):
                self.state = BuildResult.STATE_COMPLETE
            else:
                self.add_file (file)

    def add_file (self, file):
        # Just add the file for now. Don't care about the type.
        self.files += [(file, None)]

class BuildManagerModel (gtk.TreeStore):
    """ Model for the BuildManagerTreeView. This derives from gtk.TreeStore
    but it abstracts nicely what the columns mean and the setup of the columns
    in the model. """

    (COL_IDENT, COL_DESC, COL_MACHINE, COL_DISTRO, COL_BUILD_RESULT, COL_DATE, COL_STATE) = \
        (0, 1, 2, 3, 4, 5, 6)

    def __init__ (self):
        gtk.TreeStore.__init__ (self,
                                gobject.TYPE_STRING,
                                gobject.TYPE_STRING,
                                gobject.TYPE_STRING,
                                gobject.TYPE_STRING,
                                gobject.TYPE_OBJECT,
                                gobject.TYPE_INT64,
                                gobject.TYPE_INT)

class BuildManager (gobject.GObject):
    """ This class manages the historic builds that have been found in the
    "results" directory but is also used for starting a new build."""

    __gsignals__ = {
        'population-finished' : (gobject.SIGNAL_RUN_LAST,
                                 gobject.TYPE_NONE,
                                 ()),
        'populate-error' : (gobject.SIGNAL_RUN_LAST,
                            gobject.TYPE_NONE,
                            ())
    }

    def update_build_result (self, result, iter):
        # Convert the date into something we can sort by.
        date = long (time.mktime (result.date.timetuple()))

        # Add a top level entry for the build

        self.model.set (iter,
                        BuildManagerModel.COL_IDENT, result.identifier,
                        BuildManagerModel.COL_DESC, result.conf.image,
                        BuildManagerModel.COL_MACHINE, result.conf.machine,
                        BuildManagerModel.COL_DISTRO, result.conf.distro,
                        BuildManagerModel.COL_BUILD_RESULT, result,
                        BuildManagerModel.COL_DATE, date,
                        BuildManagerModel.COL_STATE, result.state)

        # And then we use the files in the directory as the children for the
        # top level iter.
        for file in result.files:
            self.model.append (iter, (None, file[0], None, None, None, date, -1))

    # This function is called as an idle by the BuildManagerPopulaterThread
    def add_build_result (self, result):
        gtk.gdk.threads_enter()
        self.known_builds += [result]

        self.update_build_result (result, self.model.append (None))

        gtk.gdk.threads_leave()

    def notify_build_finished (self):
        # This is a bit of a hack. If we have a running build running then we
        # will have a row in the model in STATE_ONGOING. Find it and make it
        # as if it was a proper historic build (well, it is completed now....)

        # We need to use the iters here rather than the Python iterator
        # interface to the model since we need to pass it into
        # update_build_result

        iter = self.model.get_iter_first()

        while (iter):
            (ident, state) = self.model.get(iter,
                                            BuildManagerModel.COL_IDENT,
                                            BuildManagerModel.COL_STATE)

            if state == BuildResult.STATE_ONGOING:
                result = BuildResult (self.results_directory, ident)
                self.update_build_result (result, iter)
            iter = self.model.iter_next(iter)

    def notify_build_succeeded (self):
        # Write the "complete" file so that when we create the BuildResult
        # object we put into the model

        complete_file_path = os.path.join (self.cur_build_directory, "complete")
        f = file (complete_file_path, "w")
        f.close()
        self.notify_build_finished()

    def notify_build_failed (self):
        # Without a "complete" file then this will mark the build as failed:
        self.notify_build_finished()

    # This function is called as an idle
    def emit_population_finished_signal (self):
        gtk.gdk.threads_enter()
        self.emit ("population-finished")
        gtk.gdk.threads_leave()

    class BuildManagerPopulaterThread (threading.Thread):
        def __init__ (self, manager, directory):
            threading.Thread.__init__ (self)
            self.manager = manager
            self.directory = directory

        def run (self):
            # For each of the "build-<...>" directories ..
            if os.path.exists (self.directory):
                for directory in os.listdir (self.directory):

                    if not directory.startswith ("build-"):
                        continue

                    build_result = BuildResult (self.directory, directory)
                    self.manager.add_build_result (build_result)

            gobject.idle_add (BuildManager.emit_population_finished_signal,
                              self.manager)

    def __init__ (self, server, results_directory):
        gobject.GObject.__init__ (self)

        # The builds that we've found from walking the result directory
        self.known_builds = []

        # Save out the bitbake server, we need this for issuing commands to
        # the cooker:
        self.server = server

        # The TreeStore that we use
        self.model = BuildManagerModel ()

        # The results directory is where we create (and look for) the
        # build-<xyz>-<n> directories. We need to populate ourselves from
        # directory
        self.results_directory = results_directory
        self.populate_from_directory (self.results_directory)

    def populate_from_directory (self, directory):
        thread = BuildManager.BuildManagerPopulaterThread (self, directory)
        thread.start()

    # Come up with the name for the next build ident by combining "build-"
    # with the date formatted as yyyymmdd and then an ordinal. We do this by
    # an optimistic algorithm incrementing the ordinal if we find that it
    # already exists.
    def get_next_build_ident (self):
        today = datetime.date.today ()
        datestr = str (today.year) + str (today.month) + str (today.day)

        revision = 0
        test_name = "build-%s-%d" % (datestr, revision)
        test_path = os.path.join (self.results_directory, test_name)

        while (os.path.exists (test_path)):
            revision += 1
            test_name = "build-%s-%d" % (datestr, revision)
            test_path = os.path.join (self.results_directory, test_name)

        return test_name

    # Take a BuildConfiguration and then try and build it based on the
    # parameters of that configuration.
    def do_build (self, conf):
        server = self.server

        # Work out the build directory. Note we actually create the
        # directories here since we need to write the ".conf" file. Otherwise
        # we could have relied on bitbake's builder thread to actually make
        # the directories as it proceeds with the build.
        ident = self.get_next_build_ident ()
        build_directory = os.path.join (self.results_directory,
                                        ident)
        self.cur_build_directory = build_directory
        os.makedirs (build_directory)

        conffile = os.path.join (build_directory, ".conf")
        conf.write_to_file (conffile)

        # Add a row to the model representing this ongoing build. It's kinda a
        # fake entry. If this build completes or fails then this gets updated
        # with the real stuff like the historic builds
        date = long (time.time())
        self.model.append (None, (ident, conf.image, conf.machine, conf.distro,
                                  None, date, BuildResult.STATE_ONGOING))
        try:
            server.runCommand(["setVariable", "BUILD_IMAGES_FROM_FEEDS", 1])
            server.runCommand(["setVariable", "MACHINE", conf.machine])
            server.runCommand(["setVariable", "DISTRO", conf.distro])
            server.runCommand(["setVariable", "PACKAGE_CLASSES", "package_ipk"])
            server.runCommand(["setVariable", "BBFILES", \
                               """${OEROOT}/meta/packages/*/*.bb ${OEROOT}/meta-moblin/packages/*/*.bb"""])
            server.runCommand(["setVariable", "TMPDIR", "${OEROOT}/build/tmp"])
            server.runCommand(["setVariable", "IPK_FEED_URIS", \
                               " ".join(conf.get_repos())])
            server.runCommand(["setVariable", "DEPLOY_DIR_IMAGE",
                               build_directory])
            server.runCommand(["buildTargets", [conf.image], "rootfs"])

        except Exception as e:
            print(e)

class BuildManagerTreeView (gtk.TreeView):
    """ The tree view for the build manager. This shows the historic builds
    and so forth. """

    # We use this function to control what goes in the cell since we store
    # the date in the model as seconds since the epoch (for sorting) and so we
    # need to make it human readable.
    def date_format_custom_cell_data_func (self, col, cell, model, iter):
        date = model.get (iter, BuildManagerModel.COL_DATE)[0]
        datestr = time.strftime("%A %d %B %Y", time.localtime(date))
        cell.set_property ("text", datestr)

    # This format function controls what goes in the cell. We use this to map
    # the integer state to a string and also to colourise the text
    def state_format_custom_cell_data_fun (self, col, cell, model, iter):
        state = model.get (iter, BuildManagerModel.COL_STATE)[0]

        if (state == BuildResult.STATE_ONGOING):
            cell.set_property ("text", "Active")
            cell.set_property ("foreground", "#000000")
        elif (state == BuildResult.STATE_FAILED):
            cell.set_property ("text", "Failed")
            cell.set_property ("foreground", "#ff0000")
        elif (state == BuildResult.STATE_COMPLETE):
            cell.set_property ("text", "Complete")
            cell.set_property ("foreground", "#00ff00")
        else:
            cell.set_property ("text", "")

    def __init__ (self):
        gtk.TreeView.__init__(self)

        # Misc descriptiony thing
        renderer = gtk.CellRendererText ()
        col = gtk.TreeViewColumn (None, renderer,
                                  text=BuildManagerModel.COL_DESC)
        self.append_column (col)

        # Machine
        renderer = gtk.CellRendererText ()
        col = gtk.TreeViewColumn ("Machine", renderer,
                                  text=BuildManagerModel.COL_MACHINE)
        self.append_column (col)

        # distro
        renderer = gtk.CellRendererText ()
        col = gtk.TreeViewColumn ("Distribution", renderer,
                                  text=BuildManagerModel.COL_DISTRO)
        self.append_column (col)

        # date (using a custom function for formatting the cell contents it
        # takes epoch -> human readable string)
        renderer = gtk.CellRendererText ()
        col = gtk.TreeViewColumn ("Date", renderer,
                                  text=BuildManagerModel.COL_DATE)
        self.append_column (col)
        col.set_cell_data_func (renderer,
                                self.date_format_custom_cell_data_func)

        # For status.
        renderer = gtk.CellRendererText ()
        col = gtk.TreeViewColumn ("Status", renderer,
                                  text = BuildManagerModel.COL_STATE)
        self.append_column (col)
        col.set_cell_data_func (renderer,
                                self.state_format_custom_cell_data_fun)
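A small round-trip sketch of the ";"-separated .conf format implemented by write_to_file() and load_from_file() above, using the BuildConfiguration class from buildmanager.py; the values are made up for illustration:

conf = BuildConfiguration()
conf.metadata_url = "http://example.com/metadata"   # hypothetical URL
conf.machine = "qemux86"
conf.distro = "poky"
conf.image = "core-image-minimal"
conf.write_to_file("/tmp/example.conf")
# /tmp/example.conf now holds one "key;value" pair per line:
#   metadata-url;http://example.com/metadata
#   machine;qemux86
#   distribution;poky
#   image;core-image-minimal
reloaded = BuildConfiguration.load_from_file("/tmp/example.conf")
assert reloaded.machine == "qemux86"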
341  bitbake/lib/bb/ui/crumbs/hig/advancedsettingsdialog.py (normal file)
@@ -0,0 +1,341 @@
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2011-2012 Intel Corporation
#
# Authored by Joshua Lock <josh@linux.intel.com>
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
# Authored by Shane Wang <shane.wang@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import gtk
import hashlib
from bb.ui.crumbs.hobwidget import HobInfoButton, HobButton
from bb.ui.crumbs.progressbar import HobProgressBar
from bb.ui.crumbs.hig.settingsuihelper import SettingsUIHelper
from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog
from bb.ui.crumbs.hig.proxydetailsdialog import ProxyDetailsDialog

"""
The following are convenience classes for implementing GNOME HIG compliant
BitBake GUI's
In summary: spacing = 12px, border-width = 6px
"""

class AdvancedSettingsDialog (CrumbsDialog, SettingsUIHelper):

    def details_cb(self, button, parent, protocol):
        dialog = ProxyDetailsDialog(title = protocol.upper() + " Proxy Details",
                                    user = self.configuration.proxies[protocol][1],
                                    passwd = self.configuration.proxies[protocol][2],
                                    parent = parent,
                                    flags = gtk.DIALOG_MODAL
                                            | gtk.DIALOG_DESTROY_WITH_PARENT
                                            | gtk.DIALOG_NO_SEPARATOR)
        dialog.add_button(gtk.STOCK_CLOSE, gtk.RESPONSE_OK)
        response = dialog.run()
        if response == gtk.RESPONSE_OK:
            self.configuration.proxies[protocol][1] = dialog.user
            self.configuration.proxies[protocol][2] = dialog.passwd
            self.refresh_proxy_components()
        dialog.destroy()

    def set_save_button(self, button):
        self.save_button = button

    def rootfs_combo_changed_cb(self, rootfs_combo, all_package_format, check_hbox):
        combo_item = self.rootfs_combo.get_active_text()
        modified = False
        for child in check_hbox.get_children():
            if isinstance(child, gtk.CheckButton):
                check_hbox.remove(child)
                modified = True
        for format in all_package_format:
            if format != combo_item:
                check_button = gtk.CheckButton(format)
                check_hbox.pack_start(check_button, expand=False, fill=False)
                modified = True
        if modified:
            check_hbox.remove(self.pkgfmt_info)
            check_hbox.pack_start(self.pkgfmt_info, expand=False, fill=False)
            check_hbox.show_all()

    def gen_pkgfmt_widget(self, curr_package_format, all_package_format, tooltip_combo="", tooltip_extra=""):
        pkgfmt_vbox = gtk.VBox(False, 6)

        label = self.gen_label_widget("Root file system package format")
        pkgfmt_vbox.pack_start(label, expand=False, fill=False)

        rootfs_format = ""
        if curr_package_format:
            rootfs_format = curr_package_format.split()[0]

        rootfs_format_widget, rootfs_combo = self.gen_combo_widget(rootfs_format, all_package_format, tooltip_combo)
        pkgfmt_vbox.pack_start(rootfs_format_widget, expand=False, fill=False)

        label = self.gen_label_widget("Additional package formats")
        pkgfmt_vbox.pack_start(label, expand=False, fill=False)

        check_hbox = gtk.HBox(False, 12)
        pkgfmt_vbox.pack_start(check_hbox, expand=False, fill=False)
        for format in all_package_format:
            if format != rootfs_format:
                check_button = gtk.CheckButton(format)
                is_active = (format in curr_package_format.split())
                check_button.set_active(is_active)
                check_hbox.pack_start(check_button, expand=False, fill=False)

        self.pkgfmt_info = HobInfoButton(tooltip_extra, self)
        check_hbox.pack_start(self.pkgfmt_info, expand=False, fill=False)

        rootfs_combo.connect("changed", self.rootfs_combo_changed_cb, all_package_format, check_hbox)

        pkgfmt_vbox.show_all()

        return pkgfmt_vbox, rootfs_combo, check_hbox

    def __init__(self, title, configuration, all_image_types,
                 all_package_formats, all_distros, all_sdk_machines,
                 max_threads, parent, flags, buttons=None):
        super(AdvancedSettingsDialog, self).__init__(title, parent, flags, buttons)

        # class members from other objects
        # bitbake settings from Builder.Configuration
        self.configuration = configuration
        self.image_types = all_image_types
        self.all_package_formats = all_package_formats
        self.all_distros = all_distros[:]
        self.all_sdk_machines = all_sdk_machines
        self.max_threads = max_threads

        # class members for internal use
        self.distro_combo = None
        self.dldir_text = None
        self.sstatedir_text = None
        self.sstatemirror_text = None
        self.bb_spinner = None
        self.pmake_spinner = None
        self.rootfs_size_spinner = None
        self.extra_size_spinner = None
        self.gplv3_checkbox = None
        self.sdk_checkbox = None
        self.image_types_checkbuttons = {}

        self.md5 = self.config_md5()
        self.settings_changed = False

        # create visual elements on the dialog
        self.save_button = None
        self.create_visual_elements()
        self.connect("response", self.response_cb)

    def _get_sorted_value(self, var):
        return " ".join(sorted(str(var).split())) + "\n"

    def config_md5(self):
        data = ""
        data += ("PACKAGE_CLASSES: " + self.configuration.curr_package_format + '\n')
        data += ("DISTRO: " + self._get_sorted_value(self.configuration.curr_distro))
        data += ("IMAGE_ROOTFS_SIZE: " + self._get_sorted_value(self.configuration.image_rootfs_size))
        data += ("IMAGE_EXTRA_SIZE: " + self._get_sorted_value(self.configuration.image_extra_size))
        data += ("INCOMPATIBLE_LICENSE: " + self._get_sorted_value(self.configuration.incompat_license))
        data += ("SDK_MACHINE: " + self._get_sorted_value(self.configuration.curr_sdk_machine))
        data += ("TOOLCHAIN_BUILD: " + self._get_sorted_value(self.configuration.toolchain_build))
        data += ("IMAGE_FSTYPES: " + self._get_sorted_value(self.configuration.image_fstypes))
        return hashlib.md5(data).hexdigest()

    def create_visual_elements(self):
        self.nb = gtk.Notebook()
        self.nb.set_show_tabs(True)
        self.nb.append_page(self.create_image_types_page(), gtk.Label("Image types"))
        self.nb.append_page(self.create_output_page(), gtk.Label("Output"))
        self.nb.set_current_page(0)
        self.vbox.pack_start(self.nb, expand=True, fill=True)
        self.vbox.pack_end(gtk.HSeparator(), expand=True, fill=True)

        self.show_all()

    def get_num_checked_image_types(self):
        total = 0
        for b in self.image_types_checkbuttons.values():
            if b.get_active():
                total = total + 1
        return total

    def set_save_button_state(self):
        if self.save_button:
            self.save_button.set_sensitive(self.get_num_checked_image_types() > 0)

    def image_type_checkbutton_clicked_cb(self, button):
        self.set_save_button_state()
        if self.get_num_checked_image_types() == 0:
            # Show an error dialog
            lbl = "<b>Select an image type</b>"
            msg = "You need to select at least one image type."
            dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_WARNING, msg)
            button = dialog.add_button("OK", gtk.RESPONSE_OK)
            HobButton.style_button(button)
            response = dialog.run()
            dialog.destroy()

    def create_image_types_page(self):
        main_vbox = gtk.VBox(False, 16)
        main_vbox.set_border_width(6)

        advanced_vbox = gtk.VBox(False, 6)
        advanced_vbox.set_border_width(6)

        distro_vbox = gtk.VBox(False, 6)
        label = self.gen_label_widget("Distro:")
        tooltip = "Selects the Yocto Project distribution you want"
        try:
            i = self.all_distros.index( "defaultsetup" )
        except ValueError:
            i = -1
        if i != -1:
            self.all_distros[ i ] = "Default"
            if self.configuration.curr_distro == "defaultsetup":
                self.configuration.curr_distro = "Default"
        distro_widget, self.distro_combo = self.gen_combo_widget(self.configuration.curr_distro, self.all_distros,"<b>Distro</b>" + "*" + tooltip)
        distro_vbox.pack_start(label, expand=False, fill=False)
        distro_vbox.pack_start(distro_widget, expand=False, fill=False)
        main_vbox.pack_start(distro_vbox, expand=False, fill=False)

        rows = (len(self.image_types)+1)/3
        table = gtk.Table(rows + 1, 10, True)
        advanced_vbox.pack_start(table, expand=False, fill=False)

        tooltip = "Image file system types you want."
        info = HobInfoButton("<b>Image types</b>" + "*" + tooltip, self)
        label = self.gen_label_widget("Image types:")
        align = gtk.Alignment(0, 0.5, 0, 0)
        table.attach(align, 0, 4, 0, 1)
        align.add(label)
        table.attach(info, 4, 5, 0, 1)

        i = 1
        j = 1
        for image_type in sorted(self.image_types):
            self.image_types_checkbuttons[image_type] = gtk.CheckButton(image_type)
            self.image_types_checkbuttons[image_type].connect("toggled", self.image_type_checkbutton_clicked_cb)
            article = ""
            if image_type.startswith(("a", "e", "i", "o", "u")):
                article = "n"
            if image_type == "live":
                self.image_types_checkbuttons[image_type].set_tooltip_text("Build iso and hddimg images")
            else:
                self.image_types_checkbuttons[image_type].set_tooltip_text("Build a%s %s image" % (article, image_type))
            table.attach(self.image_types_checkbuttons[image_type], j - 1, j + 3, i, i + 1)
            if image_type in self.configuration.image_fstypes.split():
                self.image_types_checkbuttons[image_type].set_active(True)
            i += 1
            if i > rows:
                i = 1
                j = j + 4

        main_vbox.pack_start(advanced_vbox, expand=False, fill=False)
        self.set_save_button_state()

        return main_vbox

    def create_output_page(self):
        advanced_vbox = gtk.VBox(False, 6)
        advanced_vbox.set_border_width(6)

        advanced_vbox.pack_start(self.gen_label_widget('<span weight="bold">Package format</span>'), expand=False, fill=False)
        sub_vbox = gtk.VBox(False, 6)
        advanced_vbox.pack_start(sub_vbox, expand=False, fill=False)
        tooltip_combo = "Selects the package format used to generate rootfs."
        tooltip_extra = "Selects extra package formats to build"
        pkgfmt_widget, self.rootfs_combo, self.check_hbox = self.gen_pkgfmt_widget(self.configuration.curr_package_format, self.all_package_formats,"<b>Root file system package format</b>" + "*" + tooltip_combo,"<b>Additional package formats</b>" + "*" + tooltip_extra)
        sub_vbox.pack_start(pkgfmt_widget, expand=False, fill=False)

        advanced_vbox.pack_start(self.gen_label_widget('<span weight="bold">Image size</span>'), expand=False, fill=False)
        sub_vbox = gtk.VBox(False, 6)
        advanced_vbox.pack_start(sub_vbox, expand=False, fill=False)
        label = self.gen_label_widget("Image basic size (in MB)")
        tooltip = "Defines the size for the generated image. The OpenEmbedded build system determines the final size for the generated image using an algorithm that takes into account the initial disk space used for the generated image, the Image basic size value, and the Additional free space value.\n\nFor more information, check the <a href=\"http://www.yoctoproject.org/docs/current/poky-ref-manual/poky-ref-manual.html#var-IMAGE_ROOTFS_SIZE\">Yocto Project Reference Manual</a>."
        rootfs_size_widget, self.rootfs_size_spinner = self.gen_spinner_widget(int(self.configuration.image_rootfs_size*1.0/1024), 0, 65536,"<b>Image basic size</b>" + "*" + tooltip)
        sub_vbox.pack_start(label, expand=False, fill=False)
        sub_vbox.pack_start(rootfs_size_widget, expand=False, fill=False)

        sub_vbox = gtk.VBox(False, 6)
        advanced_vbox.pack_start(sub_vbox, expand=False, fill=False)
        label = self.gen_label_widget("Additional free space (in MB)")
        tooltip = "Sets extra free disk space to be added to the generated image. Use this variable when you want to ensure that a specific amount of free disk space is available on a device after an image is installed and running."
        extra_size_widget, self.extra_size_spinner = self.gen_spinner_widget(int(self.configuration.image_extra_size*1.0/1024), 0, 65536,"<b>Additional free space</b>" + "*" + tooltip)
        sub_vbox.pack_start(label, expand=False, fill=False)
        sub_vbox.pack_start(extra_size_widget, expand=False, fill=False)

        advanced_vbox.pack_start(self.gen_label_widget('<span weight="bold">Licensing</span>'), expand=False, fill=False)
        self.gplv3_checkbox = gtk.CheckButton("Exclude GPLv3 packages")
        self.gplv3_checkbox.set_tooltip_text("Check this box to prevent GPLv3 packages from being included in your image")
        if "GPLv3" in self.configuration.incompat_license.split():
            self.gplv3_checkbox.set_active(True)
        else:
            self.gplv3_checkbox.set_active(False)
        advanced_vbox.pack_start(self.gplv3_checkbox, expand=False, fill=False)

        advanced_vbox.pack_start(self.gen_label_widget('<span weight="bold">SDK</span>'), expand=False, fill=False)
        sub_hbox = gtk.HBox(False, 6)
        advanced_vbox.pack_start(sub_hbox, expand=False, fill=False)
        self.sdk_checkbox = gtk.CheckButton("Populate SDK")
        tooltip = "Check this box to generate an SDK tarball that consists of the cross-toolchain and a sysroot that contains development packages for your image."
        self.sdk_checkbox.set_tooltip_text(tooltip)
        self.sdk_checkbox.set_active(self.configuration.toolchain_build)
        sub_hbox.pack_start(self.sdk_checkbox, expand=False, fill=False)

        tooltip = "Select the host platform for which you want to run the toolchain contained in the SDK tarball."
        sdk_machine_widget, self.sdk_machine_combo = self.gen_combo_widget(self.configuration.curr_sdk_machine, self.all_sdk_machines,"<b>Populate SDK</b>" + "*" + tooltip)
        sub_hbox.pack_start(sdk_machine_widget, expand=False, fill=False)

        return advanced_vbox

    def response_cb(self, dialog, response_id):
        package_format = []
        package_format.append(self.rootfs_combo.get_active_text())
        for child in self.check_hbox:
            if isinstance(child, gtk.CheckButton) and child.get_active():
                package_format.append(child.get_label())
        self.configuration.curr_package_format = " ".join(package_format)
|
||||
|
||||
distro = self.distro_combo.get_active_text()
|
||||
if distro == "Default":
|
||||
distro = "defaultsetup"
|
||||
self.configuration.curr_distro = distro
|
||||
self.configuration.image_rootfs_size = self.rootfs_size_spinner.get_value_as_int() * 1024
|
||||
self.configuration.image_extra_size = self.extra_size_spinner.get_value_as_int() * 1024
|
||||
|
||||
self.configuration.image_fstypes = ""
|
||||
for image_type in self.image_types:
|
||||
if self.image_types_checkbuttons[image_type].get_active():
|
||||
self.configuration.image_fstypes += (" " + image_type)
|
||||
        # strip() returns a new string, so the result must be assigned back
        self.configuration.image_fstypes = self.configuration.image_fstypes.strip()

        if self.gplv3_checkbox.get_active():
            if "GPLv3" not in self.configuration.incompat_license.split():
                self.configuration.incompat_license += " GPLv3"
        else:
            if "GPLv3" in self.configuration.incompat_license.split():
                # list.remove() returns None, so assigning its result would clobber
                # the setting; remove the entry and rejoin the remaining licenses.
                licenses = self.configuration.incompat_license.split()
                licenses.remove("GPLv3")
                self.configuration.incompat_license = " ".join(licenses)
        self.configuration.incompat_license = self.configuration.incompat_license.strip()

        self.configuration.toolchain_build = self.sdk_checkbox.get_active()
        self.configuration.curr_sdk_machine = self.sdk_machine_combo.get_active_text()
        md5 = self.config_md5()
        self.settings_changed = (self.md5 != md5)
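
A note on the GPLv3 handling in response_cb above: the setting is a plain whitespace-separated string, so the toggle reduces to adding or removing one token. A standalone sketch of that logic (plain Python; toggle_incompat is a hypothetical name, not part of the Hob code):

def toggle_incompat(incompat_license, exclude_gplv3):
    # Add or remove "GPLv3" in a whitespace-separated license string.
    licenses = incompat_license.split()
    if exclude_gplv3:
        if "GPLv3" not in licenses:
            licenses.append("GPLv3")
    elif "GPLv3" in licenses:
        licenses.remove("GPLv3")
    return " ".join(licenses)

assert toggle_incompat("", True) == "GPLv3"
assert toggle_incompat("GPLv3 AGPL-3.0", False) == "AGPL-3.0"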

bitbake/lib/bb/ui/crumbs/hig/parsingwarningsdialog.py (new file, 163 lines)
@@ -0,0 +1,163 @@
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2011-2012 Intel Corporation
#
# Authored by Cristiana Voicu <cristiana.voicu@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import gtk
import gobject
from bb.ui.crumbs.hobwidget import HobAltButton
from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog

"""
The following are convenience classes for implementing GNOME HIG compliant
BitBake GUIs.
In summary: spacing = 12px, border-width = 6px
"""

#
# ParsingWarningsDialog
#
class ParsingWarningsDialog (CrumbsDialog):

    def __init__(self, title, warnings, parent, flags, buttons=None):
        super(ParsingWarningsDialog, self).__init__(title, parent, flags, buttons)

        self.warnings = warnings
        self.warning_on = 0
        self.warn_nb = len(warnings)

        # create visual elements on the dialog
        self.create_visual_elements()

    def cancel_button_cb(self, button):
        self.destroy()

    def previous_button_cb(self, button):
        self.warning_on = self.warning_on - 1
        self.refresh_components()

    def next_button_cb(self, button):
        self.warning_on = self.warning_on + 1
        self.refresh_components()

    def refresh_components(self):
        lbl = self.warnings[self.warning_on]
        # when the warning text has more than 400 chars, use a scroll bar
        if 0 <= len(lbl) < 400:
            self.warning_label.set_size_request(320, 230)
            self.warning_label.set_use_markup(True)
            self.warning_label.set_line_wrap(True)
            self.warning_label.set_markup(lbl)
            self.warning_label.set_property("yalign", 0.00)
        else:
            self.textWindow.set_shadow_type(gtk.SHADOW_IN)
            self.textWindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
            self.msgView = gtk.TextView()
            self.msgView.set_editable(False)
            self.msgView.set_wrap_mode(gtk.WRAP_WORD)
            self.msgView.set_cursor_visible(False)
            self.msgView.set_size_request(320, 230)
            self.buf = gtk.TextBuffer()
            self.buf.set_text(lbl)
            self.msgView.set_buffer(self.buf)
            self.textWindow.add(self.msgView)
            self.msgView.show()

        if self.warning_on == 0:
            self.previous_button.set_sensitive(False)
        else:
            self.previous_button.set_sensitive(True)

        if self.warning_on == self.warn_nb - 1:
            self.next_button.set_sensitive(False)
        else:
            self.next_button.set_sensitive(True)

        if self.warn_nb > 1:
            self.heading = "Warning " + str(self.warning_on + 1) + " of " + str(self.warn_nb)
            self.heading_label.set_markup('<span weight="bold">%s</span>' % self.heading)
        else:
            self.heading = "Warning"
            self.heading_label.set_markup('<span weight="bold">%s</span>' % self.heading)

        self.show_all()

        if 0 <= len(lbl) < 400:
            self.textWindow.hide()
        else:
            self.warning_label.hide()

    def create_visual_elements(self):
        self.set_size_request(350, 350)
        self.heading_label = gtk.Label()
        self.heading_label.set_alignment(0, 0)
        self.warning_label = gtk.Label()
        self.warning_label.set_selectable(True)
        self.warning_label.set_alignment(0, 0)
        self.textWindow = gtk.ScrolledWindow()

        table = gtk.Table(1, 10, False)

        cancel_button = gtk.Button()
        cancel_button.set_label("Close")
        cancel_button.connect("clicked", self.cancel_button_cb)
        cancel_button.set_size_request(110, 30)

        self.previous_button = gtk.Button()
        image1 = gtk.image_new_from_stock(gtk.STOCK_GO_BACK, gtk.ICON_SIZE_BUTTON)
        image1.show()
        box = gtk.HBox(False, 6)
        box.show()
        self.previous_button.add(box)
        lbl = gtk.Label("Previous")
        lbl.show()
        box.pack_start(image1, expand=False, fill=False, padding=3)
        box.pack_start(lbl, expand=True, fill=True, padding=3)
        self.previous_button.connect("clicked", self.previous_button_cb)
        self.previous_button.set_size_request(110, 30)

        self.next_button = gtk.Button()
        image2 = gtk.image_new_from_stock(gtk.STOCK_GO_FORWARD, gtk.ICON_SIZE_BUTTON)
        image2.show()
        box = gtk.HBox(False, 6)
        box.show()
        self.next_button.add(box)
        lbl = gtk.Label("Next")
        lbl.show()
        box.pack_start(lbl, expand=True, fill=True, padding=3)
        box.pack_start(image2, expand=False, fill=False, padding=3)
        self.next_button.connect("clicked", self.next_button_cb)
        self.next_button.set_size_request(110, 30)

        # when there is more than one warning, we need the Previous and Next buttons
        if self.warn_nb > 1:
            self.vbox.pack_start(self.heading_label, expand=False, fill=False)
            self.vbox.pack_start(self.warning_label, expand=False, fill=False)
            self.vbox.pack_start(self.textWindow, expand=False, fill=False)
            table.attach(cancel_button, 6, 7, 0, 1, xoptions=gtk.SHRINK)
            table.attach(self.previous_button, 7, 8, 0, 1, xoptions=gtk.SHRINK)
            table.attach(self.next_button, 8, 9, 0, 1, xoptions=gtk.SHRINK)
            self.vbox.pack_end(table, expand=False, fill=False)
        else:
            self.vbox.pack_start(self.heading_label, expand=False, fill=False)
            self.vbox.pack_start(self.warning_label, expand=False, fill=False)
            self.vbox.pack_start(self.textWindow, expand=False, fill=False)
            cancel_button = self.add_button("Close", gtk.RESPONSE_CANCEL)
            HobAltButton.style_button(cancel_button)

        self.refresh_components()
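
The dialog above is essentially a pager over a list of warnings: the current index drives both the heading text and the sensitivity of the Previous/Next buttons. Reduced to a minimal standalone model (WarningPager is a hypothetical helper, not part of the Hob code):

class WarningPager(object):
    # Minimal model of the dialog's paging state; index mirrors warning_on.
    def __init__(self, warnings):
        self.warnings = warnings
        self.index = 0

    def can_go_previous(self):
        return self.index > 0

    def can_go_next(self):
        return self.index < len(self.warnings) - 1

    def heading(self):
        if len(self.warnings) > 1:
            return "Warning %d of %d" % (self.index + 1, len(self.warnings))
        return "Warning"

pager = WarningPager(["short warning", "another warning"])
assert pager.heading() == "Warning 1 of 2"
assert not pager.can_go_previous() and pager.can_go_next()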

bitbake/lib/bb/ui/crumbs/hig/proxydetailsdialog.py (new file, 90 lines)
@@ -0,0 +1,90 @@
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2011-2012 Intel Corporation
#
# Authored by Joshua Lock <josh@linux.intel.com>
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
# Authored by Shane Wang <shane.wang@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import gtk
from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog

"""
The following are convenience classes for implementing GNOME HIG compliant
BitBake GUIs.
In summary: spacing = 12px, border-width = 6px
"""

class ProxyDetailsDialog (CrumbsDialog):

    def __init__(self, title, user, passwd, parent, flags, buttons=None):
        super(ProxyDetailsDialog, self).__init__(title, parent, flags, buttons)
        self.connect("response", self.response_cb)

        self.auth = not (user == None or passwd == None or user == "")
        self.user = user or ""
        self.passwd = passwd or ""

        # create visual elements on the dialog
        self.create_visual_elements()

    def create_visual_elements(self):
        self.auth_checkbox = gtk.CheckButton("Use authentication")
        self.auth_checkbox.set_tooltip_text("Check this box to set the username and the password")
        self.auth_checkbox.set_active(self.auth)
        self.auth_checkbox.connect("toggled", self.auth_checkbox_toggled_cb)
        self.vbox.pack_start(self.auth_checkbox, expand=False, fill=False)

        hbox = gtk.HBox(False, 6)
        self.user_label = gtk.Label("Username:")
        self.user_text = gtk.Entry()
        self.user_text.set_text(self.user)
        hbox.pack_start(self.user_label, expand=False, fill=False)
        hbox.pack_end(self.user_text, expand=False, fill=False)
        self.vbox.pack_start(hbox, expand=False, fill=False)

        hbox = gtk.HBox(False, 6)
        self.passwd_label = gtk.Label("Password:")
        self.passwd_text = gtk.Entry()
        self.passwd_text.set_text(self.passwd)
        hbox.pack_start(self.passwd_label, expand=False, fill=False)
        hbox.pack_end(self.passwd_text, expand=False, fill=False)
        self.vbox.pack_start(hbox, expand=False, fill=False)

        self.refresh_auth_components()
        self.show_all()

    def refresh_auth_components(self):
        self.user_label.set_sensitive(self.auth)
        self.user_text.set_editable(self.auth)
        self.user_text.set_sensitive(self.auth)
        self.passwd_label.set_sensitive(self.auth)
        self.passwd_text.set_editable(self.auth)
        self.passwd_text.set_sensitive(self.auth)

    def auth_checkbox_toggled_cb(self, button):
        self.auth = self.auth_checkbox.get_active()
        self.refresh_auth_components()

    def response_cb(self, dialog, response_id):
        if response_id == gtk.RESPONSE_OK:
            if self.auth:
                self.user = self.user_text.get_text()
                self.passwd = self.passwd_text.get_text()
            else:
                self.user = None
                self.passwd = None
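
One subtlety worth noting: the dialog treats None credentials as "authentication disabled" while empty strings are only display defaults for the entry widgets. That convention in isolation (plain Python; parse_credentials is a hypothetical helper, not part of the Hob code):

def parse_credentials(user, passwd):
    # Auth is considered enabled only when both values were previously stored.
    auth = not (user is None or passwd is None or user == "")
    return auth, user or "", passwd or ""

assert parse_credentials(None, None) == (False, "", "")
assert parse_credentials("anon", "secret") == (True, "anon", "secret")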

bitbake/lib/bb/ui/crumbs/hig/retrieveimagedialog.py (new file, 51 lines)
@@ -0,0 +1,51 @@
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2013 Intel Corporation
#
# Authored by Cristiana Voicu <cristiana.voicu@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import gtk

class RetrieveImageDialog (gtk.FileChooserDialog):
    """
    This class creates a dialog that lets the user retrieve
    a custom image previously saved from Hob.
    """
    def __init__(self, directory, title, parent, flags, buttons=None):
        super(RetrieveImageDialog, self).__init__(title, None, gtk.FILE_CHOOSER_ACTION_OPEN,
            (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, gtk.STOCK_OPEN, gtk.RESPONSE_OK))
        self.directory = directory

        # create visual elements on the dialog
        self.create_visual_elements()

    def create_visual_elements(self):
        self.set_show_hidden(True)
        self.set_default_response(gtk.RESPONSE_OK)
        self.set_current_folder(self.directory)

        vbox = self.get_children()[0].get_children()[0].get_children()[0]
        for child in vbox.get_children()[0].get_children()[0].get_children()[0].get_children():
            vbox.get_children()[0].get_children()[0].get_children()[0].remove(child)

        label1 = gtk.Label()
        label1.set_text("File system" + self.directory)
        label1.show()
        vbox.get_children()[0].get_children()[0].get_children()[0].pack_start(label1, expand=False, fill=False, padding=0)
        vbox.get_children()[0].get_children()[1].get_children()[0].hide()

        self.get_children()[0].get_children()[1].get_children()[0].set_label("Select")
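
The chained get_children() calls above reach into the internal widget layout of gtk.FileChooserDialog, which is fragile across GTK versions. If the pattern were kept, a small helper would at least make the paths readable; a sketch under that assumption (nth_child and the _Stub test double are hypothetical, not part of the Hob code):

def nth_child(widget, *path):
    # Follow a sequence of child indices through nested containers, so
    # nth_child(dialog, 0, 1, 0) stands in for
    # dialog.get_children()[0].get_children()[1].get_children()[0].
    for index in path:
        widget = widget.get_children()[index]
    return widget

class _Stub(object):
    # Tiny stand-in for a GTK container, just for demonstration.
    def __init__(self, *children):
        self._children = list(children)
    def get_children(self):
        return self._children

leaf = _Stub()
root = _Stub(_Stub(_Stub(), leaf))
assert nth_child(root, 0, 1) is leaf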

bitbake/lib/bb/ui/crumbs/hig/saveimagedialog.py (new file, 159 lines)
@@ -0,0 +1,159 @@
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2013 Intel Corporation
#
# Authored by Cristiana Voicu <cristiana.voicu@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import gtk
import glib
from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog
from bb.ui.crumbs.hobwidget import HobButton

class SaveImageDialog (CrumbsDialog):
    """
    This class creates a dialog that lets the user save
    a custom image to a predefined directory.
    """
    def __init__(self, directory, name, description, title, parent, flags, buttons=None):
        super(SaveImageDialog, self).__init__(title, parent, flags, buttons)
        self.directory = directory
        self.builder = parent
        self.name_field = name
        self.description_field = description

        # create visual elements on the dialog
        self.create_visual_elements()

    def create_visual_elements(self):
        self.set_default_response(gtk.RESPONSE_OK)
        self.vbox.set_border_width(6)

        sub_vbox = gtk.VBox(False, 12)
        self.vbox.pack_start(sub_vbox, expand=False, fill=False)
        label = gtk.Label()
        label.set_alignment(0, 0)
        label.set_markup("<b>Name</b>")
        sub_label = gtk.Label()
        sub_label.set_alignment(0, 0)
        content = "Image recipe names should be all lowercase and include only alphanumeric\n"
        content += "characters. The only special character you can use is the ASCII hyphen (-)."
        sub_label.set_markup(content)
        self.name_entry = gtk.Entry()
        self.name_entry.set_text(self.name_field)
        self.name_entry.set_size_request(350, 30)
        self.name_entry.connect("changed", self.name_entry_changed)
        sub_vbox.pack_start(label, expand=False, fill=False)
        sub_vbox.pack_start(sub_label, expand=False, fill=False)
        sub_vbox.pack_start(self.name_entry, expand=False, fill=False)

        sub_vbox = gtk.VBox(False, 12)
        self.vbox.pack_start(sub_vbox, expand=False, fill=False)
        label = gtk.Label()
        label.set_alignment(0, 0)
        label.set_markup("<b>Description</b> (optional)")
        sub_label = gtk.Label()
        sub_label.set_alignment(0, 0)
        sub_label.set_markup("The description should be less than 150 characters long.")
        self.description_entry = gtk.TextView()
        description_buffer = self.description_entry.get_buffer()
        description_buffer.set_text(self.description_field)
        description_buffer.connect("insert-text", self.limit_description_length)
        self.description_entry.set_wrap_mode(gtk.WRAP_WORD)
        self.description_entry.set_size_request(350, 50)
        sub_vbox.pack_start(label, expand=False, fill=False)
        sub_vbox.pack_start(sub_label, expand=False, fill=False)
        sub_vbox.pack_start(self.description_entry, expand=False, fill=False)

        sub_vbox = gtk.VBox(False, 12)
        self.vbox.pack_start(sub_vbox, expand=False, fill=False)
        label = gtk.Label()
        label.set_alignment(0, 0)
        label.set_markup("Your image recipe will be saved to:")
        sub_label = gtk.Label()
        sub_label.set_alignment(0, 0)
        sub_label.set_markup(self.directory)
        sub_vbox.pack_start(label, expand=False, fill=False)
        sub_vbox.pack_start(sub_label, expand=False, fill=False)

        table = gtk.Table(1, 4, True)

        cancel_button = gtk.Button()
        cancel_button.set_label("Cancel")
        cancel_button.connect("clicked", self.cancel_button_cb)
        cancel_button.set_size_request(110, 30)

        self.save_button = gtk.Button()
        self.save_button.set_label("Save")
        self.save_button.connect("clicked", self.save_button_cb)
        self.save_button.set_size_request(110, 30)
        if self.name_entry.get_text() == '':
            self.save_button.set_sensitive(False)

        table.attach(cancel_button, 2, 3, 0, 1)
        table.attach(self.save_button, 3, 4, 0, 1)
        self.vbox.pack_end(table, expand=False, fill=False)

        self.show_all()

    def limit_description_length(self, textbuffer, iter, text, length):
        buffer_bounds = textbuffer.get_bounds()
        entire_text = textbuffer.get_text(*buffer_bounds)
        entire_text += text
        if len(entire_text) > 150 or text == "\n":
            textbuffer.emit_stop_by_name("insert-text")

    def name_entry_changed(self, entry):
        text = entry.get_text()
        if text == '':
            self.save_button.set_sensitive(False)
        else:
            self.save_button.set_sensitive(True)

    def cancel_button_cb(self, button):
        self.destroy()

    def save_button_cb(self, button):
        text = self.name_entry.get_text()
        new_text = text.replace("-", "")
        description_buffer = self.description_entry.get_buffer()
        description = description_buffer.get_text(description_buffer.get_start_iter(), description_buffer.get_end_iter())
        if new_text.islower() and new_text.isalnum():
            self.builder.image_details_page.image_saved = True
            self.builder.customized = False
            self.builder.generate_new_image(self.directory + text, description)
            self.builder.recipe_model.set_in_list(text, description)
            self.builder.recipe_model.set_selected_image(text)
            self.builder.image_details_page.show_page(self.builder.IMAGE_GENERATED)
            self.builder.image_details_page.name_field_template = text
            self.builder.image_details_page.description_field_template = description
            self.destroy()
        else:
            self.show_invalid_input_error_dialog()

    def show_invalid_input_error_dialog(self):
        lbl = "<b>Invalid characters in image recipe name</b>"
        msg = "Image recipe names should be all lowercase and\n"
        msg += "include only alphanumeric characters. The only\n"
        msg += "special character you can use is the ASCII hyphen (-)."
        dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_ERROR, msg)
        button = dialog.add_button("Close", gtk.RESPONSE_OK)
        HobButton.style_button(button)

        res = dialog.run()
        self.name_entry.grab_focus()
        dialog.destroy()
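
The name check in save_button_cb above strips hyphens and then requires the remainder to be lowercase alphanumeric. Restated as a standalone predicate (plain Python; the non-empty guard is handled in the dialog itself by the Save button's sensitivity):

def valid_recipe_name(name):
    # Lowercase alphanumerics plus the ASCII hyphen, per the dialog's rule.
    stripped = name.replace("-", "")
    return bool(stripped) and stripped.islower() and stripped.isalnum()

assert valid_recipe_name("core-image-minimal")
assert not valid_recipe_name("Core-Image")   # uppercase rejected
assert not valid_recipe_name("my_image")     # underscore rejected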

bitbake/lib/bb/ui/crumbs/hig/simplesettingsdialog.py (new file, 894 lines)
@@ -0,0 +1,894 @@
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2011-2012 Intel Corporation
#
# Authored by Joshua Lock <josh@linux.intel.com>
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
# Authored by Shane Wang <shane.wang@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import gtk
import gobject
import hashlib
from bb.ui.crumbs.hobwidget import hic, HobInfoButton, HobButton, HobAltButton
from bb.ui.crumbs.progressbar import HobProgressBar
from bb.ui.crumbs.hig.settingsuihelper import SettingsUIHelper
from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog
from bb.ui.crumbs.hig.proxydetailsdialog import ProxyDetailsDialog

"""
The following are convenience classes for implementing GNOME HIG compliant
BitBake GUIs.
In summary: spacing = 12px, border-width = 6px
"""

class SimpleSettingsDialog (CrumbsDialog, SettingsUIHelper):

    (BUILD_ENV_PAGE_ID,
     SHARED_STATE_PAGE_ID,
     PROXIES_PAGE_ID,
     OTHERS_PAGE_ID) = range(4)

    (TEST_NETWORK_NONE,
     TEST_NETWORK_INITIAL,
     TEST_NETWORK_RUNNING,
     TEST_NETWORK_PASSED,
     TEST_NETWORK_FAILED,
     TEST_NETWORK_CANCELED) = range(6)

    TARGETS = [
        ("MY_TREE_MODEL_ROW", gtk.TARGET_SAME_WIDGET, 0),
        ("text/plain", 0, 1),
        ("TEXT", 0, 2),
        ("STRING", 0, 3),
        ]

    def __init__(self, title, configuration, all_image_types,
            all_package_formats, all_distros, all_sdk_machines,
            max_threads, parent, flags, handler, buttons=None):
        super(SimpleSettingsDialog, self).__init__(title, parent, flags, buttons)

        # class members from other objects
        # bitbake settings from Builder.Configuration
        self.configuration = configuration
        self.image_types = all_image_types
        self.all_package_formats = all_package_formats
        self.all_distros = all_distros
        self.all_sdk_machines = all_sdk_machines
        self.max_threads = max_threads

        # class members for internal use
        self.dldir_text = None
        self.sstatedir_text = None
        self.sstatemirrors_list = []
        self.sstatemirrors_changed = 0
        self.bb_spinner = None
        self.pmake_spinner = None
        self.rootfs_size_spinner = None
        self.extra_size_spinner = None
        self.gplv3_checkbox = None
        self.toolchain_checkbox = None
        self.setting_store = None
        self.image_types_checkbuttons = {}

        self.md5 = self.config_md5()
        self.proxy_md5 = self.config_proxy_md5()
        self.settings_changed = False
        self.proxy_settings_changed = False
        self.handler = handler
        self.proxy_test_ran = False
        self.selected_mirror_row = 0
        self.new_mirror = False

        # create visual elements on the dialog
        self.create_visual_elements()
        self.connect("response", self.response_cb)

    def _get_sorted_value(self, var):
        return " ".join(sorted(str(var).split())) + "\n"

    def config_proxy_md5(self):
        data = ("ENABLE_PROXY: " + self._get_sorted_value(self.configuration.enable_proxy))
        if self.configuration.enable_proxy:
            for protocol in self.configuration.proxies.keys():
                data += (protocol + ": " + self._get_sorted_value(self.configuration.combine_proxy(protocol)))
        return hashlib.md5(data).hexdigest()

    def config_md5(self):
        data = ""
        for key in self.configuration.extra_setting.keys():
            data += (key + ": " + self._get_sorted_value(self.configuration.extra_setting[key]))
        return hashlib.md5(data).hexdigest()
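
config_md5 and config_proxy_md5 implement change detection by hashing a canonical serialization of the settings: whitespace-split values are sorted so that ordering differences do not register as changes, and "settings changed" reduces to comparing two hex digests. A runnable sketch of the same idea (settings_fingerprint is hypothetical; unlike config_md5 it also sorts the keys and encodes for Python 3):

import hashlib

def settings_fingerprint(settings):
    # Canonical serialization: sorted keys, sorted whitespace-split values.
    data = ""
    for key in sorted(settings):
        data += key + ": " + " ".join(sorted(str(settings[key]).split())) + "\n"
    return hashlib.md5(data.encode("utf-8")).hexdigest()

before = settings_fingerprint({"BB_NUMBER_THREADS": "4"})
after = settings_fingerprint({"BB_NUMBER_THREADS": "8"})
assert before != after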
    def gen_proxy_entry_widget(self, protocol, parent, need_button=True, line=0):
        label = gtk.Label(protocol.upper() + " proxy")
        self.proxy_table.attach(label, 0, 1, line, line+1, xpadding=24)

        proxy_entry = gtk.Entry()
        proxy_entry.set_size_request(300, -1)
        self.proxy_table.attach(proxy_entry, 1, 2, line, line+1, ypadding=4)

        self.proxy_table.attach(gtk.Label(":"), 2, 3, line, line+1, xpadding=12, ypadding=4)

        port_entry = gtk.Entry()
        port_entry.set_size_request(60, -1)
        self.proxy_table.attach(port_entry, 3, 4, line, line+1, ypadding=4)

        details_button = HobAltButton("Details")
        details_button.connect("clicked", self.details_cb, parent, protocol)
        self.proxy_table.attach(details_button, 4, 5, line, line+1, xpadding=4, yoptions=gtk.EXPAND)

        return proxy_entry, port_entry, details_button

    def refresh_proxy_components(self):
        self.same_checkbox.set_sensitive(self.configuration.enable_proxy)

        self.http_proxy.set_text(self.configuration.combine_host_only("http"))
        self.http_proxy.set_editable(self.configuration.enable_proxy)
        self.http_proxy.set_sensitive(self.configuration.enable_proxy)
        self.http_proxy_port.set_text(self.configuration.combine_port_only("http"))
        self.http_proxy_port.set_editable(self.configuration.enable_proxy)
        self.http_proxy_port.set_sensitive(self.configuration.enable_proxy)
        self.http_proxy_details.set_sensitive(self.configuration.enable_proxy)

        self.https_proxy.set_text(self.configuration.combine_host_only("https"))
        self.https_proxy.set_editable(self.configuration.enable_proxy and (not self.configuration.same_proxy))
        self.https_proxy.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
        self.https_proxy_port.set_text(self.configuration.combine_port_only("https"))
        self.https_proxy_port.set_editable(self.configuration.enable_proxy and (not self.configuration.same_proxy))
        self.https_proxy_port.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
        self.https_proxy_details.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))

        self.ftp_proxy.set_text(self.configuration.combine_host_only("ftp"))
        self.ftp_proxy.set_editable(self.configuration.enable_proxy and (not self.configuration.same_proxy))
        self.ftp_proxy.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
        self.ftp_proxy_port.set_text(self.configuration.combine_port_only("ftp"))
        self.ftp_proxy_port.set_editable(self.configuration.enable_proxy and (not self.configuration.same_proxy))
        self.ftp_proxy_port.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
        self.ftp_proxy_details.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))

        self.socks_proxy.set_text(self.configuration.combine_host_only("socks"))
        self.socks_proxy.set_editable(self.configuration.enable_proxy and (not self.configuration.same_proxy))
        self.socks_proxy.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
        self.socks_proxy_port.set_text(self.configuration.combine_port_only("socks"))
        self.socks_proxy_port.set_editable(self.configuration.enable_proxy and (not self.configuration.same_proxy))
        self.socks_proxy_port.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
        self.socks_proxy_details.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))

        self.cvs_proxy.set_text(self.configuration.combine_host_only("cvs"))
        self.cvs_proxy.set_editable(self.configuration.enable_proxy and (not self.configuration.same_proxy))
        self.cvs_proxy.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
        self.cvs_proxy_port.set_text(self.configuration.combine_port_only("cvs"))
        self.cvs_proxy_port.set_editable(self.configuration.enable_proxy and (not self.configuration.same_proxy))
        self.cvs_proxy_port.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
        self.cvs_proxy_details.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))

        if self.configuration.same_proxy:
            if self.http_proxy.get_text():
                [w.set_text(self.http_proxy.get_text()) for w in self.same_proxy_addresses]
            if self.http_proxy_port.get_text():
                [w.set_text(self.http_proxy_port.get_text()) for w in self.same_proxy_ports]

    def proxy_checkbox_toggled_cb(self, button):
        self.configuration.enable_proxy = self.proxy_checkbox.get_active()
        if not self.configuration.enable_proxy:
            self.configuration.same_proxy = False
            self.same_checkbox.set_active(self.configuration.same_proxy)
        self.save_proxy_data()
        self.refresh_proxy_components()

    def same_checkbox_toggled_cb(self, button):
        self.configuration.same_proxy = self.same_checkbox.get_active()
        self.save_proxy_data()
        self.refresh_proxy_components()

    def save_proxy_data(self):
        self.configuration.split_proxy("http", self.http_proxy.get_text() + ":" + self.http_proxy_port.get_text())
        if self.configuration.same_proxy:
            self.configuration.split_proxy("https", self.http_proxy.get_text() + ":" + self.http_proxy_port.get_text())
            self.configuration.split_proxy("ftp", self.http_proxy.get_text() + ":" + self.http_proxy_port.get_text())
            self.configuration.split_proxy("socks", self.http_proxy.get_text() + ":" + self.http_proxy_port.get_text())
            self.configuration.split_proxy("cvs", self.http_proxy.get_text() + ":" + self.http_proxy_port.get_text())
        else:
            self.configuration.split_proxy("https", self.https_proxy.get_text() + ":" + self.https_proxy_port.get_text())
            self.configuration.split_proxy("ftp", self.ftp_proxy.get_text() + ":" + self.ftp_proxy_port.get_text())
            self.configuration.split_proxy("socks", self.socks_proxy.get_text() + ":" + self.socks_proxy_port.get_text())
            self.configuration.split_proxy("cvs", self.cvs_proxy.get_text() + ":" + self.cvs_proxy_port.get_text())
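
save_proxy_data above always hands split_proxy a "host:port" concatenation, where the host itself may carry a scheme ("http://...") and therefore a colon of its own. Splitting on the last colon is what keeps that unambiguous; a sketch of the round trip (split_host_port is a hypothetical stand-in, the real split_proxy lives on the Configuration object):

def split_host_port(url):
    host, sep, port = url.rpartition(":")
    if not sep or "/" in port:
        # No port present: the only colon (if any) belonged to the scheme.
        return url, ""
    return host, port

assert split_host_port("http://proxy.example.com:8080") == ("http://proxy.example.com", "8080")
assert split_host_port("http://proxy.example.com") == ("http://proxy.example.com", "")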
    def response_cb(self, dialog, response_id):
        if response_id == gtk.RESPONSE_YES:
            if self.proxy_checkbox.get_active():
                # Check that all proxy entries have a corresponding port
                for proxy, port in zip(self.all_proxy_addresses, self.all_proxy_ports):
                    if proxy.get_text() and not port.get_text():
                        lbl = "<b>Enter all port numbers</b>"
                        msg = "Proxy servers require a port number. Please make sure you have entered a port number for each proxy server."
                        dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_WARNING, msg)
                        button = dialog.add_button("Close", gtk.RESPONSE_OK)
                        HobButton.style_button(button)
                        response = dialog.run()
                        dialog.destroy()
                        self.emit_stop_by_name("response")
                        return

        self.configuration.dldir = self.dldir_text.get_text()
        self.configuration.sstatedir = self.sstatedir_text.get_text()
        self.configuration.sstatemirror = ""
        for mirror in self.sstatemirrors_list:
            if mirror[1] != "" and mirror[2].startswith("file://"):
                if mirror[1].endswith("\\1"):
                    smirror = mirror[2] + " " + mirror[1] + " \\n "
                else:
                    smirror = mirror[2] + " " + mirror[1] + "\\1 \\n "
                self.configuration.sstatemirror += smirror
        self.configuration.bbthread = self.bb_spinner.get_value_as_int()
        self.configuration.pmake = self.pmake_spinner.get_value_as_int()
        self.save_proxy_data()
        self.configuration.extra_setting = {}
        it = self.setting_store.get_iter_first()
        while it:
            key = self.setting_store.get_value(it, 0)
            value = self.setting_store.get_value(it, 1)
            self.configuration.extra_setting[key] = value
            it = self.setting_store.iter_next(it)

        md5 = self.config_md5()
        self.settings_changed = (self.md5 != md5)
        self.proxy_settings_changed = (self.proxy_md5 != self.config_proxy_md5())

    def create_build_environment_page(self):
        advanced_vbox = gtk.VBox(False, 6)
        advanced_vbox.set_border_width(6)

        advanced_vbox.pack_start(self.gen_label_widget('<span weight="bold">Parallel threads</span>'), expand=False, fill=False)
        sub_vbox = gtk.VBox(False, 6)
        advanced_vbox.pack_start(sub_vbox, expand=False, fill=False)
        label = self.gen_label_widget("BitBake parallel threads")
        tooltip = "Sets the number of threads that BitBake tasks can simultaneously run. See the <a href=\""
        tooltip += "http://www.yoctoproject.org/docs/current/poky-ref-manual/"
        tooltip += "poky-ref-manual.html#var-BB_NUMBER_THREADS\">Poky reference manual</a> for information"
        bbthread_widget, self.bb_spinner = self.gen_spinner_widget(self.configuration.bbthread, 1, self.max_threads, "<b>BitBake parallel threads</b>" + "*" + tooltip)
        sub_vbox.pack_start(label, expand=False, fill=False)
        sub_vbox.pack_start(bbthread_widget, expand=False, fill=False)

        sub_vbox = gtk.VBox(False, 6)
        advanced_vbox.pack_start(sub_vbox, expand=False, fill=False)
        label = self.gen_label_widget("Make parallel threads")
        tooltip = "Sets the maximum number of threads the host can use during the build. See the <a href=\""
        tooltip += "http://www.yoctoproject.org/docs/current/poky-ref-manual/"
        tooltip += "poky-ref-manual.html#var-PARALLEL_MAKE\">Poky reference manual</a> for information"
        pmake_widget, self.pmake_spinner = self.gen_spinner_widget(self.configuration.pmake, 1, self.max_threads, "<b>Make parallel threads</b>" + "*" + tooltip)
        sub_vbox.pack_start(label, expand=False, fill=False)
        sub_vbox.pack_start(pmake_widget, expand=False, fill=False)

        advanced_vbox.pack_start(self.gen_label_widget('<span weight="bold">Downloaded source code</span>'), expand=False, fill=False)
        sub_vbox = gtk.VBox(False, 6)
        advanced_vbox.pack_start(sub_vbox, expand=False, fill=False)
        label = self.gen_label_widget("Downloads directory")
        tooltip = "Select a folder that caches the upstream project source code"
        dldir_widget, self.dldir_text = self.gen_entry_widget(self.configuration.dldir, self, "<b>Downloaded source code</b>" + "*" + tooltip)
        sub_vbox.pack_start(label, expand=False, fill=False)
        sub_vbox.pack_start(dldir_widget, expand=False, fill=False)

        return advanced_vbox
    def create_shared_state_page(self):
        advanced_vbox = gtk.VBox(False)
        advanced_vbox.set_border_width(12)

        sub_vbox = gtk.VBox(False)
        advanced_vbox.pack_start(sub_vbox, expand=False, fill=False, padding=24)
        content = "<span>Shared state directory</span>"
        tooltip = "Select a folder that caches your prebuilt results"
        label = self.gen_label_info_widget(content, "<b>Shared state directory</b>" + "*" + tooltip)
        sstatedir_widget, self.sstatedir_text = self.gen_entry_widget(self.configuration.sstatedir, self)
        sub_vbox.pack_start(label, expand=False, fill=False)
        sub_vbox.pack_start(sstatedir_widget, expand=False, fill=False, padding=6)

        content = "<span weight=\"bold\">Shared state mirrors</span>"
        tooltip = "URLs pointing to pre-built mirrors that will speed your build. "
        tooltip += "Select the \'Standard\' configuration if the structure of your "
        tooltip += "mirror replicates the structure of your local shared state directory. "
        tooltip += "For more information on shared state mirrors, check the <a href=\""
        tooltip += "http://www.yoctoproject.org/docs/current/poky-ref-manual/"
        tooltip += "poky-ref-manual.html#shared-state\">Yocto Project Reference Manual</a>."
        table = self.gen_label_info_widget(content, "<b>Shared state mirrors</b>" + "*" + tooltip)
        advanced_vbox.pack_start(table, expand=False, fill=False, padding=6)

        sub_vbox = gtk.VBox(False)
        advanced_vbox.pack_start(sub_vbox, gtk.TRUE, gtk.TRUE, 0)

        if self.sstatemirrors_changed == 0:
            self.sstatemirrors_changed = 1
            sstatemirrors = self.configuration.sstatemirror
            if sstatemirrors == "":
                sm_list = ["Standard", "", "file://(.*)"]
                self.sstatemirrors_list.append(sm_list)
            else:
                sstatemirrors = [x for x in sstatemirrors.split('\\n')]
                for sstatemirror in sstatemirrors:
                    sstatemirror_fields = [x for x in sstatemirror.split(' ') if x.strip()]
                    if len(sstatemirror_fields) == 2:
                        if sstatemirror_fields[0] == "file://(.*)" or sstatemirror_fields[0] == "file://.*":
                            sm_list = ["Standard", sstatemirror_fields[1], sstatemirror_fields[0]]
                        else:
                            sm_list = ["Custom", sstatemirror_fields[1], sstatemirror_fields[0]]
                        self.sstatemirrors_list.append(sm_list)

        sstatemirrors_widget, sstatemirrors_store = self.gen_shared_sstate_widget(self.sstatemirrors_list, self)
        sub_vbox.pack_start(sstatemirrors_widget, expand=True, fill=True)

        table = gtk.Table(1, 10, False)
        table.set_col_spacings(6)
        add_mirror_button = HobAltButton("Add mirror")
        add_mirror_button.connect("clicked", self.add_mirror)
        add_mirror_button.set_size_request(120, 30)
        table.attach(add_mirror_button, 1, 2, 0, 1, xoptions=gtk.SHRINK)

        self.delete_button = HobAltButton("Delete mirror")
        self.delete_button.connect("clicked", self.delete_cb)
        self.delete_button.set_size_request(120, 30)
        table.attach(self.delete_button, 3, 4, 0, 1, xoptions=gtk.SHRINK)

        advanced_vbox.pack_start(table, expand=False, fill=False, padding=6)

        return advanced_vbox
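
The parsing in create_shared_state_page above turns the stored SSTATE_MIRRORS-style string into rows for the tree view: entries are "regex url" pairs separated by literal "\n" tokens, and the two standard regex spellings map to the "Standard" configuration. As a standalone function (parse_sstate_mirrors is hypothetical; it keeps the same field order as sstatemirrors_list):

def parse_sstate_mirrors(value):
    rows = []
    for entry in value.split("\\n"):   # entries end with a literal backslash-n
        fields = [f for f in entry.split(" ") if f.strip()]
        if len(fields) == 2:
            kind = "Standard" if fields[0] in ("file://(.*)", "file://.*") else "Custom"
            rows.append([kind, fields[1], fields[0]])
    return rows

mirrors = parse_sstate_mirrors("file://(.*) file:///srv/sstate/\\1 \\n ")
assert mirrors == [["Standard", "file:///srv/sstate/\\1", "file://(.*)"]]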
    def gen_shared_sstate_widget(self, sstatemirrors_list, window):
        hbox = gtk.HBox(False)

        sstatemirrors_store = gtk.ListStore(str, str, str)
        for sstatemirror in sstatemirrors_list:
            sstatemirrors_store.append(sstatemirror)

        self.sstatemirrors_tv = gtk.TreeView()
        self.sstatemirrors_tv.set_rules_hint(True)
        self.sstatemirrors_tv.set_headers_visible(True)
        tree_selection = self.sstatemirrors_tv.get_selection()
        tree_selection.set_mode(gtk.SELECTION_SINGLE)

        # Enable drag and drop of rows, including row moves
        self.sstatemirrors_tv.enable_model_drag_source(gtk.gdk.BUTTON1_MASK,
            self.TARGETS,
            gtk.gdk.ACTION_DEFAULT |
            gtk.gdk.ACTION_MOVE)
        self.sstatemirrors_tv.enable_model_drag_dest(self.TARGETS,
            gtk.gdk.ACTION_DEFAULT)
        self.sstatemirrors_tv.connect("drag_data_get", self.drag_data_get_cb)
        self.sstatemirrors_tv.connect("drag_data_received", self.drag_data_received_cb)

        self.scroll = gtk.ScrolledWindow()
        self.scroll.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC)
        self.scroll.set_shadow_type(gtk.SHADOW_IN)
        self.scroll.connect('size-allocate', self.scroll_changed)
        self.scroll.add(self.sstatemirrors_tv)

        # list store for the cell renderer combo
        m = gtk.ListStore(gobject.TYPE_STRING)
        m.append(["Standard"])
        m.append(["Custom"])

        cell0 = gtk.CellRendererCombo()
        cell0.set_property("model", m)
        cell0.set_property("text-column", 0)
        cell0.set_property("editable", True)
        cell0.set_property("has-entry", False)
        col0 = gtk.TreeViewColumn("Configuration")
        col0.pack_start(cell0, False)
        col0.add_attribute(cell0, "text", 0)
        col0.set_cell_data_func(cell0, self.configuration_field)
        self.sstatemirrors_tv.append_column(col0)

        cell0.connect("edited", self.combo_changed, sstatemirrors_store)

        self.cell1 = gtk.CellRendererText()
        self.cell1.set_padding(5, 2)
        col1 = gtk.TreeViewColumn('Regex', self.cell1)
        col1.set_cell_data_func(self.cell1, self.regex_field)
        self.sstatemirrors_tv.append_column(col1)

        self.cell1.connect("edited", self.regex_changed, sstatemirrors_store)

        cell2 = gtk.CellRendererText()
        cell2.set_padding(5, 2)
        cell2.set_property("editable", True)
        col2 = gtk.TreeViewColumn('URL', cell2)
        col2.set_cell_data_func(cell2, self.url_field)
        self.sstatemirrors_tv.append_column(col2)

        cell2.connect("edited", self.url_changed, sstatemirrors_store)

        self.sstatemirrors_tv.set_model(sstatemirrors_store)
        self.sstatemirrors_tv.set_cursor(self.selected_mirror_row)
        hbox.pack_start(self.scroll, expand=True, fill=True)
        hbox.show_all()

        return hbox, sstatemirrors_store
    def drag_data_get_cb(self, treeview, context, selection, target_id, etime):
        treeselection = treeview.get_selection()
        model, iter = treeselection.get_selected()
        data = model.get_string_from_iter(iter)
        selection.set(selection.target, 8, data)

    def drag_data_received_cb(self, treeview, context, x, y, selection, info, etime):
        model = treeview.get_model()
        data = []
        tree_iter = model.get_iter_from_string(selection.data)
        data.append(model.get_value(tree_iter, 0))
        data.append(model.get_value(tree_iter, 1))
        data.append(model.get_value(tree_iter, 2))

        drop_info = treeview.get_dest_row_at_pos(x, y)
        if drop_info:
            path, position = drop_info
            iter = model.get_iter(path)
            if (position == gtk.TREE_VIEW_DROP_BEFORE or position == gtk.TREE_VIEW_DROP_INTO_OR_BEFORE):
                model.insert_before(iter, data)
            else:
                model.insert_after(iter, data)
        else:
            model.append(data)
        if context.action == gtk.gdk.ACTION_MOVE:
            context.finish(True, True, etime)
        return

    def delete_cb(self, button):
        selection = self.sstatemirrors_tv.get_selection()
        tree_model, tree_iter = selection.get_selected()
        index = int(tree_model.get_string_from_iter(tree_iter))
        if index == 0:
            self.selected_mirror_row = index
        else:
            self.selected_mirror_row = index - 1
        self.sstatemirrors_list.pop(index)
        self.refresh_shared_state_page()
        if not self.sstatemirrors_list:
            self.delete_button.set_sensitive(False)

    def add_mirror(self, button):
        self.new_mirror = True
        tooltip = "Select the pre-built mirror that will speed your build"
        index = len(self.sstatemirrors_list)
        self.selected_mirror_row = index
        sm_list = ["Standard", "", "file://(.*)"]
        self.sstatemirrors_list.append(sm_list)
        self.refresh_shared_state_page()
    def scroll_changed(self, widget, event, data=None):
        if self.new_mirror == True:
            adj = widget.get_vadjustment()
            adj.set_value(adj.upper - adj.page_size)
            self.new_mirror = False

    def combo_changed(self, widget, path, text, model):
        model[path][0] = text
        selection = self.sstatemirrors_tv.get_selection()
        tree_model, tree_iter = selection.get_selected()
        index = int(tree_model.get_string_from_iter(tree_iter))
        self.sstatemirrors_list[index][0] = text

    def regex_changed(self, cell, path, new_text, user_data):
        user_data[path][2] = new_text
        selection = self.sstatemirrors_tv.get_selection()
        tree_model, tree_iter = selection.get_selected()
        index = int(tree_model.get_string_from_iter(tree_iter))
        self.sstatemirrors_list[index][2] = new_text
        return

    def url_changed(self, cell, path, new_text, user_data):
        if new_text != "Enter the mirror URL" and new_text != "Match regex and replace it with this URL":
            user_data[path][1] = new_text
            selection = self.sstatemirrors_tv.get_selection()
            tree_model, tree_iter = selection.get_selected()
            index = int(tree_model.get_string_from_iter(tree_iter))
            self.sstatemirrors_list[index][1] = new_text
        return

    def configuration_field(self, column, cell, model, iter):
        cell.set_property('text', model.get_value(iter, 0))
        if model.get_value(iter, 0) == "Standard":
            self.cell1.set_property("sensitive", False)
            self.cell1.set_property("editable", False)
        else:
            self.cell1.set_property("sensitive", True)
            self.cell1.set_property("editable", True)
        return

    def regex_field(self, column, cell, model, iter):
        cell.set_property('text', model.get_value(iter, 2))
        return

    def url_field(self, column, cell, model, iter):
        text = model.get_value(iter, 1)
        if text == "":
            if model.get_value(iter, 0) == "Standard":
                text = "Enter the mirror URL"
            else:
                text = "Match regex and replace it with this URL"
        cell.set_property('text', text)
        return

    def refresh_shared_state_page(self):
        page_num = self.nb.get_current_page()
        self.nb.remove_page(page_num)
        self.nb.insert_page(self.create_shared_state_page(), gtk.Label("Shared state"), page_num)
        self.show_all()
        self.nb.set_current_page(page_num)
def test_proxy_ended(self, passed):
|
||||
self.proxy_test_running = False
|
||||
self.set_test_proxy_state(self.TEST_NETWORK_PASSED if passed else self.TEST_NETWORK_FAILED)
|
||||
self.set_sensitive(True)
|
||||
self.refresh_proxy_components()
|
||||
|
||||
def timer_func(self):
|
||||
self.test_proxy_progress.pulse()
|
||||
return self.proxy_test_running
|
||||
|
||||
def test_network_button_cb(self, b):
|
||||
self.set_test_proxy_state(self.TEST_NETWORK_RUNNING)
|
||||
self.set_sensitive(False)
|
||||
self.save_proxy_data()
|
||||
if self.configuration.enable_proxy == True:
|
||||
self.handler.set_http_proxy(self.configuration.combine_proxy("http"))
|
||||
self.handler.set_https_proxy(self.configuration.combine_proxy("https"))
|
||||
self.handler.set_ftp_proxy(self.configuration.combine_proxy("ftp"))
|
||||
self.handler.set_socks_proxy(self.configuration.combine_proxy("socks"))
|
||||
self.handler.set_cvs_proxy(self.configuration.combine_host_only("cvs"), self.configuration.combine_port_only("cvs"))
|
||||
elif self.configuration.enable_proxy == False:
|
||||
self.handler.set_http_proxy("")
|
||||
self.handler.set_https_proxy("")
|
||||
self.handler.set_ftp_proxy("")
|
||||
self.handler.set_socks_proxy("")
|
||||
self.handler.set_cvs_proxy("", "")
|
||||
self.proxy_test_ran = True
|
||||
self.proxy_test_running = True
|
||||
gobject.timeout_add(100, self.timer_func)
|
||||
self.handler.trigger_network_test()
|
||||
|
||||
def test_proxy_focus_event(self, w, direction):
|
||||
if self.test_proxy_state in [self.TEST_NETWORK_PASSED, self.TEST_NETWORK_FAILED]:
|
||||
self.set_test_proxy_state(self.TEST_NETWORK_INITIAL)
|
||||
return False
|
||||
|
||||
def http_proxy_changed(self, e):
|
||||
if not self.configuration.same_proxy:
|
||||
return
|
||||
if e == self.http_proxy:
|
||||
[w.set_text(self.http_proxy.get_text()) for w in self.same_proxy_addresses]
|
||||
else:
|
||||
[w.set_text(self.http_proxy_port.get_text()) for w in self.same_proxy_ports]
|
||||
|
||||
def proxy_address_focus_out_event(self, w, direction):
|
||||
text = w.get_text()
|
||||
if not text:
|
||||
return False
|
||||
if text.find("//") == -1:
|
||||
w.set_text("http://" + text)
|
||||
return False
|
||||
|
||||
    def set_test_proxy_state(self, state):
        if self.test_proxy_state == state:
            return
        [self.proxy_table.remove(w) for w in self.test_gui_elements]
        if state == self.TEST_NETWORK_INITIAL:
            self.proxy_table.attach(self.test_network_button, 1, 2, 5, 6)
            self.test_network_button.show()
        elif state == self.TEST_NETWORK_RUNNING:
            self.test_proxy_progress.set_rcstyle("running")
            self.test_proxy_progress.set_text("Testing network configuration")
            self.proxy_table.attach(self.test_proxy_progress, 0, 5, 5, 6, xpadding=4)
            self.test_proxy_progress.show()
        else: # passed or failed
            self.dummy_progress.update(1.0)
            if state == self.TEST_NETWORK_PASSED:
                self.dummy_progress.set_text("Your network is properly configured")
                self.dummy_progress.set_rcstyle("running")
            else:
                self.dummy_progress.set_text("Network test failed")
                self.dummy_progress.set_rcstyle("fail")
            self.proxy_table.attach(self.dummy_progress, 0, 4, 5, 6)
            self.proxy_table.attach(self.retest_network_button, 4, 5, 5, 6, xpadding=4)
            self.dummy_progress.show()
            self.retest_network_button.show()
        self.test_proxy_state = state

    def create_network_page(self):
        advanced_vbox = gtk.VBox(False, 6)
        advanced_vbox.set_border_width(6)
        self.same_proxy_addresses = []
        self.same_proxy_ports = []
        self.all_proxy_ports = []
        self.all_proxy_addresses = []

        sub_vbox = gtk.VBox(False, 6)
        advanced_vbox.pack_start(sub_vbox, expand=False, fill=False)
        label = self.gen_label_widget("<span weight=\"bold\">Set the proxies used when fetching source code</span>")
        tooltip = "Set the proxies used when fetching source code. A blank field uses a direct internet connection."
        info = HobInfoButton("<span weight=\"bold\">Set the proxies used when fetching source code</span>" + "*" + tooltip, self)
        hbox = gtk.HBox(False, 12)
        hbox.pack_start(label, expand=True, fill=True)
        hbox.pack_start(info, expand=False, fill=False)
        sub_vbox.pack_start(hbox, expand=False, fill=False)

        proxy_test_focus = []
        self.direct_checkbox = gtk.RadioButton(None, "Direct network connection")
        proxy_test_focus.append(self.direct_checkbox)
        self.direct_checkbox.set_tooltip_text("Check this box to use a direct internet connection with no proxy")
        self.direct_checkbox.set_active(not self.configuration.enable_proxy)
        sub_vbox.pack_start(self.direct_checkbox, expand=False, fill=False)

        self.proxy_checkbox = gtk.RadioButton(self.direct_checkbox, "Manual proxy configuration")
        proxy_test_focus.append(self.proxy_checkbox)
        self.proxy_checkbox.set_tooltip_text("Check this box to manually set up a specific proxy")
        self.proxy_checkbox.set_active(self.configuration.enable_proxy)
        sub_vbox.pack_start(self.proxy_checkbox, expand=False, fill=False)

        self.same_checkbox = gtk.CheckButton("Use the HTTP proxy for all protocols")
        proxy_test_focus.append(self.same_checkbox)
        self.same_checkbox.set_tooltip_text("Check this box to use the HTTP proxy for all five proxies")
        self.same_checkbox.set_active(self.configuration.same_proxy)
        hbox = gtk.HBox(False, 12)
        hbox.pack_start(self.same_checkbox, expand=False, fill=False, padding=24)
        sub_vbox.pack_start(hbox, expand=False, fill=False)

        self.proxy_table = gtk.Table(6, 5, False)
        self.http_proxy, self.http_proxy_port, self.http_proxy_details = self.gen_proxy_entry_widget(
            "http", self, True, 0)
        proxy_test_focus += [self.http_proxy, self.http_proxy_port]
        self.http_proxy.connect("changed", self.http_proxy_changed)
        self.http_proxy_port.connect("changed", self.http_proxy_changed)

        self.https_proxy, self.https_proxy_port, self.https_proxy_details = self.gen_proxy_entry_widget(
            "https", self, True, 1)
        proxy_test_focus += [self.https_proxy, self.https_proxy_port]
        self.same_proxy_addresses.append(self.https_proxy)
        self.same_proxy_ports.append(self.https_proxy_port)

        self.ftp_proxy, self.ftp_proxy_port, self.ftp_proxy_details = self.gen_proxy_entry_widget(
            "ftp", self, True, 2)
        proxy_test_focus += [self.ftp_proxy, self.ftp_proxy_port]
        self.same_proxy_addresses.append(self.ftp_proxy)
        self.same_proxy_ports.append(self.ftp_proxy_port)

        self.socks_proxy, self.socks_proxy_port, self.socks_proxy_details = self.gen_proxy_entry_widget(
            "socks", self, True, 3)
        proxy_test_focus += [self.socks_proxy, self.socks_proxy_port]
        self.same_proxy_addresses.append(self.socks_proxy)
        self.same_proxy_ports.append(self.socks_proxy_port)

        self.cvs_proxy, self.cvs_proxy_port, self.cvs_proxy_details = self.gen_proxy_entry_widget(
            "cvs", self, True, 4)
        proxy_test_focus += [self.cvs_proxy, self.cvs_proxy_port]
        self.same_proxy_addresses.append(self.cvs_proxy)
        self.same_proxy_ports.append(self.cvs_proxy_port)
        self.all_proxy_ports = self.same_proxy_ports + [self.http_proxy_port]
        self.all_proxy_addresses = self.same_proxy_addresses + [self.http_proxy]
        sub_vbox.pack_start(self.proxy_table, expand=False, fill=False)
        self.proxy_table.show_all()

        # Create the graphical elements for the network test feature, but don't display them yet
        self.test_network_button = HobAltButton("Test network configuration")
        self.test_network_button.connect("clicked", self.test_network_button_cb)
        self.test_proxy_progress = HobProgressBar()
        self.dummy_progress = HobProgressBar()
        self.retest_network_button = HobAltButton("Retest")
        self.retest_network_button.connect("clicked", self.test_network_button_cb)
        self.test_gui_elements = [self.test_network_button, self.test_proxy_progress, self.dummy_progress, self.retest_network_button]
        # Initialize the network tester
        self.test_proxy_state = self.TEST_NETWORK_NONE
        self.set_test_proxy_state(self.TEST_NETWORK_INITIAL)
        self.proxy_test_passed_id = self.handler.connect("network-passed", lambda h: self.test_proxy_ended(True))
        self.proxy_test_failed_id = self.handler.connect("network-failed", lambda h: self.test_proxy_ended(False))
        [w.connect("focus-in-event", self.test_proxy_focus_event) for w in proxy_test_focus]
        [w.connect("focus-out-event", self.proxy_address_focus_out_event) for w in self.all_proxy_addresses]

        self.direct_checkbox.connect("toggled", self.proxy_checkbox_toggled_cb)
        self.proxy_checkbox.connect("toggled", self.proxy_checkbox_toggled_cb)
        self.same_checkbox.connect("toggled", self.same_checkbox_toggled_cb)

        self.refresh_proxy_components()
        return advanced_vbox

    def switch_to_page(self, page_id):
        self.nb.set_current_page(page_id)

    def details_cb(self, button, parent, protocol):
        self.save_proxy_data()
        dialog = ProxyDetailsDialog(title = protocol.upper() + " Proxy Details",
            user = self.configuration.proxies[protocol][1],
            passwd = self.configuration.proxies[protocol][2],
            parent = parent,
            flags = gtk.DIALOG_MODAL
                    | gtk.DIALOG_DESTROY_WITH_PARENT
                    | gtk.DIALOG_NO_SEPARATOR)
        dialog.add_button(gtk.STOCK_CLOSE, gtk.RESPONSE_OK)
        response = dialog.run()
        if response == gtk.RESPONSE_OK:
            self.configuration.proxies[protocol][1] = dialog.user
            self.configuration.proxies[protocol][2] = dialog.passwd
            self.refresh_proxy_components()
        dialog.destroy()

    def rootfs_combo_changed_cb(self, rootfs_combo, all_package_format, check_hbox):
        combo_item = self.rootfs_combo.get_active_text()
        for child in check_hbox.get_children():
            if isinstance(child, gtk.CheckButton):
                check_hbox.remove(child)
        for format in all_package_format:
            if format != combo_item:
                check_button = gtk.CheckButton(format)
                check_hbox.pack_start(check_button, expand=False, fill=False)
        check_hbox.show_all()

    def gen_pkgfmt_widget(self, curr_package_format, all_package_format, tooltip_combo="", tooltip_extra=""):
        pkgfmt_hbox = gtk.HBox(False, 24)

        rootfs_vbox = gtk.VBox(False, 6)
        pkgfmt_hbox.pack_start(rootfs_vbox, expand=False, fill=False)

        label = self.gen_label_widget("Root file system package format")
        rootfs_vbox.pack_start(label, expand=False, fill=False)

        rootfs_format = ""
        if curr_package_format:
            rootfs_format = curr_package_format.split()[0]

        rootfs_format_widget, rootfs_combo = self.gen_combo_widget(rootfs_format, all_package_format, tooltip_combo)
        rootfs_vbox.pack_start(rootfs_format_widget, expand=False, fill=False)

        extra_vbox = gtk.VBox(False, 6)
        pkgfmt_hbox.pack_start(extra_vbox, expand=False, fill=False)

        label = self.gen_label_widget("Additional package formats")
        extra_vbox.pack_start(label, expand=False, fill=False)

        check_hbox = gtk.HBox(False, 12)
        extra_vbox.pack_start(check_hbox, expand=False, fill=False)
        for format in all_package_format:
            if format != rootfs_format:
                check_button = gtk.CheckButton(format)
                is_active = (format in curr_package_format.split())
                check_button.set_active(is_active)
                check_hbox.pack_start(check_button, expand=False, fill=False)

        info = HobInfoButton(tooltip_extra, self)
        check_hbox.pack_end(info, expand=False, fill=False)

        rootfs_combo.connect("changed", self.rootfs_combo_changed_cb, all_package_format, check_hbox)

        pkgfmt_hbox.show_all()

        return pkgfmt_hbox, rootfs_combo, check_hbox

    def editable_settings_cell_edited(self, cell, path_string, new_text, model):
        it = model.get_iter_from_string(path_string)
        column = cell.get_data("column")
        model.set(it, column, new_text)

    def editable_settings_add_item_clicked(self, button, model):
        new_item = ["##KEY##", "##VALUE##"]

        iter = model.append()
        model.set(iter,
            0, new_item[0],
            1, new_item[1],
        )

    def editable_settings_remove_item_clicked(self, button, treeview):
        selection = treeview.get_selection()
        model, iter = selection.get_selected()

        if iter:
            path = model.get_path(iter)[0]
            model.remove(iter)
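
    # gen_editable_settings() backs the "Others" page: a two-column (key,
    # value) gtk.ListStore rendered in an editable TreeView, with Add/Remove
    # buttons wired to the *_clicked handlers above.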
    def gen_editable_settings(self, setting, tooltip=""):
        setting_hbox = gtk.HBox(False, 12)

        vbox = gtk.VBox(False, 12)
        setting_hbox.pack_start(vbox, expand=True, fill=True)

        setting_store = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_STRING)
        for key in setting.keys():
            setting_store.set(setting_store.append(), 0, key, 1, setting[key])

        setting_tree = gtk.TreeView(setting_store)
        setting_tree.set_headers_visible(True)
        setting_tree.set_size_request(300, 100)

        col = gtk.TreeViewColumn('Key')
        col.set_min_width(100)
        col.set_max_width(150)
        col.set_resizable(True)
        col1 = gtk.TreeViewColumn('Value')
        col1.set_min_width(100)
        col1.set_max_width(150)
        col1.set_resizable(True)
        setting_tree.append_column(col)
        setting_tree.append_column(col1)
        cell = gtk.CellRendererText()
        cell.set_property('width-chars', 10)
        cell.set_property('editable', True)
        cell.set_data("column", 0)
        cell.connect("edited", self.editable_settings_cell_edited, setting_store)
        cell1 = gtk.CellRendererText()
        cell1.set_property('width-chars', 10)
        cell1.set_property('editable', True)
        cell1.set_data("column", 1)
        cell1.connect("edited", self.editable_settings_cell_edited, setting_store)
        col.pack_start(cell, True)
        col1.pack_end(cell1, True)
        col.set_attributes(cell, text=0)
        col1.set_attributes(cell1, text=1)

        scroll = gtk.ScrolledWindow()
        scroll.set_shadow_type(gtk.SHADOW_IN)
        scroll.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
        scroll.add(setting_tree)
        vbox.pack_start(scroll, expand=True, fill=True)

        # some buttons
        hbox = gtk.HBox(True, 6)
        vbox.pack_start(hbox, False, False)

        button = gtk.Button(stock=gtk.STOCK_ADD)
        button.connect("clicked", self.editable_settings_add_item_clicked, setting_store)
        hbox.pack_start(button)

        button = gtk.Button(stock=gtk.STOCK_REMOVE)
        button.connect("clicked", self.editable_settings_remove_item_clicked, setting_tree)
        hbox.pack_start(button)

        info = HobInfoButton(tooltip, self)
        setting_hbox.pack_start(info, expand=False, fill=False)

        return setting_hbox, setting_store

    def create_others_page(self):
        advanced_vbox = gtk.VBox(False, 6)
        advanced_vbox.set_border_width(6)

        sub_vbox = gtk.VBox(False, 6)
        advanced_vbox.pack_start(sub_vbox, expand=True, fill=True)
        label = self.gen_label_widget("<span weight=\"bold\">Add your own variables:</span>")
        tooltip = "These are key/value pairs for your extra settings. Click 'Add' and then directly edit the key and the value"
        setting_widget, self.setting_store = self.gen_editable_settings(self.configuration.extra_setting, "<b>Add your own variables</b>" + "*" + tooltip)
        sub_vbox.pack_start(label, expand=False, fill=False)
        sub_vbox.pack_start(setting_widget, expand=True, fill=True)

        return advanced_vbox

    def create_visual_elements(self):
        self.nb = gtk.Notebook()
        self.nb.set_show_tabs(True)
        self.nb.append_page(self.create_build_environment_page(), gtk.Label("Build environment"))
        self.nb.append_page(self.create_shared_state_page(), gtk.Label("Shared state"))
        self.nb.append_page(self.create_network_page(), gtk.Label("Network"))
        self.nb.append_page(self.create_others_page(), gtk.Label("Others"))
        self.nb.set_current_page(0)
        self.vbox.pack_start(self.nb, expand=True, fill=True)
        self.vbox.pack_end(gtk.HSeparator(), expand=True, fill=True)

        self.show_all()

    def destroy(self):
        self.handler.disconnect(self.proxy_test_passed_id)
        self.handler.disconnect(self.proxy_test_failed_id)
        super(SimpleSettingsDialog, self).destroy()

bitbake/lib/bb/ui/crumbs/hobeventhandler.py (new file, 639 lines)
@@ -0,0 +1,639 @@
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2011 Intel Corporation
#
# Authored by Joshua Lock <josh@linux.intel.com>
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import gobject
import logging
import ast
from bb.ui.crumbs.runningbuild import RunningBuild

class HobHandler(gobject.GObject):

    """
    This object does BitBake event handling for the hob gui.
    """
    __gsignals__ = {
        "package-formats-updated" : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     (gobject.TYPE_PYOBJECT,)),
        "config-updated"          : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     (gobject.TYPE_STRING, gobject.TYPE_PYOBJECT,)),
        "command-succeeded"       : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     (gobject.TYPE_INT,)),
        "command-failed"          : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     (gobject.TYPE_STRING,)),
        "parsing-warning"         : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     (gobject.TYPE_STRING,)),
        "sanity-failed"           : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     (gobject.TYPE_STRING, gobject.TYPE_INT)),
        "generating-data"         : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     ()),
        "data-generated"          : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     ()),
        "parsing-started"         : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     (gobject.TYPE_PYOBJECT,)),
        "parsing"                 : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     (gobject.TYPE_PYOBJECT,)),
        "parsing-completed"       : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     (gobject.TYPE_PYOBJECT,)),
        "recipe-populated"        : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     ()),
        "package-populated"       : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     ()),
        "network-passed"          : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     ()),
        "network-failed"          : (gobject.SIGNAL_RUN_LAST,
                                     gobject.TYPE_NONE,
                                     ()),
    }

    (GENERATE_CONFIGURATION, GENERATE_RECIPES, GENERATE_PACKAGES, GENERATE_IMAGE, POPULATE_PACKAGEINFO, SANITY_CHECK, NETWORK_TEST) = range(7)
    (SUB_PATH_LAYERS, SUB_FILES_DISTRO, SUB_FILES_MACH, SUB_FILES_SDKMACH, SUB_MATCH_CLASS, SUB_PARSE_CONFIG, SUB_SANITY_CHECK,
     SUB_GNERATE_TGTS, SUB_GENERATE_PKGINFO, SUB_BUILD_RECIPES, SUB_BUILD_IMAGE, SUB_NETWORK_TEST) = range(12)

    def __init__(self, server, recipe_model, package_model):
        super(HobHandler, self).__init__()

        self.build = RunningBuild(sequential=True)

        self.recipe_model = recipe_model
        self.package_model = package_model

        self.commands_async = []
        self.generating = False
        self.current_phase = None
        self.building = False
        self.recipe_queue = []
        self.package_queue = []

        self.server = server
        self.error_msg = ""
        self.initcmd = None
        self.parsing = False

    def set_busy(self):
        if not self.generating:
            self.emit("generating-data")
            self.generating = True

    def clear_busy(self):
        if self.generating:
            self.emit("data-generated")
            self.generating = False

    def runCommand(self, commandline):
        try:
            result, error = self.server.runCommand(commandline)
            if error:
                raise Exception("Error running command '%s': %s" % (commandline, error))
            return result
        except Exception as e:
            self.commands_async = []
            self.clear_busy()
            self.emit("command-failed", "Hob Exception - %s" % (str(e)))
            return None
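
    # commands_async is a FIFO of SUB_* steps; each BitBake CommandCompleted
    # event pops and dispatches the next one until the queue drains, at which
    # point the initiating command (initcmd) is reported as succeeded.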
    def run_next_command(self, initcmd=None):
        if initcmd != None:
            self.initcmd = initcmd

        if self.commands_async:
            self.set_busy()
            next_command = self.commands_async.pop(0)
        else:
            self.clear_busy()
            if self.initcmd != None:
                self.emit("command-succeeded", self.initcmd)
            return

        if next_command == self.SUB_PATH_LAYERS:
            self.runCommand(["findConfigFilePath", "bblayers.conf"])
        elif next_command == self.SUB_FILES_DISTRO:
            self.runCommand(["findConfigFiles", "DISTRO"])
        elif next_command == self.SUB_FILES_MACH:
            self.runCommand(["findConfigFiles", "MACHINE"])
        elif next_command == self.SUB_FILES_SDKMACH:
            self.runCommand(["findConfigFiles", "MACHINE-SDK"])
        elif next_command == self.SUB_MATCH_CLASS:
            self.runCommand(["findFilesMatchingInDir", "rootfs_", "classes"])
        elif next_command == self.SUB_PARSE_CONFIG:
            self.runCommand(["resetCooker"])
        elif next_command == self.SUB_GNERATE_TGTS:
            self.runCommand(["generateTargetsTree", "classes/image.bbclass", []])
        elif next_command == self.SUB_GENERATE_PKGINFO:
            self.runCommand(["triggerEvent", "bb.event.RequestPackageInfo()"])
        elif next_command == self.SUB_SANITY_CHECK:
            self.runCommand(["triggerEvent", "bb.event.SanityCheck()"])
        elif next_command == self.SUB_NETWORK_TEST:
            self.runCommand(["triggerEvent", "bb.event.NetworkTest()"])
        elif next_command == self.SUB_BUILD_RECIPES:
            self.clear_busy()
            self.building = True
            self.runCommand(["buildTargets", self.recipe_queue, self.default_task])
            self.recipe_queue = []
        elif next_command == self.SUB_BUILD_IMAGE:
            self.clear_busy()
            self.building = True
            target = self.image

            if self.base_image:
                # Request the build of a custom image
                self.generate_hob_base_image(target)
                self.set_var_in_file("LINGUAS_INSTALL", "", "local.conf")
                hobImage = self.runCommand(["matchFile", target + ".bb"])
                if self.base_image != self.recipe_model.__custom_image__:
                    baseImage = self.runCommand(["matchFile", self.base_image + ".bb"])
                    version = self.runCommand(["generateNewImage", hobImage, baseImage, self.package_queue, True, ""])
                    target += version
                    self.recipe_model.set_custom_image_version(version)

            targets = [target]
            if self.toolchain_packages:
                self.set_var_in_file("TOOLCHAIN_TARGET_TASK", " ".join(self.toolchain_packages), "local.conf")
                targets.append(target + ":do_populate_sdk")

            self.runCommand(["buildTargets", targets, self.default_task])

    def display_error(self):
        self.clear_busy()
        self.emit("command-failed", self.error_msg)
        self.error_msg = ""
        if self.building:
            self.building = False
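
    # Central dispatcher for BitBake events: build output is forwarded to the
    # RunningBuild model while parsing, progress and network-test events are
    # translated into the gobject signals declared above.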
    def handle_event(self, event):
        if not event:
            return
        if self.building:
            self.current_phase = "building"
            self.build.handle_event(event)

        if isinstance(event, bb.event.PackageInfo):
            self.package_model.populate(event._pkginfolist)
            self.emit("package-populated")
            self.run_next_command()

        elif isinstance(event, bb.event.SanityCheckPassed):
            reparse = self.runCommand(["getVariable", "BB_INVALIDCONF"]) or None
            if reparse is True:
                self.set_var_in_file("BB_INVALIDCONF", False, "local.conf")
                self.runCommand(["setPrePostConfFiles", "conf/.hob.conf", ""])
                # Python lists have no prepend(); insert at the head so the
                # configuration is reparsed before any queued commands run.
                self.commands_async.insert(0, self.SUB_PARSE_CONFIG)
            self.run_next_command()

        elif isinstance(event, bb.event.SanityCheckFailed):
            self.emit("sanity-failed", event._msg, event._network_error)

        elif isinstance(event, logging.LogRecord):
            if not self.building:
                if event.levelno >= logging.ERROR:
                    formatter = bb.msg.BBLogFormatter()
                    msg = formatter.format(event)
                    self.error_msg += msg + '\n'
                elif event.levelno >= logging.WARNING and self.parsing == True:
                    formatter = bb.msg.BBLogFormatter()
                    msg = formatter.format(event)
                    warn_msg = msg + '\n'
                    self.emit("parsing-warning", warn_msg)

        elif isinstance(event, bb.event.TargetsTreeGenerated):
            self.current_phase = "data generation"
            if event._model:
                self.recipe_model.populate(event._model)
                self.emit("recipe-populated")
        elif isinstance(event, bb.event.ConfigFilesFound):
            self.current_phase = "configuration lookup"
            var = event._variable
            values = event._values
            values.sort()
            self.emit("config-updated", var, values)
        elif isinstance(event, bb.event.ConfigFilePathFound):
            self.current_phase = "configuration lookup"
        elif isinstance(event, bb.event.FilesMatchingFound):
            self.current_phase = "configuration lookup"
            # FIXME: hard coding, should at least be a variable shared between
            # here and the caller
            if event._pattern == "rootfs_":
                formats = []
                for match in event._matches:
                    classname, sep, cls = match.rpartition(".")
                    fs, sep, format = classname.rpartition("_")
                    formats.append(format)
                formats.sort()
                self.emit("package-formats-updated", formats)
        elif isinstance(event, bb.command.CommandCompleted):
            self.current_phase = None
            self.run_next_command()
        elif isinstance(event, bb.command.CommandFailed):
            if event.error not in ("Forced shutdown", "Stopped build"):
                self.error_msg += event.error
            self.commands_async = []
            self.display_error()
        elif isinstance(event, (bb.event.ParseStarted,
                                bb.event.CacheLoadStarted,
                                bb.event.TreeDataPreparationStarted,
                                )):
            message = {}
            message["eventname"] = bb.event.getName(event)
            message["current"] = 0
            message["total"] = None
            message["title"] = "Parsing recipes"
            self.emit("parsing-started", message)
            if isinstance(event, bb.event.ParseStarted):
                self.parsing = True
        elif isinstance(event, (bb.event.ParseProgress,
                                bb.event.CacheLoadProgress,
                                bb.event.TreeDataPreparationProgress)):
            message = {}
            message["eventname"] = bb.event.getName(event)
            message["current"] = event.current
            message["total"] = event.total
            message["title"] = "Parsing recipes"
            self.emit("parsing", message)
        elif isinstance(event, (bb.event.ParseCompleted,
                                bb.event.CacheLoadCompleted,
                                bb.event.TreeDataPreparationCompleted)):
            message = {}
            message["eventname"] = bb.event.getName(event)
            message["current"] = event.total
            message["total"] = event.total
            message["title"] = "Parsing recipes"
            self.emit("parsing-completed", message)
            if isinstance(event, bb.event.ParseCompleted):
                self.parsing = False
        elif isinstance(event, bb.event.NetworkTestFailed):
            self.emit("network-failed")
            self.run_next_command()
        elif isinstance(event, bb.event.NetworkTestPassed):
            self.emit("network-passed")
            self.run_next_command()

        if self.error_msg and not self.commands_async:
            self.display_error()

        return

    def init_cooker(self):
        self.runCommand(["createConfigFile", ".hob.conf"])

    def set_extra_inherit(self, bbclass):
        self.append_var_in_file("INHERIT", bbclass, ".hob.conf")

    def set_bblayers(self, bblayers):
        self.set_var_in_file("BBLAYERS", " ".join(bblayers), "bblayers.conf")

    def set_machine(self, machine):
        if machine:
            self.early_assign_var_in_file("MACHINE", machine, "local.conf")

    def set_sdk_machine(self, sdk_machine):
        self.set_var_in_file("SDKMACHINE", sdk_machine, "local.conf")

    def set_image_fstypes(self, image_fstypes):
        self.set_var_in_file("IMAGE_FSTYPES", image_fstypes, "local.conf")

    def set_distro(self, distro):
        self.set_var_in_file("DISTRO", distro, "local.conf")

    def set_package_format(self, format):
        package_classes = ""
        for pkgfmt in format.split():
            package_classes += ("package_%s" % pkgfmt + " ")
        self.set_var_in_file("PACKAGE_CLASSES", package_classes, "local.conf")

    def set_bbthreads(self, threads):
        self.set_var_in_file("BB_NUMBER_THREADS", threads, "local.conf")

    def set_pmake(self, threads):
        pmake = "-j %s" % threads
        self.set_var_in_file("PARALLEL_MAKE", pmake, "local.conf")

    def set_dl_dir(self, directory):
        self.set_var_in_file("DL_DIR", directory, "local.conf")

    def set_sstate_dir(self, directory):
        self.set_var_in_file("SSTATE_DIR", directory, "local.conf")

    def set_sstate_mirrors(self, url):
        self.set_var_in_file("SSTATE_MIRRORS", url, "local.conf")

    def set_extra_size(self, image_extra_size):
        self.set_var_in_file("IMAGE_ROOTFS_EXTRA_SPACE", str(image_extra_size), "local.conf")

    def set_rootfs_size(self, image_rootfs_size):
        self.set_var_in_file("IMAGE_ROOTFS_SIZE", str(image_rootfs_size), "local.conf")

    def set_incompatible_license(self, incompat_license):
        self.set_var_in_file("INCOMPATIBLE_LICENSE", incompat_license, "local.conf")

    def set_extra_setting(self, extra_setting):
        self.set_var_in_file("EXTRA_SETTING", extra_setting, "local.conf")
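
    # EXTRA_SETTING is stored in local.conf as the repr() of a dict;
    # ast.literal_eval() below safely turns that string back into a dict so
    # stale keys can be diffed against the new settings and removed.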
    def set_extra_config(self, extra_setting):
        old_extra_setting = self.runCommand(["getVariable", "EXTRA_SETTING"]) or {}
        old_extra_setting = str(old_extra_setting)

        old_extra_setting = ast.literal_eval(old_extra_setting)
        if not type(old_extra_setting) == dict:
            old_extra_setting = {}

        # settings not changed
        if old_extra_setting == extra_setting:
            return

        # remove the old EXTRA_SETTING variable
        self.remove_var_from_file("EXTRA_SETTING")

        # remove old settings from conf
        for key in old_extra_setting.keys():
            if key not in extra_setting:
                self.remove_var_from_file(key)

        # add new settings
        for key, value in extra_setting.iteritems():
            self.set_var_in_file(key, value, "local.conf")

        if extra_setting:
            self.set_var_in_file("EXTRA_SETTING", extra_setting, "local.conf")

    def set_http_proxy(self, http_proxy):
        self.set_var_in_file("http_proxy", http_proxy, "local.conf")

    def set_https_proxy(self, https_proxy):
        self.set_var_in_file("https_proxy", https_proxy, "local.conf")

    def set_ftp_proxy(self, ftp_proxy):
        self.set_var_in_file("ftp_proxy", ftp_proxy, "local.conf")

    def set_socks_proxy(self, socks_proxy):
        self.set_var_in_file("all_proxy", socks_proxy, "local.conf")

    def set_cvs_proxy(self, host, port):
        self.set_var_in_file("CVS_PROXY_HOST", host, "local.conf")
        self.set_var_in_file("CVS_PROXY_PORT", port, "local.conf")

    def request_package_info(self):
        self.commands_async.append(self.SUB_GENERATE_PKGINFO)
        self.run_next_command(self.POPULATE_PACKAGEINFO)

    def trigger_sanity_check(self):
        self.commands_async.append(self.SUB_SANITY_CHECK)
        self.run_next_command(self.SANITY_CHECK)

    def trigger_network_test(self):
        self.commands_async.append(self.SUB_NETWORK_TEST)
        self.run_next_command(self.NETWORK_TEST)

    def generate_configuration(self):
        self.runCommand(["setPrePostConfFiles", "conf/.hob.conf", ""])
        self.commands_async.append(self.SUB_PARSE_CONFIG)
        self.commands_async.append(self.SUB_PATH_LAYERS)
        self.commands_async.append(self.SUB_FILES_DISTRO)
        self.commands_async.append(self.SUB_FILES_MACH)
        self.commands_async.append(self.SUB_FILES_SDKMACH)
        self.commands_async.append(self.SUB_MATCH_CLASS)
        self.run_next_command(self.GENERATE_CONFIGURATION)

    def generate_recipes(self):
        self.runCommand(["setPrePostConfFiles", "conf/.hob.conf", ""])
        self.commands_async.append(self.SUB_PARSE_CONFIG)
        self.commands_async.append(self.SUB_GNERATE_TGTS)
        self.run_next_command(self.GENERATE_RECIPES)

    def generate_packages(self, tgts, default_task="build"):
        targets = []
        targets.extend(tgts)
        self.recipe_queue = targets
        self.default_task = default_task
        self.runCommand(["setPrePostConfFiles", "conf/.hob.conf", ""])
        self.commands_async.append(self.SUB_PARSE_CONFIG)
        self.commands_async.append(self.SUB_BUILD_RECIPES)
        self.run_next_command(self.GENERATE_PACKAGES)

    def generate_image(self, image, base_image, image_packages=[], toolchain_packages=[], default_task="build"):
        self.image = image
        self.base_image = base_image
        self.package_queue = image_packages
        self.toolchain_packages = toolchain_packages
        self.default_task = default_task
        self.runCommand(["setPrePostConfFiles", "conf/.hob.conf", ""])
        self.commands_async.append(self.SUB_PARSE_CONFIG)
        self.commands_async.append(self.SUB_BUILD_IMAGE)
        self.run_next_command(self.GENERATE_IMAGE)

    def generate_new_image(self, image, base_image, package_queue, description):
        if base_image:
            base_image = self.runCommand(["matchFile", self.base_image + ".bb"])
        self.runCommand(["generateNewImage", image, base_image, package_queue, False, description])

    def generate_hob_base_image(self, hob_image):
        image_dir = self.get_topdir() + "/recipes/images/"
        recipe_name = hob_image + ".bb"
        self.ensure_dir(image_dir)
        self.generate_new_image(image_dir + recipe_name, None, [], "")

    def ensure_dir(self, directory):
        self.runCommand(["ensureDir", directory])

    def build_succeeded_async(self):
        self.building = False

    def build_failed_async(self):
        self.initcmd = None
        self.commands_async = []
        self.building = False

    def cancel_parse(self):
        self.runCommand(["stateForceShutdown"])

    def cancel_build(self, force=False):
        if force:
            # Force the cooker to stop as quickly as possible
            self.runCommand(["stateForceShutdown"])
        else:
            # Wait for tasks to complete before shutting down; this helps
            # leave the workdir in a usable state
            self.runCommand(["stateShutdown"])

    def reset_build(self):
        self.build.reset()

    def get_logfile(self):
        return self.server.runCommand(["getVariable", "BB_CONSOLELOG"])[0]

    def get_topdir(self):
        return self.runCommand(["getVariable", "TOPDIR"]) or ""

    def _remove_redundant(self, string):
        ret = []
        for i in string.split():
            if i not in ret:
                ret.append(i)
        return " ".join(ret)

    def set_var_in_file(self, var, val, default_file=None):
        self.runCommand(["enableDataTracking"])
        self.server.runCommand(["setVarFile", var, val, default_file, "set"])
        self.runCommand(["disableDataTracking"])

    def early_assign_var_in_file(self, var, val, default_file=None):
        self.runCommand(["enableDataTracking"])
        self.server.runCommand(["setVarFile", var, val, default_file, "earlyAssign"])
        self.runCommand(["disableDataTracking"])

    def remove_var_from_file(self, var):
        self.server.runCommand(["removeVarFile", var])

    def append_var_in_file(self, var, val, default_file=None):
        self.server.runCommand(["setVarFile", var, val, default_file, "append"])

    def append_to_bbfiles(self, val):
        bbfiles = self.runCommand(["getVariable", "BBFILES", "False"]) or ""
        bbfiles = bbfiles.split()
        if val not in bbfiles:
            self.append_var_in_file("BBFILES", val, "bblayers.conf")
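
    # get_parameters() snapshots the BitBake configuration the GUI needs,
    # falling back to safe defaults whenever a variable is unset or fails to
    # parse as the expected type.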
    def get_parameters(self):
        # retrieve the parameters from bitbake
        params = {}
        params["core_base"] = self.runCommand(["getVariable", "COREBASE"]) or ""
        params["layer"] = self.runCommand(["getVariable", "BBLAYERS"]) or ""
        params["layers_non_removable"] = self.runCommand(["getVariable", "BBLAYERS_NON_REMOVABLE"]) or ""
        params["dldir"] = self.runCommand(["getVariable", "DL_DIR"]) or ""
        params["machine"] = self.runCommand(["getVariable", "MACHINE"]) or ""
        params["distro"] = self.runCommand(["getVariable", "DISTRO"]) or "defaultsetup"
        params["pclass"] = self.runCommand(["getVariable", "PACKAGE_CLASSES"]) or ""
        params["sstatedir"] = self.runCommand(["getVariable", "SSTATE_DIR"]) or ""
        params["sstatemirror"] = self.runCommand(["getVariable", "SSTATE_MIRRORS"]) or ""

        num_threads = self.runCommand(["getCpuCount"])
        if not num_threads:
            num_threads = 1
            max_threads = 65536
        else:
            try:
                num_threads = int(num_threads)
                max_threads = 16 * num_threads
            except:
                num_threads = 1
                max_threads = 65536
        params["max_threads"] = max_threads

        bbthread = self.runCommand(["getVariable", "BB_NUMBER_THREADS"])
        if not bbthread:
            bbthread = num_threads
        else:
            try:
                bbthread = int(bbthread)
            except:
                bbthread = num_threads
        params["bbthread"] = bbthread

        pmake = self.runCommand(["getVariable", "PARALLEL_MAKE"])
        if not pmake:
            pmake = num_threads
        elif isinstance(pmake, int):
            pass
        else:
            try:
                pmake = int(pmake.lstrip("-j "))
            except:
                pmake = num_threads
        params["pmake"] = "-j %s" % pmake

        params["image_addr"] = self.runCommand(["getVariable", "DEPLOY_DIR_IMAGE"]) or ""

        image_extra_size = self.runCommand(["getVariable", "IMAGE_ROOTFS_EXTRA_SPACE"])
        if not image_extra_size:
            image_extra_size = 0
        else:
            try:
                image_extra_size = int(image_extra_size)
            except:
                image_extra_size = 0
        params["image_extra_size"] = image_extra_size

        image_rootfs_size = self.runCommand(["getVariable", "IMAGE_ROOTFS_SIZE"])
        if not image_rootfs_size:
            image_rootfs_size = 0
        else:
            try:
                image_rootfs_size = int(image_rootfs_size)
            except:
                image_rootfs_size = 0
        params["image_rootfs_size"] = image_rootfs_size

        image_overhead_factor = self.runCommand(["getVariable", "IMAGE_OVERHEAD_FACTOR"])
        if not image_overhead_factor:
            image_overhead_factor = 1
        else:
            try:
                image_overhead_factor = float(image_overhead_factor)
            except:
                image_overhead_factor = 1
        params['image_overhead_factor'] = image_overhead_factor

        params["incompat_license"] = self._remove_redundant(self.runCommand(["getVariable", "INCOMPATIBLE_LICENSE"]) or "")
        params["sdk_machine"] = self.runCommand(["getVariable", "SDKMACHINE"]) or self.runCommand(["getVariable", "SDK_ARCH"]) or ""

        params["image_fstypes"] = self._remove_redundant(self.runCommand(["getVariable", "IMAGE_FSTYPES"]) or "")

        params["image_types"] = self._remove_redundant(self.runCommand(["getVariable", "IMAGE_TYPES"]) or "")

        params["conf_version"] = self.runCommand(["getVariable", "CONF_VERSION"]) or ""
        params["lconf_version"] = self.runCommand(["getVariable", "LCONF_VERSION"]) or ""

        params["runnable_image_types"] = self._remove_redundant(self.runCommand(["getVariable", "RUNNABLE_IMAGE_TYPES"]) or "")
        params["runnable_machine_patterns"] = self._remove_redundant(self.runCommand(["getVariable", "RUNNABLE_MACHINE_PATTERNS"]) or "")
        params["deployable_image_types"] = self._remove_redundant(self.runCommand(["getVariable", "DEPLOYABLE_IMAGE_TYPES"]) or "")
        params["kernel_image_type"] = self.runCommand(["getVariable", "KERNEL_IMAGETYPE"]) or ""
        params["tmpdir"] = self.runCommand(["getVariable", "TMPDIR"]) or ""
        params["distro_version"] = self.runCommand(["getVariable", "DISTRO_VERSION"]) or ""
        params["target_os"] = self.runCommand(["getVariable", "TARGET_OS"]) or ""
        params["target_arch"] = self.runCommand(["getVariable", "TARGET_ARCH"]) or ""
        params["tune_pkgarch"] = self.runCommand(["getVariable", "TUNE_PKGARCH"]) or ""
        params["bb_version"] = self.runCommand(["getVariable", "BB_MIN_VERSION"]) or ""

        params["default_task"] = self.runCommand(["getVariable", "BB_DEFAULT_TASK"]) or "build"

        params["socks_proxy"] = self.runCommand(["getVariable", "all_proxy"]) or ""
        params["http_proxy"] = self.runCommand(["getVariable", "http_proxy"]) or ""
        params["ftp_proxy"] = self.runCommand(["getVariable", "ftp_proxy"]) or ""
        params["https_proxy"] = self.runCommand(["getVariable", "https_proxy"]) or ""

        params["cvs_proxy_host"] = self.runCommand(["getVariable", "CVS_PROXY_HOST"]) or ""
        params["cvs_proxy_port"] = self.runCommand(["getVariable", "CVS_PROXY_PORT"]) or ""

        params["image_white_pattern"] = self.runCommand(["getVariable", "BBUI_IMAGE_WHITE_PATTERN"]) or ""
        params["image_black_pattern"] = self.runCommand(["getVariable", "BBUI_IMAGE_BLACK_PATTERN"]) or ""
        return params

bitbake/lib/bb/ui/crumbs/hoblistmodel.py (new file, 903 lines)
@@ -0,0 +1,903 @@
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2011 Intel Corporation
#
# Authored by Joshua Lock <josh@linux.intel.com>
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
# Authored by Shane Wang <shane.wang@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import gtk
import gobject
from bb.ui.crumbs.hobpages import HobPage

#
# PackageListModel
#
class PackageListModel(gtk.ListStore):
    """
    This class defines a gtk.ListStore subclass which will convert the output
    of the bb.event.TargetsTreeGenerated event into a gtk.ListStore whilst also
    providing convenience functions to access gtk.TreeModel subclasses which
    provide filtered views of the data.
    """

    (COL_NAME, COL_VER, COL_REV, COL_RNM, COL_SEC, COL_SUM, COL_RDEP, COL_RPROV, COL_SIZE, COL_RCP, COL_BINB, COL_INC, COL_FADE_INC, COL_FONT, COL_FLIST) = range(15)

    __gsignals__ = {
        "package-selection-changed" : (gobject.SIGNAL_RUN_LAST,
                                       gobject.TYPE_NONE,
                                       ()),
    }

    __toolchain_required_packages__ = ["packagegroup-core-standalone-sdk-target", "packagegroup-core-standalone-sdk-target-dbg"]

    def __init__(self):
        self.rprov_pkg = {}
        gtk.ListStore.__init__(self,
                               gobject.TYPE_STRING,
                               gobject.TYPE_STRING,
                               gobject.TYPE_STRING,
                               gobject.TYPE_STRING,
                               gobject.TYPE_STRING,
                               gobject.TYPE_STRING,
                               gobject.TYPE_STRING,
                               gobject.TYPE_STRING,
                               gobject.TYPE_STRING,
                               gobject.TYPE_STRING,
                               gobject.TYPE_STRING,
                               gobject.TYPE_BOOLEAN,
                               gobject.TYPE_BOOLEAN,
                               gobject.TYPE_STRING,
                               gobject.TYPE_STRING)
        self.sort_column_id, self.sort_order = PackageListModel.COL_NAME, gtk.SORT_ASCENDING

    """
    Find the model path for the item_name
    Returns the path in the model or None
    """
    def find_path_for_item(self, item_name):
        pkg = item_name
        if item_name not in self.pn_path.keys():
            if item_name not in self.rprov_pkg.keys():
                return None
            pkg = self.rprov_pkg[item_name]
            if pkg not in self.pn_path.keys():
                return None

        return self.pn_path[pkg]

    def find_item_for_path(self, item_path):
        return self[item_path][self.COL_NAME]

    """
    Helper function to determine whether an item is an item specified by filter
    """
    def tree_model_filter(self, model, it, filter):
        name = model.get_value(it, self.COL_NAME)

        for key in filter.keys():
            if key == self.COL_NAME:
                if filter[key] != 'Search packages by name':
                    if name and filter[key] not in name:
                        return False
            else:
                if model.get_value(it, key) not in filter[key]:
                    return False
        self.filtered_nb += 1
        return True
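
    # tree_model() chains a gtk.TreeModelFilter (driven by tree_model_filter
    # above) into a gtk.TreeModelSort, so each view gets its own filtered and
    # independently sorted window onto the single backing ListStore.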
"""
|
||||
Create, if required, and return a filtered gtk.TreeModelSort
|
||||
containing only the items specified by filter
|
||||
"""
|
||||
def tree_model(self, filter, excluded_items_ahead=False, included_items_ahead=False, search_data=None, initial=False):
|
||||
model = self.filter_new()
|
||||
self.filtered_nb = 0
|
||||
model.set_visible_func(self.tree_model_filter, filter)
|
||||
|
||||
sort = gtk.TreeModelSort(model)
|
||||
sort.connect ('sort-column-changed', self.sort_column_changed_cb)
|
||||
if initial:
|
||||
sort.set_sort_column_id(PackageListModel.COL_NAME, gtk.SORT_ASCENDING)
|
||||
sort.set_default_sort_func(None)
|
||||
elif excluded_items_ahead:
|
||||
sort.set_default_sort_func(self.exclude_item_sort_func, search_data)
|
||||
elif included_items_ahead:
|
||||
sort.set_default_sort_func(self.include_item_sort_func, search_data)
|
||||
else:
|
||||
if search_data and search_data!='Search recipes by name' and search_data!='Search package groups by name':
|
||||
sort.set_default_sort_func(self.sort_func, search_data)
|
||||
else:
|
||||
sort.set_sort_column_id(self.sort_column_id, self.sort_order)
|
||||
sort.set_default_sort_func(None)
|
||||
|
||||
sort.set_sort_func(PackageListModel.COL_INC, self.sort_column, PackageListModel.COL_INC)
|
||||
sort.set_sort_func(PackageListModel.COL_SIZE, self.sort_column, PackageListModel.COL_SIZE)
|
||||
sort.set_sort_func(PackageListModel.COL_BINB, self.sort_binb_column)
|
||||
sort.set_sort_func(PackageListModel.COL_RCP, self.sort_column, PackageListModel.COL_RCP)
|
||||
return sort
|
||||
|
||||
def sort_column_changed_cb (self, data):
|
||||
self.sort_column_id, self.sort_order = data.get_sort_column_id ()
|
||||
|
||||
def sort_column(self, model, row1, row2, col):
|
||||
value1 = model.get_value(row1, col)
|
||||
value2 = model.get_value(row2, col)
|
||||
if col==PackageListModel.COL_SIZE:
|
||||
value1 = HobPage._string_to_size(value1)
|
||||
value2 = HobPage._string_to_size(value2)
|
||||
|
||||
cmp_res = cmp(value1, value2)
|
||||
if cmp_res!=0:
|
||||
if col==PackageListModel.COL_INC:
|
||||
return -cmp_res
|
||||
else:
|
||||
return cmp_res
|
||||
else:
|
||||
name1 = model.get_value(row1, PackageListModel.COL_NAME)
|
||||
name2 = model.get_value(row2, PackageListModel.COL_NAME)
|
||||
return cmp(name1,name2)
|
||||
|
||||
def sort_binb_column(self, model, row1, row2):
|
||||
value1 = model.get_value(row1, PackageListModel.COL_BINB)
|
||||
value2 = model.get_value(row2, PackageListModel.COL_BINB)
|
||||
value1_list = value1.split(', ')
|
||||
value2_list = value2.split(', ')
|
||||
|
||||
value1 = value1_list[0]
|
||||
value2 = value2_list[0]
|
||||
|
||||
cmp_res = cmp(value1, value2)
|
||||
if cmp_res==0:
|
||||
cmp_size = cmp(len(value1_list), len(value2_list))
|
||||
if cmp_size==0:
|
||||
name1 = model.get_value(row1, PackageListModel.COL_NAME)
|
||||
name2 = model.get_value(row2, PackageListModel.COL_NAME)
|
||||
return cmp(name1,name2)
|
||||
else:
|
||||
return cmp_size
|
||||
else:
|
||||
return cmp_res
|
||||
|
||||
def exclude_item_sort_func(self, model, iter1, iter2, user_data=None):
|
||||
if user_data:
|
||||
val1 = model.get_value(iter1, PackageListModel.COL_NAME)
|
||||
val2 = model.get_value(iter2, PackageListModel.COL_NAME)
|
||||
return self.cmp_vals(val1, val2, user_data)
|
||||
else:
|
||||
val1 = model.get_value(iter1, PackageListModel.COL_FADE_INC)
|
||||
val2 = model.get_value(iter2, PackageListModel.COL_INC)
|
||||
return ((val1 == True) and (val2 == False))
|
||||
|
||||
def include_item_sort_func(self, model, iter1, iter2, user_data=None):
|
||||
if user_data:
|
||||
val1 = model.get_value(iter1, PackageListModel.COL_NAME)
|
||||
val2 = model.get_value(iter2, PackageListModel.COL_NAME)
|
||||
return self.cmp_vals(val1, val2, user_data)
|
||||
else:
|
||||
val1 = model.get_value(iter1, PackageListModel.COL_INC)
|
||||
val2 = model.get_value(iter2, PackageListModel.COL_INC)
|
||||
return ((val1 == False) and (val2 == True))
|
||||
|
||||
def sort_func(self, model, iter1, iter2, user_data):
|
||||
val1 = model.get_value(iter1, PackageListModel.COL_NAME)
|
||||
val2 = model.get_value(iter2, PackageListModel.COL_NAME)
|
||||
return self.cmp_vals(val1, val2, user_data)
|
||||
|
||||
def cmp_vals(self, val1, val2, user_data):
|
||||
if val1 is None or val2 is None:
|
||||
return 0
|
||||
elif val1.startswith(user_data) and not val2.startswith(user_data):
|
||||
return -1
|
||||
elif not val1.startswith(user_data) and val2.startswith(user_data):
|
||||
return 1
|
||||
else:
|
||||
return cmp(val1, val2)
|
||||
|
||||
def convert_vpath_to_path(self, view_model, view_path):
|
||||
# view_model is the model sorted
|
||||
# get the path of the model filtered
|
||||
filtered_model_path = view_model.convert_path_to_child_path(view_path)
|
||||
# get the model filtered
|
||||
filtered_model = view_model.get_model()
|
||||
# get the path of the original model
|
||||
path = filtered_model.convert_path_to_child_path(filtered_model_path)
|
||||
return path
|
||||
|
||||
def convert_path_to_vpath(self, view_model, path):
|
||||
it = view_model.get_iter_first()
|
||||
while it:
|
||||
name = self.find_item_for_path(path)
|
||||
view_name = view_model.get_value(it, PackageListModel.COL_NAME)
|
||||
if view_name == name:
|
||||
view_path = view_model.get_path(it)
|
||||
return view_path
|
||||
it = view_model.iter_next(it)
|
||||
return None
|
||||
|
||||
"""
|
||||
The populate() function takes as input the data from a
|
||||
bb.event.PackageInfo event and populates the package list.
|
||||
"""
|
||||
def populate(self, pkginfolist):
|
||||
# First clear the model, in case repopulating
|
||||
self.clear()
|
||||
|
||||
def getpkgvalue(pkgdict, key, pkgname, defaultval = None):
|
||||
value = pkgdict.get('%s_%s' % (key, pkgname), None)
|
||||
if not value:
|
||||
value = pkgdict.get(key, defaultval)
|
||||
return value
|
||||
|
||||
for pkginfo in pkginfolist:
|
||||
pn = pkginfo['PN']
|
||||
pv = pkginfo['PV']
|
||||
pr = pkginfo['PR']
|
||||
pkg = pkginfo['PKG']
|
||||
pkgv = getpkgvalue(pkginfo, 'PKGV', pkg)
|
||||
pkgr = getpkgvalue(pkginfo, 'PKGR', pkg)
|
||||
# PKGSIZE is artificial, will always be overridden with the package name if present
|
||||
pkgsize = int(pkginfo.get('PKGSIZE_%s' % pkg, "0"))
|
||||
# PKG_%s is the renamed version
|
||||
pkg_rename = pkginfo.get('PKG_%s' % pkg, "")
|
||||
# The rest may be overridden or not
|
||||
section = getpkgvalue(pkginfo, 'SECTION', pkg, "")
|
||||
summary = getpkgvalue(pkginfo, 'SUMMARY', pkg, "")
|
||||
rdep = getpkgvalue(pkginfo, 'RDEPENDS', pkg, "")
|
||||
rrec = getpkgvalue(pkginfo, 'RRECOMMENDS', pkg, "")
|
||||
rprov = getpkgvalue(pkginfo, 'RPROVIDES', pkg, "")
|
||||
files_list = getpkgvalue(pkginfo, 'FILES_INFO', pkg, "")
|
||||
for i in rprov.split():
|
||||
self.rprov_pkg[i] = pkg
|
||||
|
||||
recipe = pn + '-' + pv + '-' + pr
|
||||
|
||||
allow_empty = getpkgvalue(pkginfo, 'ALLOW_EMPTY', pkg, "")
|
||||
|
||||
if pkgsize == 0 and not allow_empty:
|
||||
continue
|
||||
|
||||
size = HobPage._size_to_string(pkgsize)
|
||||
self.set(self.append(), self.COL_NAME, pkg, self.COL_VER, pkgv,
|
||||
self.COL_REV, pkgr, self.COL_RNM, pkg_rename,
|
||||
self.COL_SEC, section, self.COL_SUM, summary,
|
||||
self.COL_RDEP, rdep + ' ' + rrec,
|
||||
self.COL_RPROV, rprov, self.COL_SIZE, size,
|
||||
self.COL_RCP, recipe, self.COL_BINB, "",
|
||||
self.COL_INC, False, self.COL_FONT, '10', self.COL_FLIST, files_list)
|
||||
|
||||
self.pn_path = {}
|
||||
it = self.get_iter_first()
|
||||
while it:
|
||||
pn = self.get_value(it, self.COL_NAME)
|
||||
path = self.get_path(it)
|
||||
self.pn_path[pn] = path
|
||||
it = self.iter_next(it)
|
||||
|
||||
"""
|
||||
Update the model, send out the notification.
|
||||
"""
|
||||
def selection_change_notification(self):
|
||||
self.emit("package-selection-changed")
|
||||
|
||||
"""
|
||||
Check whether the item at item_path is included or not
|
||||
"""
|
||||
def path_included(self, item_path):
|
||||
return self[item_path][self.COL_INC]
|
||||
|
||||
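
    # include_item()/exclude_item() keep COL_INC and COL_BINB ("brought in
    # by") consistent: including an item recursively pulls in its runtime
    # dependencies, and excluding one also drops everything it brought in.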
"""
|
||||
Add this item, and any of its dependencies, to the image contents
|
||||
"""
|
||||
def include_item(self, item_path, binb=""):
|
||||
if self.path_included(item_path):
|
||||
return
|
||||
|
||||
item_name = self[item_path][self.COL_NAME]
|
||||
item_deps = self[item_path][self.COL_RDEP]
|
||||
|
||||
self[item_path][self.COL_INC] = True
|
||||
|
||||
item_bin = self[item_path][self.COL_BINB].split(', ')
|
||||
if binb and not binb in item_bin:
|
||||
item_bin.append(binb)
|
||||
self[item_path][self.COL_BINB] = ', '.join(item_bin).lstrip(', ')
|
||||
|
||||
if item_deps:
|
||||
# Ensure all of the items deps are included and, where appropriate,
|
||||
# add this item to their COL_BINB
|
||||
for dep in item_deps.split(" "):
|
||||
if dep.startswith('('):
|
||||
continue
|
||||
# If the contents model doesn't already contain dep, add it
|
||||
dep_path = self.find_path_for_item(dep)
|
||||
if not dep_path:
|
||||
continue
|
||||
dep_included = self.path_included(dep_path)
|
||||
|
||||
if dep_included and not dep in item_bin:
|
||||
# don't set the COL_BINB to this item if the target is an
|
||||
# item in our own COL_BINB
|
||||
dep_bin = self[dep_path][self.COL_BINB].split(', ')
|
||||
if not item_name in dep_bin:
|
||||
dep_bin.append(item_name)
|
||||
self[dep_path][self.COL_BINB] = ', '.join(dep_bin).lstrip(', ')
|
||||
elif not dep_included:
|
||||
self.include_item(dep_path, binb=item_name)
|
||||
|
||||
def exclude_item(self, item_path):
|
||||
if not self.path_included(item_path):
|
||||
return
|
||||
|
||||
self[item_path][self.COL_INC] = False
|
||||
|
||||
item_name = self[item_path][self.COL_NAME]
|
||||
item_deps = self[item_path][self.COL_RDEP]
|
||||
if item_deps:
|
||||
for dep in item_deps.split(" "):
|
||||
if dep.startswith('('):
|
||||
continue
|
||||
dep_path = self.find_path_for_item(dep)
|
||||
if not dep_path:
|
||||
continue
|
||||
dep_bin = self[dep_path][self.COL_BINB].split(', ')
|
||||
if item_name in dep_bin:
|
||||
dep_bin.remove(item_name)
|
||||
self[dep_path][self.COL_BINB] = ', '.join(dep_bin).lstrip(', ')
|
||||
|
||||
item_bin = self[item_path][self.COL_BINB].split(', ')
|
||||
if item_bin:
|
||||
for binb in item_bin:
|
||||
binb_path = self.find_path_for_item(binb)
|
||||
if not binb_path:
|
||||
continue
|
||||
self.exclude_item(binb_path)
|
||||
|
||||
"""
|
||||
Empty self.contents by setting the include of each entry to None
|
||||
"""
|
||||
def reset(self):
|
||||
it = self.get_iter_first()
|
||||
while it:
|
||||
self.set(it,
|
||||
self.COL_INC, False,
|
||||
self.COL_BINB, "")
|
||||
it = self.iter_next(it)
|
||||
|
||||
self.selection_change_notification()
|
||||
|
||||
def get_selected_packages(self):
|
||||
packagelist = []
|
||||
|
||||
it = self.get_iter_first()
|
||||
while it:
|
||||
if self.get_value(it, self.COL_INC):
|
||||
name = self.get_value(it, self.COL_NAME)
|
||||
packagelist.append(name)
|
||||
it = self.iter_next(it)
|
||||
|
||||
return packagelist
|
||||
|
||||
def get_user_selected_packages(self):
|
||||
packagelist = []
|
||||
|
||||
it = self.get_iter_first()
|
||||
while it:
|
||||
if self.get_value(it, self.COL_INC):
|
||||
binb = self.get_value(it, self.COL_BINB)
|
||||
if binb == "User Selected":
|
||||
name = self.get_value(it, self.COL_NAME)
|
||||
packagelist.append(name)
|
||||
it = self.iter_next(it)
|
||||
|
||||
return packagelist
|
||||
|
||||
def get_selected_packages_toolchain(self):
|
||||
packagelist = []
|
||||
|
||||
it = self.get_iter_first()
|
||||
while it:
|
||||
if self.get_value(it, self.COL_INC):
|
||||
name = self.get_value(it, self.COL_NAME)
|
||||
if name.endswith("-dev") or name.endswith("-dbg"):
|
||||
packagelist.append(name)
|
||||
it = self.iter_next(it)
|
||||
|
||||
return list(set(packagelist + self.__toolchain_required_packages__));
|
||||
|
||||
"""
|
||||
Package model may be incomplete, therefore when calling the
|
||||
set_selected_packages(), some packages will not be set included.
|
||||
Return the un-set packages list.
|
||||
"""
|
||||
def set_selected_packages(self, packagelist, user_selected=False):
|
||||
left = []
|
||||
binb = 'User Selected' if user_selected else ''
|
||||
for pn in packagelist:
|
||||
if pn in self.pn_path.keys():
|
||||
path = self.pn_path[pn]
|
||||
self.include_item(item_path=path, binb=binb)
|
||||
else:
|
||||
left.append(pn)
|
||||
|
||||
self.selection_change_notification()
|
||||
return left
|
||||
|
||||
"""
|
||||
Return the selected package size, unit is B.
|
||||
"""
|
||||
def get_packages_size(self):
|
||||
packages_size = 0
|
||||
it = self.get_iter_first()
|
||||
while it:
|
||||
if self.get_value(it, self.COL_INC):
|
||||
str_size = self.get_value(it, self.COL_SIZE)
|
||||
if not str_size:
|
||||
continue
|
||||
|
||||
packages_size += HobPage._string_to_size(str_size)
|
||||
|
||||
it = self.iter_next(it)
|
||||
return packages_size
|
||||
|
||||
"""
|
||||
Resync the state of included items to a backup column before performing the fadeout visible effect
|
||||
"""
|
||||
def resync_fadeout_column(self, model_first_iter=None):
|
||||
it = model_first_iter
|
||||
while it:
|
||||
active = self.get_value(it, self.COL_INC)
|
||||
self.set(it, self.COL_FADE_INC, active)
|
||||
it = self.iter_next(it)
|
||||
|
||||
#
|
||||
# RecipeListModel
|
||||
#
|
||||
class RecipeListModel(gtk.ListStore):
|
||||
"""
|
||||
This class defines an gtk.ListStore subclass which will convert the output
|
||||
of the bb.event.TargetsTreeGenerated event into a gtk.ListStore whilst also
|
||||
providing convenience functions to access gtk.TreeModel subclasses which
|
||||
provide filtered views of the data.
|
||||
"""
|
||||
(COL_NAME, COL_DESC, COL_LIC, COL_GROUP, COL_DEPS, COL_BINB, COL_TYPE, COL_INC, COL_IMG, COL_INSTALL, COL_PN, COL_FADE_INC, COL_SUMMARY, COL_VERSION,
|
||||
COL_REVISION, COL_HOMEPAGE, COL_BUGTRACKER, COL_FILE) = range(18)
|
||||
|
||||
__custom_image__ = "Start with an empty image recipe"
|
||||
|
||||
__gsignals__ = {
|
||||
"recipe-selection-changed" : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
()),
|
||||
}
|
||||
|
||||
"""
|
||||
"""
|
||||
def __init__(self):
|
||||
gtk.ListStore.__init__ (self,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_BOOLEAN,
|
||||
gobject.TYPE_BOOLEAN,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_BOOLEAN,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING)
|
||||
self.sort_column_id, self.sort_order = RecipeListModel.COL_NAME, gtk.SORT_ASCENDING
|
||||

    """
    Find the model path for the item_name
    Returns the path in the model or None
    """
    def find_path_for_item(self, item_name):
        if self.non_target_name(item_name) or item_name not in self.pn_path.keys():
            return None
        else:
            return self.pn_path[item_name]

    def find_item_for_path(self, item_path):
        return self[item_path][self.COL_NAME]

"""
|
||||
Helper method to determine whether name is a target pn
|
||||
"""
|
||||
def non_target_name(self, name):
|
||||
if name and ('-native' in name):
|
||||
return True
|
||||
return False
|
||||
|
||||
"""
|
||||
Helper function to determine whether an item is an item specified by filter
|
||||
"""
|
||||
def tree_model_filter(self, model, it, filter):
|
||||
name = model.get_value(it, self.COL_NAME)
|
||||
if self.non_target_name(name):
|
||||
return False
|
||||
|
||||
for key in filter.keys():
|
||||
if key == self.COL_NAME:
|
||||
if filter[key] != 'Search recipes by name' and filter[key] != 'Search package groups by name':
|
||||
if filter[key] not in name:
|
||||
return False
|
||||
else:
|
||||
if model.get_value(it, key) not in filter[key]:
|
||||
return False
|
||||
self.filtered_nb += 1
|
||||
|
||||
return True
|
||||
|
||||
    def exclude_item_sort_func(self, model, iter1, iter2, user_data=None):
        if user_data:
            val1 = model.get_value(iter1, RecipeListModel.COL_NAME)
            val2 = model.get_value(iter2, RecipeListModel.COL_NAME)
            return self.cmp_vals(val1, val2, user_data)
        else:
            val1 = model.get_value(iter1, RecipeListModel.COL_FADE_INC)
            val2 = model.get_value(iter2, RecipeListModel.COL_INC)
            return ((val1 == True) and (val2 == False))

    def include_item_sort_func(self, model, iter1, iter2, user_data=None):
        if user_data:
            val1 = model.get_value(iter1, RecipeListModel.COL_NAME)
            val2 = model.get_value(iter2, RecipeListModel.COL_NAME)
            return self.cmp_vals(val1, val2, user_data)
        else:
            val1 = model.get_value(iter1, RecipeListModel.COL_INC)
            val2 = model.get_value(iter2, RecipeListModel.COL_INC)
            return ((val1 == False) and (val2 == True))

    def sort_func(self, model, iter1, iter2, user_data):
        val1 = model.get_value(iter1, RecipeListModel.COL_NAME)
        val2 = model.get_value(iter2, RecipeListModel.COL_NAME)
        return self.cmp_vals(val1, val2, user_data)

    def cmp_vals(self, val1, val2, user_data):
        if val1 is None or val2 is None:
            return 0
        elif val1.startswith(user_data) and not val2.startswith(user_data):
            return -1
        elif not val1.startswith(user_data) and val2.startswith(user_data):
            return 1
        else:
            return cmp(val1, val2)
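
    # cmp_vals() floats names matching the current search prefix to the top;
    # for instance, with user_data = "gtk" (a hypothetical search string):
    #   cmp_vals("gtk+", "zlib", "gtk")   -> -1  (prefix match sorts first)
    #   cmp_vals("zlib", "gtk+", "gtk")   ->  1
    #   cmp_vals("avahi", "zlib", "gtk")  -> cmp("avahi", "zlib") == -1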
"""
|
||||
Create, if required, and return a filtered gtk.TreeModelSort
|
||||
containing only the items specified by filter
|
||||
"""
|
||||
def tree_model(self, filter, excluded_items_ahead=False, included_items_ahead=False, search_data=None, initial=False):
|
||||
model = self.filter_new()
|
||||
self.filtered_nb = 0
|
||||
model.set_visible_func(self.tree_model_filter, filter)
|
||||
|
||||
sort = gtk.TreeModelSort(model)
|
||||
sort.connect ('sort-column-changed', self.sort_column_changed_cb)
|
||||
if initial:
|
||||
sort.set_sort_column_id(RecipeListModel.COL_NAME, gtk.SORT_ASCENDING)
|
||||
sort.set_default_sort_func(None)
|
||||
elif excluded_items_ahead:
|
||||
sort.set_default_sort_func(self.exclude_item_sort_func, search_data)
|
||||
elif included_items_ahead:
|
||||
sort.set_default_sort_func(self.include_item_sort_func, search_data)
|
||||
else:
|
||||
if search_data and search_data!='Search recipes by name' and search_data!='Search package groups by name':
|
||||
sort.set_default_sort_func(self.sort_func, search_data)
|
||||
else:
|
||||
sort.set_sort_column_id(self.sort_column_id, self.sort_order)
|
||||
sort.set_default_sort_func(None)
|
||||
|
||||
sort.set_sort_func(RecipeListModel.COL_INC, self.sort_column, RecipeListModel.COL_INC)
|
||||
sort.set_sort_func(RecipeListModel.COL_GROUP, self.sort_column, RecipeListModel.COL_GROUP)
|
||||
sort.set_sort_func(RecipeListModel.COL_BINB, self.sort_binb_column)
|
||||
sort.set_sort_func(RecipeListModel.COL_LIC, self.sort_column, RecipeListModel.COL_LIC)
|
||||
return sort
|
||||
|
||||
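
    # Example (mirrors the call made from the image configuration page): build
    # a sorted view restricted to image-typed rows.
    #
    #   filter = {RecipeListModel.COL_TYPE : ['image']}
    #   image_model = recipe_model.tree_model(filter)
    #   image_model.set_sort_column_id(RecipeListModel.COL_NAME, gtk.SORT_ASCENDING)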

    def sort_column_changed_cb(self, data):
        self.sort_column_id, self.sort_order = data.get_sort_column_id()

    def sort_column(self, model, row1, row2, col):
        value1 = model.get_value(row1, col)
        value2 = model.get_value(row2, col)
        cmp_res = cmp(value1, value2)
        if cmp_res != 0:
            if col == RecipeListModel.COL_INC:
                return -cmp_res
            else:
                return cmp_res
        else:
            name1 = model.get_value(row1, RecipeListModel.COL_NAME)
            name2 = model.get_value(row2, RecipeListModel.COL_NAME)
            return cmp(name1, name2)

    def sort_binb_column(self, model, row1, row2):
        value1 = model.get_value(row1, RecipeListModel.COL_BINB)
        value2 = model.get_value(row2, RecipeListModel.COL_BINB)
        value1_list = value1.split(', ')
        value2_list = value2.split(', ')

        value1 = value1_list[0]
        value2 = value2_list[0]

        cmp_res = cmp(value1, value2)
        if cmp_res == 0:
            cmp_size = cmp(len(value1_list), len(value2_list))
            if cmp_size == 0:
                name1 = model.get_value(row1, RecipeListModel.COL_NAME)
                name2 = model.get_value(row2, RecipeListModel.COL_NAME)
                return cmp(name1, name2)
            else:
                return cmp_size
        else:
            return cmp_res

    def convert_vpath_to_path(self, view_model, view_path):
        filtered_model_path = view_model.convert_path_to_child_path(view_path)
        filtered_model = view_model.get_model()

        # get the path of the original model
        path = filtered_model.convert_path_to_child_path(filtered_model_path)
        return path

    def convert_path_to_vpath(self, view_model, path):
        it = view_model.get_iter_first()
        while it:
            name = self.find_item_for_path(path)
            view_name = view_model.get_value(it, RecipeListModel.COL_NAME)
            if view_name == name:
                view_path = view_model.get_path(it)
                return view_path
            it = view_model.iter_next(it)
        return None

"""
|
||||
The populate() function takes as input the data from a
|
||||
bb.event.TargetsTreeGenerated event and populates the RecipeList.
|
||||
"""
|
||||
def populate(self, event_model):
|
||||
# First clear the model, in case repopulating
|
||||
self.clear()
|
||||
|
||||
# dummy image for prompt
|
||||
self.set_in_list(self.__custom_image__, "Use 'Edit image recipe' to customize recipes and packages " \
|
||||
"to be included in your image ")
|
||||
|
||||
for item in event_model["pn"]:
|
||||
name = item
|
||||
desc = event_model["pn"][item]["description"]
|
||||
lic = event_model["pn"][item]["license"]
|
||||
group = event_model["pn"][item]["section"]
|
||||
inherits = event_model["pn"][item]["inherits"]
|
||||
summary = event_model["pn"][item]["summary"]
|
||||
version = event_model["pn"][item]["version"]
|
||||
revision = event_model["pn"][item]["prevision"]
|
||||
homepage = event_model["pn"][item]["homepage"]
|
||||
bugtracker = event_model["pn"][item]["bugtracker"]
|
||||
filename = event_model["pn"][item]["filename"]
|
||||
install = []
|
||||
|
||||
depends = event_model["depends"].get(item, []) + event_model["rdepends-pn"].get(item, [])
|
||||
|
||||
if ('packagegroup.bbclass' in " ".join(inherits)):
|
||||
atype = 'packagegroup'
|
||||
elif ('/image.bbclass' in " ".join(inherits)):
|
||||
if "edited" not in name:
|
||||
atype = 'image'
|
||||
install = event_model["rdepends-pkg"].get(item, []) + event_model["rrecs-pkg"].get(item, [])
|
||||
elif ('meta-' in name):
|
||||
atype = 'toolchain'
|
||||
elif (name == 'dummy-image' or name == 'dummy-toolchain'):
|
||||
atype = 'dummy'
|
||||
else:
|
||||
atype = 'recipe'
|
||||
|
||||
self.set(self.append(), self.COL_NAME, item, self.COL_DESC, desc,
|
||||
self.COL_LIC, lic, self.COL_GROUP, group,
|
||||
self.COL_DEPS, " ".join(depends), self.COL_BINB, "",
|
||||
self.COL_TYPE, atype, self.COL_INC, False,
|
||||
self.COL_IMG, False, self.COL_INSTALL, " ".join(install), self.COL_PN, item,
|
||||
self.COL_SUMMARY, summary, self.COL_VERSION, version, self.COL_REVISION, revision,
|
||||
self.COL_HOMEPAGE, homepage, self.COL_BUGTRACKER, bugtracker,
|
||||
self.COL_FILE, filename)
|
||||
|
||||
self.pn_path = {}
|
||||
it = self.get_iter_first()
|
||||
while it:
|
||||
pn = self.get_value(it, self.COL_NAME)
|
||||
path = self.get_path(it)
|
||||
self.pn_path[pn] = path
|
||||
it = self.iter_next(it)
|
||||
|
||||
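
    # Sketch of the event_model shape populate() consumes, reconstructed from
    # the lookups above (field values are illustrative only):
    #
    #   event_model = {
    #       "pn": {"core-image-minimal": {"description": "...", "license": "MIT",
    #                                     "section": "...", "inherits": [".../image.bbclass"],
    #                                     "summary": "...", "version": "1.0", "prevision": "r0",
    #                                     "homepage": "", "bugtracker": "", "filename": "..."}},
    #       "depends": {}, "rdepends-pn": {}, "rdepends-pkg": {}, "rrecs-pkg": {},
    #   }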

    def set_in_list(self, item, desc):
        self.set(self.append(), self.COL_NAME, item,
                 self.COL_DESC, desc,
                 self.COL_LIC, "", self.COL_GROUP, "",
                 self.COL_DEPS, "", self.COL_BINB, "",
                 self.COL_TYPE, "image", self.COL_INC, False,
                 self.COL_IMG, False, self.COL_INSTALL, "", self.COL_PN, item,
                 self.COL_SUMMARY, "", self.COL_VERSION, "", self.COL_REVISION, "",
                 self.COL_HOMEPAGE, "", self.COL_BUGTRACKER, "")
        self.pn_path = {}
        it = self.get_iter_first()
        while it:
            pn = self.get_value(it, self.COL_NAME)
            path = self.get_path(it)
            self.pn_path[pn] = path
            it = self.iter_next(it)

"""
|
||||
Update the model, send out the notification.
|
||||
"""
|
||||
def selection_change_notification(self):
|
||||
self.emit("recipe-selection-changed")
|
||||
|
||||
def path_included(self, item_path):
|
||||
return self[item_path][self.COL_INC]
|
||||
|
||||
"""
|
||||
Add this item, and any of its dependencies, to the image contents
|
||||
"""
|
||||
def include_item(self, item_path, binb="", image_contents=False):
|
||||
if self.path_included(item_path):
|
||||
return
|
||||
|
||||
item_name = self[item_path][self.COL_NAME]
|
||||
item_deps = self[item_path][self.COL_DEPS]
|
||||
|
||||
self[item_path][self.COL_INC] = True
|
||||
|
||||
item_bin = self[item_path][self.COL_BINB].split(', ')
|
||||
if binb and not binb in item_bin:
|
||||
item_bin.append(binb)
|
||||
self[item_path][self.COL_BINB] = ', '.join(item_bin).lstrip(', ')
|
||||
|
||||
# We want to do some magic with things which are brought in by the
|
||||
# base image so tag them as so
|
||||
if image_contents:
|
||||
self[item_path][self.COL_IMG] = True
|
||||
|
||||
if item_deps:
|
||||
# Ensure all of the items deps are included and, where appropriate,
|
||||
# add this item to their COL_BINB
|
||||
for dep in item_deps.split(" "):
|
||||
# If the contents model doesn't already contain dep, add it
|
||||
dep_path = self.find_path_for_item(dep)
|
||||
if not dep_path:
|
||||
continue
|
||||
dep_included = self.path_included(dep_path)
|
||||
|
||||
if dep_included and not dep in item_bin:
|
||||
# don't set the COL_BINB to this item if the target is an
|
||||
# item in our own COL_BINB
|
||||
dep_bin = self[dep_path][self.COL_BINB].split(', ')
|
||||
if not item_name in dep_bin:
|
||||
dep_bin.append(item_name)
|
||||
self[dep_path][self.COL_BINB] = ', '.join(dep_bin).lstrip(', ')
|
||||
elif not dep_included:
|
||||
self.include_item(dep_path, binb=item_name, image_contents=image_contents)
|
||||
dep_bin = self[item_path][self.COL_BINB].split(', ')
|
||||
if self[item_path][self.COL_NAME] in dep_bin:
|
||||
dep_bin.remove(self[item_path][self.COL_NAME])
|
||||
self[item_path][self.COL_BINB] = ', '.join(dep_bin).lstrip(', ')
|
||||
|
||||

    def exclude_item(self, item_path):
        if not self.path_included(item_path):
            return

        self[item_path][self.COL_INC] = False

        item_name = self[item_path][self.COL_NAME]
        item_deps = self[item_path][self.COL_DEPS]
        if item_deps:
            for dep in item_deps.split(" "):
                dep_path = self.find_path_for_item(dep)
                if not dep_path:
                    continue
                dep_bin = self[dep_path][self.COL_BINB].split(', ')
                if item_name in dep_bin:
                    dep_bin.remove(item_name)
                    self[dep_path][self.COL_BINB] = ', '.join(dep_bin).lstrip(', ')

        item_bin = self[item_path][self.COL_BINB].split(', ')
        if item_bin:
            for binb in item_bin:
                binb_path = self.find_path_for_item(binb)
                if not binb_path:
                    continue
                self.exclude_item(binb_path)
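
    # COL_BINB ("brought in by") records why a row is included, keeping
    # include_item()/exclude_item() symmetric. A sketch with hypothetical
    # recipes: including "app", which depends on "libfoo", marks both rows
    # COL_INC = True and sets libfoo's COL_BINB to "app"; excluding "libfoo"
    # afterwards also excludes "app", since "app" appears in libfoo's
    # COL_BINB and the exclusion walks back up that chain.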

    def reset(self):
        it = self.get_iter_first()
        while it:
            self.set(it,
                     self.COL_INC, False,
                     self.COL_BINB, "",
                     self.COL_IMG, False)
            it = self.iter_next(it)

        self.selection_change_notification()

"""
|
||||
Returns two lists. One of user selected recipes and the other containing
|
||||
all selected recipes
|
||||
"""
|
||||
def get_selected_recipes(self):
|
||||
allrecipes = []
|
||||
userrecipes = []
|
||||
|
||||
it = self.get_iter_first()
|
||||
while it:
|
||||
if self.get_value(it, self.COL_INC):
|
||||
name = self.get_value(it, self.COL_PN)
|
||||
type = self.get_value(it, self.COL_TYPE)
|
||||
if type != "image":
|
||||
allrecipes.append(name)
|
||||
sel = "User Selected" in self.get_value(it, self.COL_BINB)
|
||||
if sel:
|
||||
userrecipes.append(name)
|
||||
it = self.iter_next(it)
|
||||
|
||||
return list(set(userrecipes)), list(set(allrecipes))
|
||||
|
||||
def set_selected_recipes(self, recipelist):
|
||||
for pn in recipelist:
|
||||
if pn in self.pn_path.keys():
|
||||
path = self.pn_path[pn]
|
||||
self.include_item(item_path=path,
|
||||
binb="User Selected")
|
||||
self.selection_change_notification()
|
||||
|
||||
    def get_selected_image(self):
        it = self.get_iter_first()
        while it:
            if self.get_value(it, self.COL_INC):
                name = self.get_value(it, self.COL_PN)
                type = self.get_value(it, self.COL_TYPE)
                if type == "image":
                    sel = "User Selected" in self.get_value(it, self.COL_BINB)
                    if sel:
                        return name
            it = self.iter_next(it)
        return None

    def set_selected_image(self, img):
        if not img:
            return
        self.reset()
        path = self.find_path_for_item(img)
        self.include_item(item_path=path,
                          binb="User Selected",
                          image_contents=True)
        self.selection_change_notification()

    def set_custom_image_version(self, version):
        self.custom_image_version = version

    def get_custom_image_version(self):
        return self.custom_image_version

    def is_custom_image(self):
        return self.get_selected_image() == self.__custom_image__
128  bitbake/lib/bb/ui/crumbs/hobpages.py  Executable file
@@ -0,0 +1,128 @@
#!/usr/bin/env python
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2012        Intel Corporation
#
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
# Authored by Shane Wang <shane.wang@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import gtk
from bb.ui.crumbs.hobcolor import HobColors
from bb.ui.crumbs.hobwidget import hwc

#
# HobPage: the super class for all Hob-related pages
#
class HobPage (gtk.VBox):

    def __init__(self, builder, title = None):
        super(HobPage, self).__init__(False, 0)
        self.builder = builder
        self.builder_width, self.builder_height = self.builder.size_request()

        if not title:
            self.title = "Hob -- Image Creator"
        else:
            self.title = title
        self.title_label = gtk.Label()

        self.box_group_area = gtk.VBox(False, 12)
        self.box_group_area.set_size_request(self.builder_width - 73 - 73, self.builder_height - 88 - 15 - 15)
        self.group_align = gtk.Alignment(xalign = 0, yalign=0.5, xscale=1, yscale=1)
        self.group_align.set_padding(15, 15, 73, 73)
        self.group_align.add(self.box_group_area)
        self.box_group_area.set_homogeneous(False)

    def set_title(self, title):
        self.title = title
        self.title_label.set_markup("<span size='x-large'>%s</span>" % self.title)
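
    # Layout constants used above, for reference: the top bar is 88 px high
    # (see add_onto_top_bar below), and the content area is inset 73 px on
    # each side and 15 px top and bottom, which is exactly what
    # set_padding(15, 15, 73, 73) and the size_request arithmetic encode.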

    def add_onto_top_bar(self, widget = None, padding = 0):
        # the top bar occupies 1/7 of the page height
        # setup an event box
        eventbox = gtk.EventBox()
        style = eventbox.get_style().copy()
        style.bg[gtk.STATE_NORMAL] = eventbox.get_colormap().alloc_color(HobColors.LIGHT_GRAY, False, False)
        eventbox.set_style(style)
        eventbox.set_size_request(-1, 88)

        hbox = gtk.HBox()

        self.title_label = gtk.Label()
        self.title_label.set_markup("<span size='x-large'>%s</span>" % self.title)
        hbox.pack_start(self.title_label, expand=False, fill=False, padding=20)

        if widget:
            # add the widget to the event box
            hbox.pack_end(widget, expand=False, fill=False, padding=padding)
        eventbox.add(hbox)

        return eventbox

    def span_tag(self, size="medium", weight="normal", foreground="#1c1c1c"):
        span_tag = "weight='%s' foreground='%s' size='%s'" % (weight, foreground, size)
        return span_tag

    def append_toolbar_button(self, toolbar, buttonname, icon_disp, icon_hover, tip, cb):
        # Create a button and append it to the toolbar according to the button name
        icon = gtk.Image()
        pix_buffer = gtk.gdk.pixbuf_new_from_file(icon_disp)
        icon.set_from_pixbuf(pix_buffer)
        button = toolbar.append_item(buttonname, tip, None, icon, cb)
        return button

    @staticmethod
    def _size_to_string(size):
        try:
            if not size:
                size_str = "0 B"
            else:
                if len(str(int(size))) > 6:
                    size_str = '%.1f' % (size*1.0/(1024*1024)) + ' MB'
                elif len(str(int(size))) > 3:
                    size_str = '%.1f' % (size*1.0/1024) + ' KB'
                else:
                    size_str = str(size) + ' B'
        except:
            size_str = "0 B"
        return size_str

    @staticmethod
    def _string_to_size(str_size):
        try:
            if not str_size:
                size = 0
            else:
                unit = str_size.split()
                if len(unit) > 1:
                    if unit[1] == 'MB':
                        size = float(unit[0])*1024*1024
                    elif unit[1] == 'KB':
                        size = float(unit[0])*1024
                    elif unit[1] == 'B':
                        size = float(unit[0])
                    else:
                        size = 0
                else:
                    size = float(unit[0])
        except:
            size = 0
        return size
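
    # Worked examples (a sketch; the values follow directly from the code
    # above):
    #   _size_to_string(2048)     -> '2.0 KB'  (4 digits > 3, so KB)
    #   _size_to_string(3145728)  -> '3.0 MB'  (7 digits > 6, so MB)
    #   _string_to_size('1.5 MB') -> 1572864.0
    #   _string_to_size('')       -> 0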
561  bitbake/lib/bb/ui/crumbs/imageconfigurationpage.py  Normal file
@@ -0,0 +1,561 @@
#!/usr/bin/env python
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2012        Intel Corporation
#
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
# Authored by Shane Wang <shane.wang@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import gtk
import glib
import os
import re
from bb.ui.crumbs.progressbar import HobProgressBar
from bb.ui.crumbs.hobcolor import HobColors
from bb.ui.crumbs.hobwidget import hic, HobImageButton, HobInfoButton, HobAltButton, HobButton
from bb.ui.crumbs.hoblistmodel import RecipeListModel
from bb.ui.crumbs.hobpages import HobPage
from bb.ui.crumbs.hig.retrieveimagedialog import RetrieveImageDialog

#
# ImageConfigurationPage
#
class ImageConfigurationPage (HobPage):

    __dummy_machine__ = "--select a machine--"
    __dummy_image__ = "--select an image recipe--"
    __custom_image__ = "Select from my image recipes"

    def __init__(self, builder):
        super(ImageConfigurationPage, self).__init__(builder, "Image configuration")

        self.image_combo_id = None
        # We use machine_combo_changed_by_manual to identify whether the
        # machine was changed by code or by the user. If by the user, all
        # recipe and package selections are cleared.
        self.machine_combo_changed_by_manual = True
        self.stopping = False
        self.warning_shift = 0
        self.custom_image_selected = None
        self.create_visual_elements()

    def create_visual_elements(self):
        # create visual elements
        self.toolbar = gtk.Toolbar()
        self.toolbar.set_orientation(gtk.ORIENTATION_HORIZONTAL)
        self.toolbar.set_style(gtk.TOOLBAR_BOTH)

        my_images_button = self.append_toolbar_button(self.toolbar,
                                                      "Images",
                                                      hic.ICON_IMAGES_DISPLAY_FILE,
                                                      hic.ICON_IMAGES_HOVER_FILE,
                                                      "Open previously built images",
                                                      self.my_images_button_clicked_cb)
        settings_button = self.append_toolbar_button(self.toolbar,
                                                     "Settings",
                                                     hic.ICON_SETTINGS_DISPLAY_FILE,
                                                     hic.ICON_SETTINGS_HOVER_FILE,
                                                     "View additional build settings",
                                                     self.settings_button_clicked_cb)

        self.config_top_button = self.add_onto_top_bar(self.toolbar)

        self.gtable = gtk.Table(40, 40, True)
        self.create_config_machine()
        self.create_config_baseimg()
        self.config_build_button = self.create_config_build_button()

    def _remove_all_widget(self):
        children = self.gtable.get_children() or []
        for child in children:
            self.gtable.remove(child)
        children = self.box_group_area.get_children() or []
        for child in children:
            self.box_group_area.remove(child)
        children = self.get_children() or []
        for child in children:
            self.remove(child)

    def _pack_components(self, pack_config_build_button = False):
        self._remove_all_widget()
        self.pack_start(self.config_top_button, expand=False, fill=False)
        self.pack_start(self.group_align, expand=True, fill=True)

        self.box_group_area.pack_start(self.gtable, expand=True, fill=True)
        if pack_config_build_button:
            self.box_group_area.pack_end(self.config_build_button, expand=False, fill=False)
        else:
            box = gtk.HBox(False, 6)
            box.show()
            subbox = gtk.HBox(False, 0)
            subbox.set_size_request(205, 49)
            subbox.show()
            box.add(subbox)
            self.box_group_area.pack_end(box, False, False)

    def show_machine(self):
        self.progress_bar.reset()
        self._pack_components(pack_config_build_button = False)
        self.set_config_machine_layout(show_progress_bar = False)
        self.show_all()

    def update_progress_bar(self, title, fraction, status=None):
        if self.stopping == False:
            self.progress_bar.update(fraction)
            self.progress_bar.set_text(title)
            self.progress_bar.set_rcstyle(status)

    def show_info_populating(self):
        self._pack_components(pack_config_build_button = False)
        self.set_config_machine_layout(show_progress_bar = True)
        self.show_all()

    def show_info_populated(self):
        self.progress_bar.reset()
        self._pack_components(pack_config_build_button = False)
        self.set_config_machine_layout(show_progress_bar = False)
        self.set_config_baseimg_layout()
        self.show_all()

    def show_baseimg_selected(self):
        self.progress_bar.reset()
        self._pack_components(pack_config_build_button = True)
        self.set_config_machine_layout(show_progress_bar = False)
        self.set_config_baseimg_layout()
        self.show_all()
        if self.builder.recipe_model.get_selected_image() == self.builder.recipe_model.__custom_image__:
            self.just_bake_button.hide()
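
    # The show_* methods above step the page through its states:
    # show_machine() -> machine combo only; show_info_populating() -> parse
    # progress bar with a Stop button; show_info_populated() -> machine plus
    # image recipe chooser; show_baseimg_selected() -> the same, plus the
    # build buttons (with "Build image" hidden for the empty custom image).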

    def add_warnings_bar(self):
        # create the warnings bar shown when recipe parsing generates warnings
        color = HobColors.KHAKI
        warnings_bar = gtk.EventBox()
        warnings_bar.modify_bg(gtk.STATE_NORMAL, gtk.gdk.color_parse(color))
        warnings_bar.set_flags(gtk.CAN_DEFAULT)
        warnings_bar.grab_default()

        build_stop_tab = gtk.Table(10, 20, True)
        warnings_bar.add(build_stop_tab)

        icon = gtk.Image()
        icon_pix_buffer = gtk.gdk.pixbuf_new_from_file(hic.ICON_INDI_ALERT_FILE)
        icon.set_from_pixbuf(icon_pix_buffer)
        build_stop_tab.attach(icon, 0, 2, 0, 10)

        label = gtk.Label()
        label.set_alignment(0.0, 0.5)
        warnings_nb = len(self.builder.parsing_warnings)
        if warnings_nb == 1:
            label.set_markup("<span size='x-large'><b>1 recipe parsing warning</b></span>")
        else:
            label.set_markup("<span size='x-large'><b>%s recipe parsing warnings</b></span>" % warnings_nb)
        build_stop_tab.attach(label, 2, 12, 0, 10)

        view_warnings_button = HobButton("View warnings")
        view_warnings_button.connect('clicked', self.view_warnings_button_clicked_cb)
        build_stop_tab.attach(view_warnings_button, 15, 19, 1, 9)

        return warnings_bar

    def disable_warnings_bar(self):
        if self.builder.parsing_warnings:
            if hasattr(self, 'warnings_bar'):
                self.warnings_bar.hide_all()
            self.builder.parsing_warnings = []

    def create_config_machine(self):
        self.machine_title = gtk.Label()
        self.machine_title.set_alignment(0.0, 0.5)
        mark = "<span %s>Select a machine</span>" % self.span_tag('x-large', 'bold')
        self.machine_title.set_markup(mark)

        self.machine_title_desc = gtk.Label()
        self.machine_title_desc.set_alignment(0.0, 0.5)
        mark = ("<span %s>Your selection is the profile of the target machine for which you"
                " are building the image.\n</span>") % (self.span_tag('medium'))
        self.machine_title_desc.set_markup(mark)

        self.machine_combo = gtk.combo_box_new_text()
        self.machine_combo.connect("changed", self.machine_combo_changed_cb)

        icon_file = hic.ICON_LAYERS_DISPLAY_FILE
        hover_file = hic.ICON_LAYERS_HOVER_FILE
        self.layer_button = HobImageButton("Layers", "Add support for machines, software, etc.",
                                           icon_file, hover_file)
        self.layer_button.connect("clicked", self.layer_button_clicked_cb)

        markup = "Layers are a powerful mechanism to extend the Yocto Project "
        markup += "with your own functionality.\n"
        markup += "For more on layers, check the <a href=\""
        markup += "http://www.yoctoproject.org/docs/current/dev-manual/"
        markup += "dev-manual.html#understanding-and-using-layers\">reference manual</a>."
        self.layer_info_icon = HobInfoButton("<b>Layers</b>" + "*" + markup, self.get_parent())
        self.progress_bar = HobProgressBar()
        self.stop_button = HobAltButton("Stop")
        self.stop_button.connect("clicked", self.stop_button_clicked_cb)
        self.machine_separator = gtk.HSeparator()

    def set_config_machine_layout(self, show_progress_bar = False):
        self.gtable.attach(self.machine_title, 0, 40, 0, 4)
        self.gtable.attach(self.machine_title_desc, 0, 40, 4, 6)
        self.gtable.attach(self.machine_combo, 0, 12, 7, 10)
        self.gtable.attach(self.layer_button, 14, 36, 7, 12)
        self.gtable.attach(self.layer_info_icon, 36, 40, 7, 11)
        if show_progress_bar:
            #self.gtable.attach(self.progress_box, 0, 40, 15, 18)
            self.gtable.attach(self.progress_bar, 0, 37, 15, 18)
            self.gtable.attach(self.stop_button, 37, 40, 15, 18, 0, 0)
        if self.builder.parsing_warnings:
            self.warnings_bar = self.add_warnings_bar()
            self.gtable.attach(self.warnings_bar, 0, 40, 14, 18)
            self.warning_shift = 4
        else:
            self.warning_shift = 0
        self.gtable.attach(self.machine_separator, 0, 40, 13, 14)

    def create_config_baseimg(self):
        self.image_title = gtk.Label()
        self.image_title.set_alignment(0, 1.0)
        mark = "<span %s>Select an image recipe</span>" % self.span_tag('x-large', 'bold')
        self.image_title.set_markup(mark)

        self.image_title_desc = gtk.Label()
        self.image_title_desc.set_alignment(0, 0.5)

        mark = ("<span %s>Image recipes are a starting point for the type of image you want. "
                "You can build them as \n"
                "they are or edit them to suit your needs.\n</span>") % self.span_tag('medium')
        self.image_title_desc.set_markup(mark)

        self.image_combo = gtk.combo_box_new_text()
        self.image_combo.set_row_separator_func(self.combo_separator_func, None)
        self.image_combo_id = self.image_combo.connect("changed", self.image_combo_changed_cb)

        self.image_desc = gtk.Label()
        self.image_desc.set_alignment(0.0, 0.5)
        self.image_desc.set_size_request(256, -1)
        self.image_desc.set_justify(gtk.JUSTIFY_LEFT)
        self.image_desc.set_line_wrap(True)

        # button to view recipes
        icon_file = hic.ICON_RCIPE_DISPLAY_FILE
        hover_file = hic.ICON_RCIPE_HOVER_FILE
        self.view_adv_configuration_button = HobImageButton("Advanced configuration",
                                                            "Select image types, package formats, etc",
                                                            icon_file, hover_file)
        self.view_adv_configuration_button.connect("clicked", self.view_adv_configuration_button_clicked_cb)

        self.image_separator = gtk.HSeparator()

    def combo_separator_func(self, model, iter, user_data):
        name = model.get_value(iter, 0)
        if name == "--Separator--":
            return True
        return False

    def set_config_baseimg_layout(self):
        self.gtable.attach(self.image_title, 0, 40, 15+self.warning_shift, 17+self.warning_shift)
        self.gtable.attach(self.image_title_desc, 0, 40, 18+self.warning_shift, 22+self.warning_shift)
        self.gtable.attach(self.image_combo, 0, 12, 23+self.warning_shift, 26+self.warning_shift)
        self.gtable.attach(self.image_desc, 0, 12, 27+self.warning_shift, 33+self.warning_shift)
        self.gtable.attach(self.view_adv_configuration_button, 14, 36, 23+self.warning_shift, 28+self.warning_shift)
        self.gtable.attach(self.image_separator, 0, 40, 35+self.warning_shift, 36+self.warning_shift)

    def create_config_build_button(self):
        # Create the "Build image" and "Edit image recipe" buttons at the bottom
        button_box = gtk.HBox(False, 6)

        # create button "Build image"
        self.just_bake_button = HobButton("Build image")
        self.just_bake_button.set_tooltip_text("Build the image recipe as it is")
        self.just_bake_button.connect("clicked", self.just_bake_button_clicked_cb)
        button_box.pack_end(self.just_bake_button, expand=False, fill=False)

        # create button "Edit image recipe"
        self.edit_image_button = HobAltButton("Edit image recipe")
        self.edit_image_button.set_tooltip_text("Customize the recipes and packages to be included in your image")
        self.edit_image_button.connect("clicked", self.edit_image_button_clicked_cb)
        button_box.pack_end(self.edit_image_button, expand=False, fill=False)

        return button_box

    def stop_button_clicked_cb(self, button):
        self.stopping = True
        self.progress_bar.set_text("Stopping recipe parsing")
        self.progress_bar.set_rcstyle("stop")
        self.builder.cancel_parse_sync()

    def view_warnings_button_clicked_cb(self, button):
        self.builder.show_warning_dialog()

    def machine_combo_changed_idle_cb(self):
        self.builder.window.set_cursor(None)

    def machine_combo_changed_cb(self, machine_combo):
        self.stopping = False
        self.builder.parsing_warnings = []
        combo_item = machine_combo.get_active_text()
        if not combo_item or combo_item == self.__dummy_machine__:
            return

        self.builder.window.set_cursor(gtk.gdk.Cursor(gtk.gdk.WATCH))
        self.builder.wait(0.1) # wait for combo and cursor to update

        # remove the __dummy_machine__ item from the store list after the
        # first user selection because it is no longer valid
        combo_store = machine_combo.get_model()
        if len(combo_store) and (combo_store[0][0] == self.__dummy_machine__):
            machine_combo.remove_text(0)

        self.builder.configuration.curr_mach = combo_item
        if self.machine_combo_changed_by_manual:
            self.builder.configuration.clear_selection()
        # reset machine_combo_changed_by_manual
        self.machine_combo_changed_by_manual = True

        self.builder.configuration.selected_image = None

        # Reparse the recipes
        self.builder.populate_recipe_package_info_async()

        glib.idle_add(self.machine_combo_changed_idle_cb)

    def update_machine_combo(self):
        self.disable_warnings_bar()
        all_machines = [self.__dummy_machine__] + self.builder.parameters.all_machines

        model = self.machine_combo.get_model()
        model.clear()
        for machine in all_machines:
            self.machine_combo.append_text(machine)
        self.machine_combo.set_active(0)

    def switch_machine_combo(self):
        self.disable_warnings_bar()
        self.machine_combo_changed_by_manual = False
        model = self.machine_combo.get_model()
        active = 0
        while active < len(model):
            if model[active][0] == self.builder.configuration.curr_mach:
                self.machine_combo.set_active(active)
                return
            active += 1

        if model[0][0] != self.__dummy_machine__:
            self.machine_combo.insert_text(0, self.__dummy_machine__)

        self.machine_combo.set_active(0)

    def update_image_desc(self):
        desc = ""
        selected_image = self.image_combo.get_active_text()
        if selected_image and selected_image in self.builder.recipe_model.pn_path.keys():
            image_path = self.builder.recipe_model.pn_path[selected_image]
            image_iter = self.builder.recipe_model.get_iter(image_path)
            desc = self.builder.recipe_model.get_value(image_iter, self.builder.recipe_model.COL_DESC)

        mark = ("<span %s>%s</span>\n") % (self.span_tag('small'), desc)
        self.image_desc.set_markup(mark)

    def image_combo_changed_idle_cb(self, selected_image, selected_recipes, selected_packages):
        self.builder.update_recipe_model(selected_image, selected_recipes)
        self.builder.update_package_model(selected_packages)
        self.builder.window_sensitive(True)

    def image_combo_changed_cb(self, combo):
        self.builder.window_sensitive(False)
        selected_image = self.image_combo.get_active_text()
        if selected_image == self.__custom_image__:
            topdir = self.builder.get_topdir()
            images_dir = topdir + "/recipes/images/custom/"
            self.builder.ensure_dir(images_dir)

            dialog = RetrieveImageDialog(images_dir, "Select from my image recipes",
                                         self.builder, gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT)
            response = dialog.run()
            if response == gtk.RESPONSE_OK:
                image_name = dialog.get_filename()
                head, tail = os.path.split(image_name)
                selected_image = os.path.splitext(tail)[0]
                self.custom_image_selected = selected_image
                self.update_image_combo(self.builder.recipe_model, selected_image)
            else:
                selected_image = self.__dummy_image__
                self.update_image_combo(self.builder.recipe_model, None)
            dialog.destroy()
        else:
            if self.custom_image_selected:
                self.custom_image_selected = None
                self.update_image_combo(self.builder.recipe_model, selected_image)

        if not selected_image or (selected_image == self.__dummy_image__):
            self.builder.window_sensitive(True)
            self.just_bake_button.hide()
            self.edit_image_button.hide()
            return

        # remove the __dummy_image__ item from the store list after the
        # first user selection because it is no longer valid
        combo_store = combo.get_model()
        if len(combo_store) and (combo_store[0][0] == self.__dummy_image__):
            combo.remove_text(0)

        self.builder.customized = False

        selected_recipes = []

        image_path = self.builder.recipe_model.pn_path[selected_image]
        image_iter = self.builder.recipe_model.get_iter(image_path)
        selected_packages = self.builder.recipe_model.get_value(image_iter, self.builder.recipe_model.COL_INSTALL).split()
        self.update_image_desc()

        self.builder.recipe_model.reset()
        self.builder.package_model.reset()

        self.show_baseimg_selected()

        if selected_image == self.builder.recipe_model.__custom_image__:
            self.just_bake_button.hide()

        glib.idle_add(self.image_combo_changed_idle_cb, selected_image, selected_recipes, selected_packages)

    def _image_combo_connect_signal(self):
        if not self.image_combo_id:
            self.image_combo_id = self.image_combo.connect("changed", self.image_combo_changed_cb)

    def _image_combo_disconnect_signal(self):
        if self.image_combo_id:
            self.image_combo.disconnect(self.image_combo_id)
            self.image_combo_id = None
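
    # The connect/disconnect pair above guards update_image_combo(): the
    # "changed" handler is detached while the combo is repopulated, so the
    # programmatic append_text()/set_active() calls do not re-enter
    # image_combo_changed_cb(), then reattached once the store is rebuilt.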

    def update_image_combo(self, recipe_model, selected_image):
        # Update the image combo according to the images in the recipe_model
        # populate image combo
        filter = {RecipeListModel.COL_TYPE : ['image']}
        image_model = recipe_model.tree_model(filter)
        image_model.set_sort_column_id(recipe_model.COL_NAME, gtk.SORT_ASCENDING)
        active = 0
        cnt = 0

        white_pattern = []
        if self.builder.parameters.image_white_pattern:
            for i in self.builder.parameters.image_white_pattern.split():
                white_pattern.append(re.compile(i))

        black_pattern = []
        if self.builder.parameters.image_black_pattern:
            for i in self.builder.parameters.image_black_pattern.split():
                black_pattern.append(re.compile(i))
        black_pattern.append(re.compile("hob-image"))
        black_pattern.append(re.compile("edited(-[0-9]*)*\.bb$"))

        it = image_model.get_iter_first()
        self._image_combo_disconnect_signal()
        model = self.image_combo.get_model()
        model.clear()
        # Set an indicator text in the combo store when first opened
        if not selected_image:
            self.image_combo.append_text(self.__dummy_image__)
            cnt = cnt + 1

        self.image_combo.append_text(self.__custom_image__)
        self.image_combo.append_text("--Separator--")
        cnt = cnt + 2

        topdir = self.builder.get_topdir()
        # append and set active
        while it:
            path = image_model.get_path(it)
            it = image_model.iter_next(it)
            image_name = image_model[path][recipe_model.COL_NAME]
            if image_name == self.builder.recipe_model.__custom_image__:
                continue

            if black_pattern:
                allow = True
                for pattern in black_pattern:
                    if pattern.search(image_name):
                        allow = False
                        break
            elif white_pattern:
                allow = False
                for pattern in white_pattern:
                    if pattern.search(image_name):
                        allow = True
                        break
            else:
                allow = True

            file_name = image_model[path][recipe_model.COL_FILE]
            if file_name and topdir in file_name:
                allow = False

            if allow:
                self.image_combo.append_text(image_name)
                if image_name == selected_image:
                    active = cnt
                cnt = cnt + 1
        self.image_combo.append_text(self.builder.recipe_model.__custom_image__)

        if selected_image == self.builder.recipe_model.__custom_image__:
            active = cnt

        if self.custom_image_selected:
            self.image_combo.append_text("--Separator--")
            self.image_combo.append_text(self.custom_image_selected)
            cnt = cnt + 2
            if self.custom_image_selected == selected_image:
                active = cnt

        self.image_combo.set_active(active)

        if active != 0:
            self.show_baseimg_selected()

        self._image_combo_connect_signal()
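
    # Filtering sketch for update_image_combo(), with a hypothetical setting:
    # image_black_pattern = "-dbg$" would hide any image name ending in
    # "-dbg", while the two built-in patterns always hide "hob-image" and
    # locally edited recipes ("edited(-[0-9]*)*\.bb$"). Note that the
    # blacklist branch runs whenever black_pattern is non-empty, which is
    # always the case here, so image_white_pattern only matters if those
    # default entries were removed.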

    def layer_button_clicked_cb(self, button):
        # Create a layer selection dialog
        self.builder.show_layer_selection_dialog()

    def view_adv_configuration_button_clicked_cb(self, button):
        # Create an advanced settings dialog
        response, settings_changed = self.builder.show_adv_settings_dialog()
        if not response:
            return
        if settings_changed:
            self.builder.window.set_cursor(gtk.gdk.Cursor(gtk.gdk.WATCH))
            self.builder.wait(0.1) # wait for adv_settings_dialog to terminate
            self.builder.reparse_post_adv_settings()
            self.builder.window.set_cursor(None)

    def just_bake_button_clicked_cb(self, button):
        self.builder.parsing_warnings = []
        self.builder.just_bake()

    def edit_image_button_clicked_cb(self, button):
        self.builder.set_base_image()
        self.builder.show_recipes()

    def my_images_button_clicked_cb(self, button):
        self.builder.show_load_my_images_dialog()

    def settings_button_clicked_cb(self, button):
        # Create a simple settings dialog
        response, settings_changed = self.builder.show_simple_settings_dialog()
        if not response:
            return
        if settings_changed:
            self.builder.reparse_post_adv_settings()