mirror of https://git.yoctoproject.org/poky
synced 2026-02-01 06:18:43 +01:00
Compare commits

383 commits (abbreviated SHA1s from 6717d19848 through 6ca67b3288)
.gitignore (vendored): 7 changed lines
@@ -1,14 +1,14 @@
*.pyc
*.pyo
/*.patch
/build*/
build*/
pyshtables.py
pstage/
scripts/oe-git-proxy-socks
sources/
meta-*/
!meta-skeleton
!meta-selftest
!meta-hob
hob-image-*.bb
*.swp
*.orig
@@ -21,6 +21,3 @@ documentation/user-manual/user-manual.html
documentation/user-manual/user-manual.pdf
documentation/user-manual/user-manual.tgz
pull-*/
bitbake/lib/toaster/contrib/tts/backlog.txt
bitbake/lib/toaster/contrib/tts/log/*
bitbake/lib/toaster/contrib/tts/.cache/*

README: 29 changed lines
@@ -30,29 +30,20 @@ For information about OpenEmbedded, see the OpenEmbedded website:
Where to Send Patches
=====================

As Poky is an integration repository (built using a tool called combo-layer),
patches against the various components should be sent to their respective
upstreams:
As Poky is an integration repository, patches against the various components
should be sent to their respective upstreams.

bitbake:
    Git repository: http://git.openembedded.org/bitbake/
    Mailing list: bitbake-devel@lists.openembedded.org
    bitbake-devel@lists.openembedded.org

documentation:
    Git repository: http://git.yoctoproject.org/cgit/cgit.cgi/yocto-docs/
    Mailing list: yocto@yoctoproject.org
meta-yocto:
    poky@yoctoproject.org

meta-yocto(-bsp):
    Git repository: http://git.yoctoproject.org/cgit/cgit.cgi/meta-yocto(-bsp)
    Mailing list: poky@yoctoproject.org

Everything else should be sent to the OpenEmbedded Core mailing list. If in
doubt, check the oe-core git repository for the content you intend to modify.
Most everything else should be sent to the OpenEmbedded Core mailing list. If
in doubt, check the oe-core git repository for the content you intend to modify.
Before sending, be sure the patches apply cleanly to the current oe-core git
repository.
openembedded-core@lists.openembedded.org

    Git repository: http://git.openembedded.org/openembedded-core/
    Mailing list: openembedded-core@lists.openembedded.org

Note: The scripts directory should be treated with extra care as it is a mix of
oe-core and poky-specific files.
Note: The scripts directory should be treated with extra care as it is a mix
of oe-core and poky-specific files.

@@ -52,13 +52,6 @@ The following boards are supported by the meta-yocto-bsp layer:
For more information see the board's section below. The appropriate MACHINE
variable value corresponding to the board is given in brackets.

Reference Board Maintenance
===========================

Send pull requests, patches, comments or questions about meta-yocto-bsps to poky@yoctoproject.org

Maintainers: Kevin Hao <kexin.hao@windriver.com>
             Bruce Ashfield <bruce.ashfield@windriver.com>

Consumer Devices
================
@@ -105,7 +98,9 @@ Intel Atom platforms:

and is likely to work on many unlisted Atom/Core/Xeon based devices. The MACHINE
type supports ethernet, wifi, sound, and Intel/vesa graphics by default in
addition to common PC input devices, busses, and so on.
addition to common PC input devices, busses, and so on. Note that it does not
included the binary-only graphic drivers used on some Atom platforms, for
accelerated graphics on these machines please refer to meta-intel.

Depending on the device, it can boot from a traditional hard-disk, a USB device,
or over the network. Writing generated images to physical media is
@@ -249,14 +244,14 @@ if used via a usb card reader):
5. If using core-image-minimal rootfs, install the modules
   # tar x -C /media/root -f modules-beaglebone.tgz

6. If using core-image-minimal rootfs, install the kernel zImage into /boot
6. If using core-image-minimal rootfs, install the kernel uImage into /boot
   directory of rootfs
   # cp zImage-beaglebone.bin /media/root/boot/zImage
   # cp uImage-beaglebone.bin /media/root/boot/uImage

7. If using core-image-minimal rootfs, also install device tree (DTB) files
   into /boot directory of rootfs
   # cp zImage-am335x-bone.dtb /media/root/boot/am335x-bone.dtb
   # cp zImage-am335x-boneblack.dtb /media/root/boot/am335x-boneblack.dtb
   # cp uImage-am335x-bone.dtb /media/root/boot/am335x-bone.dtb
   # cp uImage-am335x-boneblack.dtb /media/root/boot/am335x-boneblack.dtb

8. Unmount the SD partitions, insert the SD card into the Beaglebone, and
   boot the Beaglebone
@@ -322,22 +317,6 @@ Load the kernel and dtb (device tree blob), and boot the system as follows:
=> tftp 2000000 uImage-mpc8315e-rdb.dtb
=> bootm 1000000 - 2000000

--- Booting from JFFS2 root ---

1. First boot the board with NFS root.

2. Erase the MTD partition which will be used as root:

   $ flash_eraseall /dev/mtd3

3. Copy the JFFS2 image to the MTD partition:

   $ flashcp core-image-minimal-mpc8315e-rdb.jffs2 /dev/mtd3

4. Then reboot the board and set up the environment in U-Boot:

   => setenv bootargs root=/dev/mtdblock3 rootfstype=jffs2 console=ttyS0,115200


Ubiquiti Networks EdgeRouter Lite (edgerouter)
==============================================
@@ -350,14 +329,11 @@ Setup instructions
------------------

You will need the following:
* RJ45 -> serial ("rollover") cable connected from your PC to the CONSOLE
  port on the device
* Ethernet connected to the first ethernet port on the board

If using NFS as part of the setup process, you will also need:
* NFS root setup on your workstation
* TFTP server installed on your workstation (if fetching the kernel from
  TFTP, see below).
* TFTP server installed on your workstation
* RJ45 -> serial ("rollover") cable connected from your PC to the CONSOLE
  port on the board
* Ethernet connected to the first ethernet port on the board

--- Preparation ---

@@ -365,7 +341,7 @@ Build an image (e.g. core-image-minimal) using "edgerouter" as the MACHINE.
In the following instruction it is based on core-image-minimal. Another target
may be similiar with it.

--- Booting from NFS root / kernel via TFTP ---
--- Booting from NFS root ---

Load the kernel, and boot the system as follows:

bitbake/bin/bitbake

@@ -23,31 +23,340 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os
import sys

import sys, logging
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)),
                                'lib'))

import optparse
import warnings
from traceback import format_exception
try:
    import bb
except RuntimeError as exc:
    sys.exit(str(exc))

from bb import event
import bb.msg
from bb import cooker
from bb import ui
from bb import server
from bb import cookerdata
from bb.main import bitbake_main, BitBakeConfigParameters, BBMainException

__version__ = "1.28.0"
__version__ = "1.22.0"
logger = logging.getLogger("BitBake")

# Python multiprocessing requires /dev/shm
if not os.access('/dev/shm', os.W_OK | os.X_OK):
    sys.exit("FATAL: /dev/shm does not exist or is not writable")

# Unbuffer stdout to avoid log truncation in the event
# of an unorderly exit as well as to provide timely
# updates to log files for use with tail
try:
    if sys.stdout.name == '<stdout>':
        sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
except:
    pass


def get_ui(config):
    if not config.ui:
        # modify 'ui' attribute because it is also read by cooker
        config.ui = os.environ.get('BITBAKE_UI', 'knotty')

    interface = config.ui

    try:
        # Dynamically load the UI based on the ui name. Although we
        # suggest a fixed set this allows you to have flexibility in which
        # ones are available.
        module = __import__("bb.ui", fromlist = [interface])
        return getattr(module, interface)
    except AttributeError:
        sys.exit("FATAL: Invalid user interface '%s' specified.\n"
                 "Valid interfaces: depexp, goggle, ncurses, hob, knotty [default]." % interface)


# Display bitbake/OE warnings via the BitBake.Warnings logger, ignoring others"""
warnlog = logging.getLogger("BitBake.Warnings")
_warnings_showwarning = warnings.showwarning
def _showwarning(message, category, filename, lineno, file=None, line=None):
    if file is not None:
        if _warnings_showwarning is not None:
            _warnings_showwarning(message, category, filename, lineno, file, line)
    else:
        s = warnings.formatwarning(message, category, filename, lineno)
        warnlog.warn(s)

warnings.showwarning = _showwarning
warnings.filterwarnings("ignore")
warnings.filterwarnings("default", module="(<string>$|(oe|bb)\.)")
warnings.filterwarnings("ignore", category=PendingDeprecationWarning)
warnings.filterwarnings("ignore", category=ImportWarning)
warnings.filterwarnings("ignore", category=DeprecationWarning, module="<string>$")
warnings.filterwarnings("ignore", message="With-statements now directly support multiple context managers")

class BitBakeConfigParameters(cookerdata.ConfigParameters):

    def parseCommandLine(self):
        parser = optparse.OptionParser(
            version = "BitBake Build Tool Core version %s, %%prog version %s" % (bb.__version__, __version__),
            usage = """%prog [options] [recipename/target ...]

    Executes the specified task (default is 'build') for a given set of target recipes (.bb files).
    It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which
    will provide the layer, BBFILES and other configuration information.""")

        parser.add_option("-b", "--buildfile", help = "Execute tasks from a specific .bb recipe directly. WARNING: Does not handle any dependencies from other recipes.",
                          action = "store", dest = "buildfile", default = None)

        parser.add_option("-k", "--continue", help = "Continue as much as possible after an error. While the target that failed and anything depending on it cannot be built, as much as possible will be built before stopping.",
                          action = "store_false", dest = "abort", default = True)

        parser.add_option("-a", "--tryaltconfigs", help = "Continue with builds by trying to use alternative providers where possible.",
                          action = "store_true", dest = "tryaltconfigs", default = False)

        parser.add_option("-f", "--force", help = "Force the specified targets/task to run (invalidating any existing stamp file).",
                          action = "store_true", dest = "force", default = False)

        parser.add_option("-c", "--cmd", help = "Specify the task to execute. The exact options available depend on the metadata. Some examples might be 'compile' or 'populate_sysroot' or 'listtasks' may give a list of the tasks available.",
                          action = "store", dest = "cmd")

        parser.add_option("-C", "--clear-stamp", help = "Invalidate the stamp for the specified task such as 'compile' and then run the default task for the specified target(s).",
                          action = "store", dest = "invalidate_stamp")

        parser.add_option("-r", "--read", help = "Read the specified file before bitbake.conf.",
                          action = "append", dest = "prefile", default = [])

        parser.add_option("-R", "--postread", help = "Read the specified file after bitbake.conf.",
                          action = "append", dest = "postfile", default = [])

        parser.add_option("-v", "--verbose", help = "Output more log message data to the terminal.",
                          action = "store_true", dest = "verbose", default = False)

        parser.add_option("-D", "--debug", help = "Increase the debug level. You can specify this more than once.",
                          action = "count", dest="debug", default = 0)

        parser.add_option("-n", "--dry-run", help = "Don't execute, just go through the motions.",
                          action = "store_true", dest = "dry_run", default = False)

        parser.add_option("-S", "--dump-signatures", help = "Dump out the signature construction information, with no task execution. Parameters are passed to the signature handling code, use 'none' if no specific handler is required.",
                          action = "append", dest = "dump_signatures", default = [])

        parser.add_option("-p", "--parse-only", help = "Quit after parsing the BB recipes.",
                          action = "store_true", dest = "parse_only", default = False)

        parser.add_option("-s", "--show-versions", help = "Show current and preferred versions of all recipes.",
                          action = "store_true", dest = "show_versions", default = False)

        parser.add_option("-e", "--environment", help = "Show the global or per-package environment complete with information about where variables were set/changed.",
                          action = "store_true", dest = "show_environment", default = False)

        parser.add_option("-g", "--graphviz", help = "Save dependency tree information for the specified targets in the dot syntax.",
                          action = "store_true", dest = "dot_graph", default = False)

        parser.add_option("-I", "--ignore-deps", help = """Assume these dependencies don't exist and are already provided (equivalent to ASSUME_PROVIDED). Useful to make dependency graphs more appealing""",
                          action = "append", dest = "extra_assume_provided", default = [])

        parser.add_option("-l", "--log-domains", help = """Show debug logging for the specified logging domains""",
                          action = "append", dest = "debug_domains", default = [])

        parser.add_option("-P", "--profile", help = "Profile the command and save reports.",
                          action = "store_true", dest = "profile", default = False)

        parser.add_option("-u", "--ui", help = "The user interface to use (e.g. knotty, hob, depexp).",
                          action = "store", dest = "ui")

        parser.add_option("-t", "--servertype", help = "Choose which server to use, process or xmlrpc.",
                          action = "store", dest = "servertype")

        parser.add_option("", "--revisions-changed", help = "Set the exit code depending on whether upstream floating revisions have changed or not.",
                          action = "store_true", dest = "revisions_changed", default = False)

        parser.add_option("", "--server-only", help = "Run bitbake without a UI, only starting a server (cooker) process.",
                          action = "store_true", dest = "server_only", default = False)

        parser.add_option("-B", "--bind", help = "The name/address for the bitbake server to bind to.",
                          action = "store", dest = "bind", default = False)

        parser.add_option("", "--no-setscene", help = "Do not run any setscene tasks. sstate will be ignored and everything needed, built.",
                          action = "store_true", dest = "nosetscene", default = False)

        parser.add_option("", "--remote-server", help = "Connect to the specified server.",
                          action = "store", dest = "remote_server", default = False)

        parser.add_option("-m", "--kill-server", help = "Terminate the remote server.",
                          action = "store_true", dest = "kill_server", default = False)

        parser.add_option("", "--observe-only", help = "Connect to a server as an observing-only client.",
                          action = "store_true", dest = "observe_only", default = False)

        parser.add_option("", "--status-only", help = "Check the status of the remote bitbake server.",
                          action = "store_true", dest = "status_only", default = False)

        options, targets = parser.parse_args(sys.argv)

        # some environmental variables set also configuration options
        if "BBSERVER" in os.environ:
            options.servertype = "xmlrpc"
            options.remote_server = os.environ["BBSERVER"]

        return options, targets[1:]


def start_server(servermodule, configParams, configuration, features):
    server = servermodule.BitBakeServer()
    if configParams.bind:
        (host, port) = configParams.bind.split(':')
        server.initServer((host, int(port)))
        configuration.interface = [ server.serverImpl.host, server.serverImpl.port ]
    else:
        server.initServer()
        configuration.interface = []

    try:
        configuration.setServerRegIdleCallback(server.getServerIdleCB())

        cooker = bb.cooker.BBCooker(configuration, features)

        server.addcooker(cooker)
        server.saveConnectionDetails()
    except Exception as e:
        exc_info = sys.exc_info()
        while hasattr(server, "event_queue"):
            try:
                import queue
            except ImportError:
                import Queue as queue
            try:
                event = server.event_queue.get(block=False)
            except (queue.Empty, IOError):
                break
            if isinstance(event, logging.LogRecord):
                logger.handle(event)
        raise exc_info[1], None, exc_info[2]
    server.detach()
    return server


def main():

    configParams = BitBakeConfigParameters()
    configuration = cookerdata.CookerConfiguration()
    configuration.setConfigParameters(configParams)

    ui_module = get_ui(configParams)

    # Server type can be xmlrpc or process currently, if nothing is specified,
    # the default server is process
    if configParams.servertype:
        server_type = configParams.servertype
    else:
        server_type = 'process'

    try:
        module = __import__("bb.server", fromlist = [server_type])
        servermodule = getattr(module, server_type)
    except AttributeError:
        sys.exit("FATAL: Invalid server type '%s' specified.\n"
                 "Valid interfaces: xmlrpc, process [default]." % server_type)

    if configParams.server_only:
        if configParams.servertype != "xmlrpc":
            sys.exit("FATAL: If '--server-only' is defined, we must set the servertype as 'xmlrpc'.\n")
        if not configParams.bind:
            sys.exit("FATAL: The '--server-only' option requires a name/address to bind to with the -B option.\n")
        if configParams.remote_server:
            sys.exit("FATAL: The '--server-only' option conflicts with %s.\n" %
                     ("the BBSERVER environment variable" if "BBSERVER" in os.environ else "the '--remote-server' option" ))

    if configParams.bind and configParams.servertype != "xmlrpc":
        sys.exit("FATAL: If '-B' or '--bind' is defined, we must set the servertype as 'xmlrpc'.\n")

    if configParams.remote_server and configParams.servertype != "xmlrpc":
        sys.exit("FATAL: If '--remote-server' is defined, we must set the servertype as 'xmlrpc'.\n")

    if configParams.observe_only and (not configParams.remote_server or configParams.bind):
        sys.exit("FATAL: '--observe-only' can only be used by UI clients connecting to a server.\n")

    if "BBDEBUG" in os.environ:
        level = int(os.environ["BBDEBUG"])
        if level > configuration.debug:
            configuration.debug = level

    bb.msg.init_msgconfig(configParams.verbose, configuration.debug,
                          configuration.debug_domains)

    # Ensure logging messages get sent to the UI as events
    handler = bb.event.LogHandler()
    if not configParams.status_only:
        # In status only mode there are no logs and no UI
        logger.addHandler(handler)

    # Clear away any spurious environment variables while we stoke up the cooker
    cleanedvars = bb.utils.clean_environment()

    featureset = []
    if not configParams.server_only:
        # Collect the feature set for the UI
        featureset = getattr(ui_module, "featureSet", [])

    if not configParams.remote_server:
        # we start a server with a given configuration
        server = start_server(servermodule, configParams, configuration, featureset)
        bb.event.ui_queue = []
    else:
        # we start a stub server that is actually a XMLRPClient that connects to a real server
        server = servermodule.BitBakeXMLRPCClient(configParams.observe_only)
        server.saveConnectionDetails(configParams.remote_server)
        server.saveConnectionConfigParams(configParams)

    if not configParams.server_only:
        if configParams.status_only:
            try:
                server_connection = server.establishConnection(featureset)
            except:
                sys.exit(1)
            if not server_connection:
                sys.exit(1)
            server_connection.terminate()
            sys.exit(0)

        # Setup a connection to the server (cooker)
        server_connection = server.establishConnection(featureset)
        if not server_connection:
            if configParams.kill_server:
                bb.fatal("Server already killed")
            configParams.bind = configParams.remote_server
            start_server(servermodule, configParams, configuration, featureset)
            bb.event.ui_queue = []
            server_connection = server.establishConnection(featureset)

        # Restore the environment in case the UI needs it
        for k in cleanedvars:
            os.environ[k] = cleanedvars[k]

        logger.removeHandler(handler)

        try:
            return ui_module.main(server_connection.connection, server_connection.events, configParams)
        finally:
            bb.event.ui_queue = []
            server_connection.terminate()
    else:
        print("server address: %s, server port: %s" % (server.serverImpl.host, server.serverImpl.port))
        return 0

    return 1

if __name__ == "__main__":
    if __version__ != bb.__version__:
        sys.exit("Bitbake core version and program version mismatch!")
    try:
        sys.exit(bitbake_main(BitBakeConfigParameters(sys.argv),
                              cookerdata.CookerConfiguration()))
    except BBMainException as err:
        sys.exit(err)
        ret = main()
    except bb.BBHandledException:
        sys.exit(1)
        ret = 1
    except Exception:
        ret = 1
        import traceback
        traceback.print_exc()
        sys.exit(1)
    sys.exit(ret)

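Both get_ui() and the server-type selection above resolve a module by name inside a package at runtime. A minimal standalone sketch of that pattern; the load_plugin helper and the names in the demo call are illustrative, not part of the bitbake API:

    def load_plugin(package_name, plugin_name):
        # __import__ with a fromlist imports the named submodule as a side
        # effect and returns the package; the submodule is then fetched as
        # an attribute, mirroring get_ui() above.
        package = __import__(package_name, fromlist=[plugin_name])
        try:
            return getattr(package, plugin_name)
        except AttributeError:
            raise SystemExit("Invalid plugin '%s' for package '%s'"
                             % (plugin_name, package_name))

    print(load_plugin("os", "path"))  # prints e.g. <module 'posixpath' ...>
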
bitbake/bin/bitbake-diffsigs

@@ -46,12 +46,6 @@ logger = logger_create('bitbake-diffsigs')
def find_compare_task(bbhandler, pn, taskname):
    """ Find the most recent signature files for the specified PN/task and compare them """

    def get_hashval(siginfo):
        if siginfo.endswith('.siginfo'):
            return siginfo.rpartition(':')[2].partition('_')[0]
        else:
            return siginfo.rpartition('.')[2]

    if not hasattr(bb.siggen, 'find_siginfo'):
        logger.error('Metadata does not support finding signature data files')
        sys.exit(1)
@@ -60,7 +54,7 @@ def find_compare_task(bbhandler, pn, taskname):
    taskname = 'do_%s' % taskname

    filedates = bb.siggen.find_siginfo(pn, taskname, None, bbhandler.config_data)
    latestfiles = sorted(filedates.keys(), key=lambda f: filedates[f])[-3:]
    latestfiles = sorted(filedates.keys(), key=lambda f: filedates[f])[-2:]
    if not latestfiles:
        logger.error('No sigdata files found matching %s %s' % (pn, taskname))
        sys.exit(1)
@@ -68,16 +62,6 @@
        logger.error('Only one matching sigdata file found for the specified task (%s %s)' % (pn, taskname))
        sys.exit(1)
    else:
        # It's possible that latestfiles contain 3 elements and the first two have the same hash value.
        # In this case, we delete the second element.
        # The above case is actually the most common one. Because we may have sigdata file and siginfo
        # file having the same hash value. Comparing such two files makes no sense.
        if len(latestfiles) == 3:
            hash0 = get_hashval(latestfiles[0])
            hash1 = get_hashval(latestfiles[1])
            if hash0 == hash1:
                latestfiles.pop(1)

        # Define recursion callback
        def recursecb(key, hash1, hash2):
            hashes = [hash1, hash2]

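get_hashval() above pulls the task hash out of two signature-file naming schemes: in .siginfo names the hash sits between the last ':' and the following '_', while in sigdata names it is the final dot-separated field. The same slicing on made-up filenames; only the string handling mirrors the script:

    siginfo = "sigtask.do_compile:abc123def_1700000000.siginfo"  # hypothetical name
    print(siginfo.rpartition(':')[2].partition('_')[0])          # -> abc123def

    sigdata = "sigtask.do_compile.sigdata.abc123def"             # hypothetical name
    print(sigdata.rpartition('.')[2])                            # -> abc123def
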
File diff suppressed because it is too large
bitbake/bin/bitbake-selftest

@@ -26,30 +26,24 @@ except RuntimeError as exc:
    sys.exit(str(exc))

def usage():
    print('usage: [BB_SKIP_NETTESTS=yes] %s [-v] [testname1 [testname2]...]' % os.path.basename(sys.argv[0]))
    print('usage: %s [testname1 [testname2]...]' % os.path.basename(sys.argv[0]))

verbosity = 1

tests = sys.argv[1:]
if '-v' in sys.argv:
    tests.remove('-v')
    verbosity = 2

if tests:
if len(sys.argv) > 1:
    if '--help' in sys.argv[1:]:
        usage()
        sys.exit(0)

    tests = sys.argv[1:]
else:
    tests = ["bb.tests.codeparser",
             "bb.tests.cow",
             "bb.tests.data",
             "bb.tests.fetch",
             "bb.tests.parse",
             "bb.tests.utils"]

for t in tests:
    t = '.'.join(t.split('.')[:3])
    __import__(t)

unittest.main(argv=["bitbake-selftest"] + tests, verbosity=verbosity)
unittest.main(argv=["bitbake-selftest"] + tests)

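The loop above trims each requested test name down to its module part before importing it, so only importable module paths ever get imported eagerly. The same trim on an example test id; the id itself is made up for illustration:

    name = "bb.tests.data.DataExpansions.test_one_var"  # hypothetical test id
    module = '.'.join(name.split('.')[:3])
    print(module)  # -> bb.tests.data, which is what gets __import__'ed
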
bitbake/bin/bitbake-worker

@@ -10,30 +10,12 @@ import bb
import select
import errno
import signal
from multiprocessing import Lock

# Users shouldn't be running this code directly
if len(sys.argv) != 2 or not sys.argv[1].startswith("decafbad"):
if len(sys.argv) != 2 or sys.argv[1] != "decafbad":
    print("bitbake-worker is meant for internal execution by bitbake itself, please don't use it standalone.")
    sys.exit(1)

profiling = False
if sys.argv[1] == "decafbadbad":
    profiling = True
    try:
        import cProfile as profile
    except:
        import profile

# Unbuffer stdout to avoid log truncation in the event
# of an unorderly exit as well as to provide timely
# updates to log files for use with tail
try:
    if sys.stdout.name == '<stdout>':
        sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
except:
    pass

logger = logging.getLogger("BitBake")

try:
@@ -45,9 +27,6 @@ except ImportError:

worker_pipe = sys.stdout.fileno()
bb.utils.nonblockingfd(worker_pipe)
# Need to guard against multiprocessing being used in child processes
# and multiple processes trying to write to the parent at the same time
worker_pipe_lock = None

handler = bb.event.LogHandler()
logger.addHandler(handler)
@@ -84,21 +63,14 @@ def worker_flush():
        written = os.write(worker_pipe, worker_queue)
        worker_queue = worker_queue[written:]
    except (IOError, OSError) as e:
        if e.errno != errno.EAGAIN and e.errno != errno.EPIPE:
        if e.errno != errno.EAGAIN:
            raise

def worker_child_fire(event, d):
    global worker_pipe
    global worker_pipe_lock

    data = "<event>" + pickle.dumps(event) + "</event>"
    try:
        worker_pipe_lock.acquire()
        worker_pipe.write(data)
        worker_pipe_lock.release()
    except IOError:
        sigterm_handler(None, None)
        raise
    worker_pipe.write(data)

bb.event.worker_fire = worker_fire

@@ -109,11 +81,6 @@ def workerlog_write(msg):
        lf.write(msg)
        lf.flush()

def sigterm_handler(signum, frame):
    signal.signal(signal.SIGTERM, signal.SIG_DFL)
    os.killpg(0, signal.SIGTERM)
    sys.exit()

def fork_off_task(cfg, data, workerdata, fn, task, taskname, appends, taskdepdata, quieterrors=False):
    # We need to setup the environment BEFORE the fork, since
    # a fork() or exec*() activates PSEUDO...
@@ -162,28 +129,19 @@ fork_off_task(cfg, data, workerdata, fn, task, taskname, appends, taskdepdat
        bb.msg.fatal("RunQueue", "fork failed: %d (%s)" % (e.errno, e.strerror))

    if pid == 0:
    def child():
        global worker_pipe
        global worker_pipe_lock
        pipein.close()

        signal.signal(signal.SIGTERM, sigterm_handler)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, sigterm_handler)
        bb.utils.signal_on_parent_exit("SIGTERM")
        signal.signal(signal.SIGTERM, signal.SIG_DFL)

        # Save out the PID so that the event can include it the
        # events
        bb.event.worker_pid = os.getpid()
        bb.event.worker_fire = worker_child_fire
        worker_pipe = pipeout
        worker_pipe_lock = Lock()

        # Make the child the process group leader and ensure no
        # child process will be controlled by the current terminal
        # This ensures signals sent to the controlling terminal like Ctrl+C
        # don't stop the child processes.
        os.setsid()
        # Make the child the process group leader
        os.setpgid(0, 0)
        # No stdin
        newsi = os.open(os.devnull, os.O_RDWR)
        os.dup2(newsi, sys.stdin.fileno())
@@ -196,11 +154,15 @@ fork_off_task(cfg, data, workerdata, fn, task, taskname, appends, taskdepdat
        data.setVar("BUILDNAME", workerdata["buildname"])
        data.setVar("DATE", workerdata["date"])
        data.setVar("TIME", workerdata["time"])
        bb.parse.siggen.set_taskdata(workerdata["sigdata"])
        bb.parse.siggen.set_taskdata(workerdata["hashes"], workerdata["hash_deps"], workerdata["sigchecksums"])
        ret = 0
        try:
            the_data = bb.cache.Cache.loadDataFull(fn, appends, data)
            the_data.setVar('BB_TASKHASH', workerdata["runq_hash"][task])
            for h in workerdata["hashes"]:
                the_data.setVar("BBHASH_%s" % h, workerdata["hashes"][h])
            for h in workerdata["hash_deps"]:
                the_data.setVar("BBHASHDEPS_%s" % h, workerdata["hash_deps"][h])

            # exported_vars() returns a generator which *cannot* be passed to os.environ.update()
            # successfully. We also need to unset anything from the environment which shouldn't be there
@@ -221,22 +183,11 @@ fork_off_task(cfg, data, workerdata, fn, task, taskname, appends, taskdepdat
            logger.critical(str(exc))
            os._exit(1)
        try:
            if cfg.dry_run:
                return 0
            return bb.build.exec_task(fn, taskname, the_data, cfg.profile)
            if not cfg.dry_run:
                ret = bb.build.exec_task(fn, taskname, the_data, cfg.profile)
            os._exit(ret)
        except:
            os._exit(1)
    if not profiling:
        os._exit(child())
    else:
        profname = "profile-%s.log" % (fn.replace("/", "-") + "-" + taskname)
        prof = profile.Profile()
        try:
            ret = profile.Profile.runcall(prof, child)
        finally:
            prof.dump_stats(profname)
            bb.utils.process_profilelog(profname)
        os._exit(ret)
    else:
        for key, value in envbackup.iteritems():
            if value is None:
@@ -294,14 +245,9 @@ class BitbakeWorker(object):
        self.build_pipes = {}

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)

    def sigterm_exception(self, signum, stackframe):
        if signum == signal.SIGTERM:
            bb.warn("Worker recieved SIGTERM, shutting down...")
        elif signum == signal.SIGHUP:
            bb.warn("Worker recieved SIGHUP, shutting down...")
        bb.warn("Worker recieved SIGTERM, shutting down...")
        self.handle_finishnow(None)
        signal.signal(signal.SIGTERM, signal.SIG_DFL)
        os.kill(os.getpid(), signal.SIGTERM)
@@ -309,16 +255,13 @@
    def serve(self):
        while True:
            (ready, _, _) = select.select([self.input] + [i.input for i in self.build_pipes.values()], [] , [], 1)
            if self.input in ready:
            if self.input in ready or len(self.queue):
                start = len(self.queue)
                try:
                    r = self.input.read()
                    if len(r) == 0:
                        # EOF on pipe, server must have terminated
                        self.sigterm_exception(signal.SIGTERM, None)
                    self.queue = self.queue + r
                    self.queue = self.queue + self.input.read()
                except (OSError, IOError):
                    pass
            if len(self.queue):
                end = len(self.queue)
                self.handle_item("cookerconfig", self.handle_cookercfg)
                self.handle_item("workerdata", self.handle_workerdata)
                self.handle_item("runtask", self.handle_runtask)
@@ -419,16 +362,7 @@

try:
    worker = BitbakeWorker(sys.stdin)
    if not profiling:
        worker.serve()
    else:
        profname = "profile-worker.log"
        prof = profile.Profile()
        try:
            profile.Profile.runcall(prof, worker.serve)
        finally:
            prof.dump_stats(profname)
            bb.utils.process_profilelog(profname)
    worker.serve()
except BaseException as e:
    if not normalexit:
        import traceback

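The child-setup hunk above swaps a plain SIGTERM handler for bb.utils.signal_on_parent_exit("SIGTERM"), which arranges for the worker child to be signalled when its parent dies. A sketch of how such a helper is typically built on Linux with prctl(PR_SET_PDEATHSIG); this is an assumption about the mechanism, not bitbake's actual implementation:

    import ctypes
    import signal

    PR_SET_PDEATHSIG = 1  # constant from <linux/prctl.h>

    def signal_on_parent_exit(signame):
        # Ask the kernel to deliver `signame` to this process when its
        # parent exits (Linux-only).
        signum = getattr(signal, signame)
        libc = ctypes.CDLL("libc.so.6", use_errno=True)
        if libc.prctl(PR_SET_PDEATHSIG, signum) != 0:
            raise OSError(ctypes.get_errno(), "prctl(PR_SET_PDEATHSIG) failed")
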
@@ -34,7 +34,7 @@ from bb.ui.crumbs.hig.deployimagedialog import DeployImageDialog
from bb.ui.crumbs.hig.imageselectiondialog import ImageSelectionDialog

# I put all the fs bitbake supported here. Need more test.
DEPLOYABLE_IMAGE_TYPES = ["jffs2", "cramfs", "ext2", "ext3", "ext4", "btrfs", "squashfs", "ubi", "vmdk"]
DEPLOYABLE_IMAGE_TYPES = ["jffs2", "cramfs", "ext2", "ext3", "btrfs", "squashfs", "ubi", "vmdk"]
Title = "USB Image Writer"

class DeployWindow(gtk.Window):

@@ -1,4 +1,4 @@
|
||||
#!/bin/echo ERROR: This script needs to be sourced. Please run as .
|
||||
#!/bin/bash
|
||||
# (c) 2013 Intel Corp.
|
||||
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
@@ -16,360 +16,189 @@
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
|
||||
|
||||
# This script can be run in two modes.
|
||||
|
||||
# When used with "source", from a build directory,
|
||||
# it enables toaster event logging and starts the bitbake resident server.
|
||||
# use as: source toaster [start|stop] [noweb] [noui]
|
||||
|
||||
# When it is called as a stand-alone script, it starts just the
|
||||
# web server, and the building shall be done through the web interface.
|
||||
# As script, it will not return to the command prompt. Stop with Ctrl-C.
|
||||
# This script enables toaster event logging and
|
||||
# starts bitbake resident server
|
||||
# use as: source toaster [start|stop]
|
||||
|
||||
# Helper function to kill a background toaster development server
|
||||
|
||||
webserverKillAll()
|
||||
function webserverKillAll()
|
||||
{
|
||||
local pidfile
|
||||
for pidfile in ${BUILDDIR}/.toastermain.pid ${BUILDDIR}/.runbuilds.pid; do
|
||||
if [ -f ${pidfile} ]; then
|
||||
pid=`cat ${pidfile}`
|
||||
while kill -0 $pid 2>/dev/null; do
|
||||
kill -SIGTERM -$pid 2>/dev/null
|
||||
sleep 1
|
||||
# Kill processes if they are still running - may happen in interactive shells
|
||||
ps fux | grep "python.*manage.py runserver" | awk '{print $2}' | xargs kill
|
||||
done
|
||||
rm ${pidfile}
|
||||
fi
|
||||
done
|
||||
local pidfile
|
||||
for pidfile in ${BUILDDIR}/.toastermain.pid; do
|
||||
if [ -f ${pidfile} ]; then
|
||||
while kill -0 $(< ${pidfile}) 2>/dev/null; do
|
||||
kill -SIGTERM -$(< ${pidfile}) 2>/dev/null
|
||||
sleep 1;
|
||||
done;
|
||||
rm ${pidfile}
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
webserverStartAll()
|
||||
function webserverStartAll()
|
||||
{
|
||||
# do not start if toastermain points to a valid process
|
||||
if ! cat "${BUILDDIR}/.toastermain.pid" 2>/dev/null | xargs -I{} kill -0 {} ; then
|
||||
retval=1
|
||||
rm "${BUILDDIR}/.toastermain.pid"
|
||||
fi
|
||||
|
||||
retval=0
|
||||
# you can always add a superuser later via
|
||||
# python bitbake/lib/toaster/manage.py python manage.py createsuperuser --username=<ME>
|
||||
python $BBBASEDIR/lib/toaster/manage.py syncdb --noinput || retval=1
|
||||
|
||||
python $BBBASEDIR/lib/toaster/manage.py migrate orm || retval=2
|
||||
|
||||
if [ $retval -eq 1 ]; then
|
||||
echo "Failed db sync, aborting system start" 1>&2
|
||||
retval=0
|
||||
python $BBBASEDIR/lib/toaster/manage.py syncdb || retval=1
|
||||
python $BBBASEDIR/lib/toaster/manage.py migrate orm || retval=2
|
||||
if [ $retval -eq 1 ]; then
|
||||
echo "Failed db sync, stopping system start" 1>&2
|
||||
elif [ $retval -eq 2 ]; then
|
||||
echo -e "\nError on migration, trying to recover... \n"
|
||||
python $BBBASEDIR/lib/toaster/manage.py migrate orm 0001_initial --fake
|
||||
retval=0
|
||||
python $BBBASEDIR/lib/toaster/manage.py migrate orm || retval=1
|
||||
fi
|
||||
if [ $retval -eq 0 ]; then
|
||||
python $BBBASEDIR/lib/toaster/manage.py runserver 0.0.0.0:8000 </dev/null >${BUILDDIR}/toaster_web.log 2>&1 & echo $! >${BUILDDIR}/.toastermain.pid
|
||||
sleep 1
|
||||
if ! cat "${BUILDDIR}/.toastermain.pid" | xargs -I{} kill -0 {} ; then
|
||||
retval=1
|
||||
rm "${BUILDDIR}/.toastermain.pid"
|
||||
fi
|
||||
fi
|
||||
return $retval
|
||||
fi
|
||||
|
||||
python $BBBASEDIR/lib/toaster/manage.py migrate orm || retval=1
|
||||
|
||||
if [ $retval -eq 1 ]; then
|
||||
printf "\nError on orm migration, rolling back...\n"
|
||||
python $BBBASEDIR/lib/toaster/manage.py migrate orm 0001_initial --fake
|
||||
return $retval
|
||||
fi
|
||||
|
||||
python $BBBASEDIR/lib/toaster/manage.py migrate bldcontrol || retval=1
|
||||
|
||||
if [ $retval -eq 1 ]; then
|
||||
printf "\nError on bldcontrol migration, rolling back...\n"
|
||||
python $BBBASEDIR/lib/toaster/manage.py migrate bldcontrol 0001_initial --fake
|
||||
return $retval
|
||||
fi
|
||||
|
||||
python $BBBASEDIR/lib/toaster/manage.py checksettings --traceback || retval=1
|
||||
|
||||
if [ $retval -eq 1 ]; then
|
||||
printf "\nError while checking settings; aborting\n"
|
||||
return $retval
|
||||
fi
|
||||
|
||||
echo "Starting webserver..."
|
||||
|
||||
python $BBBASEDIR/lib/toaster/manage.py runserver "0.0.0.0:$WEB_PORT" </dev/null >>${BUILDDIR}/toaster_web.log 2>&1 & echo $! >${BUILDDIR}/.toastermain.pid
|
||||
|
||||
sleep 1
|
||||
|
||||
if ! cat "${BUILDDIR}/.toastermain.pid" | xargs -I{} kill -0 {} ; then
|
||||
retval=1
|
||||
rm "${BUILDDIR}/.toastermain.pid"
|
||||
else
|
||||
echo "Webserver address: http://0.0.0.0:$WEB_PORT/"
|
||||
fi
|
||||
|
||||
return $retval
|
||||
}
|
||||
|
||||
# Helper functions to add a special configuration file
|
||||
|
||||
addtoConfiguration()
|
||||
function addtoConfiguration()
|
||||
{
|
||||
file=$1
|
||||
shift
|
||||
echo "#Created by toaster start script" > ${BUILDDIR}/conf/$file
|
||||
for var in "$@"; do echo $var >> ${BUILDDIR}/conf/$file; done
|
||||
echo "#Created by toaster start script" > ${BUILDDIR}/conf/$2
|
||||
echo $1 >> ${BUILDDIR}/conf/$2
|
||||
}
|
||||
|
||||
INSTOPSYSTEM=0
|
||||
|
||||
# define the stop command
|
||||
stop_system()
|
||||
function stop_system()
|
||||
{
|
||||
# prevent reentry
|
||||
if [ $INSTOPSYSTEM -eq 1 ]; then return; fi
|
||||
if [ $INSTOPSYSTEM == 1 ]; then return; fi
|
||||
INSTOPSYSTEM=1
|
||||
if [ -f ${BUILDDIR}/.toasterui.pid ]; then
|
||||
kill `cat ${BUILDDIR}/.toasterui.pid` 2>/dev/null
|
||||
kill $(< ${BUILDDIR}/.toasterui.pid ) 2>/dev/null
|
||||
rm ${BUILDDIR}/.toasterui.pid
|
||||
fi
|
||||
stop_bitbake
|
||||
BBSERVER=localhost:8200 bitbake -m
|
||||
unset BBSERVER
|
||||
webserverKillAll
|
||||
# unset exported variables
|
||||
unset DATABASE_URL
|
||||
unset TOASTER_CONF
|
||||
unset TOASTER_DIR
|
||||
# force stop any misbehaving bitbake server
|
||||
lsof bitbake.lock | awk '{print $2}' | grep "[0-9]\+" | xargs -n1 -r kill
|
||||
trap - SIGHUP
|
||||
#trap - SIGCHLD
|
||||
INSTOPSYSTEM=0
|
||||
}
|
||||
|
||||
start_bitbake() {
|
||||
unset BBSERVER
|
||||
bitbake --postread conf/toaster.conf --server-only -t xmlrpc -B 0.0.0.0:0
|
||||
if [ $? -ne 0 ]; then
|
||||
echo "Bitbake server start failed"
|
||||
return 1
|
||||
fi
|
||||
export BBSERVER=0.0.0.0:-1
|
||||
export DATABASE_URL=`$BBBASEDIR/lib/toaster/manage.py get-dburl`
|
||||
if [ $NOTOASTERUI -eq 0 ]; then # we start the TOASTERUI only if not inhibited
|
||||
bitbake --observe-only -u toasterui --remote-server=$BBSERVER -t xmlrpc >>${BUILDDIR}/toaster_ui.log 2>&1 \
|
||||
& echo $! >${BUILDDIR}/.toasterui.pid
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
stop_bitbake() {
|
||||
BBSERVER=0.0.0.0:-1 bitbake -m
|
||||
unset BBSERVER
|
||||
# force stop any misbehaving bitbake server
|
||||
lsof bitbake.lock | awk '{print $2}' | grep "[0-9]\+" | xargs -n1 -r kill
|
||||
}
|
||||
|
||||
check_pidbyfile() {
|
||||
[ -e $1 ] && kill -0 `cat $1` 2>/dev/null
|
||||
function check_pidbyfile() {
|
||||
[ -e $1 ] && kill -0 $(< $1) 2>/dev/null
|
||||
}
|
||||
|
||||
|
||||
notify_chldexit() {
|
||||
if [ $NOTOASTERUI -eq 0 ]; then
|
||||
function notify_chldexit() {
|
||||
if [ $NOTOASTERUI == 0 ]; then
|
||||
check_pidbyfile ${BUILDDIR}/.toasterui.pid && return
|
||||
stop_system
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
verify_prereq() {
|
||||
# Verify prerequisites
|
||||
# We make sure we're running in the current shell and in a good environment
|
||||
|
||||
if ! echo "import django; print (1,) == django.VERSION[0:1] and django.VERSION[1:2][0] in (6,)" | python 2>/dev/null | grep True >/dev/null; then
|
||||
printf "This program needs Django 1.6. Please install with\n\npip install django==1.6\n"
|
||||
return 2
|
||||
fi
|
||||
|
||||
if ! echo "import south; print reduce(lambda x, y: 2 if x==2 else 0 if x == 0 else y, map(lambda x: 1+cmp(x[1]-x[0],0), zip([0,8,4], map(int,south.__version__.split(\".\"))))) > 0" | python 2>/dev/null | grep True >/dev/null; then
|
||||
printf "This program needs South 0.8.4. Please install with\n\npip install south==0.8.4\n"
|
||||
return 2
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
|
||||
# read command line parameters
|
||||
if [ -n "$BASH_SOURCE" ] ; then
|
||||
TOASTER=${BASH_SOURCE}
|
||||
elif [ -n "$ZSH_NAME" ] ; then
|
||||
TOASTER=${(%):-%x}
|
||||
else
|
||||
TOASTER=$0
|
||||
fi
|
||||
|
||||
BBBASEDIR=`dirname $TOASTER`/..
|
||||
OEROOT=`dirname $TOASTER`/../..
|
||||
RUNNING=0
|
||||
NOTOASTERUI=0
|
||||
WEBSERVER=1
|
||||
TOASTER_BRBE=""
|
||||
if [ "$WEB_PORT" = "" ]; then
|
||||
WEB_PORT="8000"
|
||||
fi
|
||||
# this is the configuraton file we are using for toaster
|
||||
# we are using the same logic that oe-setup-builddir uses
|
||||
# (based on TEMPLATECONF and .templateconf) to determine
|
||||
# which toasterconf.json to use.
|
||||
# note: There are a number of relative path assumptions
|
||||
# in the local layers that currently make using an arbitrary
|
||||
# toasterconf.json difficult.
|
||||
|
||||
. $OEROOT/.templateconf
|
||||
if [ -n "$TEMPLATECONF" ]; then
|
||||
if [ ! -d "$TEMPLATECONF" ]; then
|
||||
# Allow TEMPLATECONF=meta-xyz/conf as a shortcut
|
||||
if [ -d "$OEROOT/$TEMPLATECONF" ]; then
|
||||
TEMPLATECONF="$OEROOT/$TEMPLATECONF"
|
||||
fi
|
||||
if [ ! -d "$TEMPLATECONF" ]; then
|
||||
echo >&2 "Error: '$TEMPLATECONF' must be a directory containing toasterconf.json"
|
||||
[ "$TOASTER_MANAGED" = '1' ] && exit 1 || return 1
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ "$TOASTER_CONF" = "" ]; then
|
||||
TOASTER_CONF="$TEMPLATECONF/toasterconf.json"
|
||||
export TOASTER_CONF=$(python -c "import os; print os.path.realpath('$TOASTER_CONF')")
|
||||
fi
|
||||
if [ ! -f $TOASTER_CONF ]; then
|
||||
echo "$TOASTER_CONF configuration file not found. Set TOASTER_CONF to specify file or fix .templateconf"
|
||||
[ "$TOASTER_MANAGED" = '1' ] && exit 1 || return 1
|
||||
fi
|
||||
# this defines the dir toaster will use for
|
||||
# 1) clones of layers (in _toaster_clones )
|
||||
# 2) the build dir (in build)
|
||||
# 3) the sqlite db if that is being used.
|
||||
# 4) pid's we need to clean up on exit/shutdown
|
||||
# note: for future. in order to make this an arbitrary directory, we need to
|
||||
# make sure that the toaster.sqlite file doesn't default to `pwd` like it currently does.
|
||||
export TOASTER_DIR=`pwd`
|
||||
|
||||
NOBROWSER=0
|
||||
|
||||
for param in $*; do
|
||||
case $param in
|
||||
noui )
|
||||
NOTOASTERUI=1
|
||||
;;
|
||||
noweb )
|
||||
WEBSERVER=0
|
||||
;;
|
||||
nobrowser )
|
||||
NOBROWSER=1
|
||||
;;
|
||||
brbe=* )
|
||||
TOASTER_BRBE=$'\n'"TOASTER_BRBE=\""${param#*=}"\""
|
||||
;;
|
||||
webport=*)
|
||||
WEB_PORT="${param#*=}"
|
||||
esac
|
||||
done
|
||||
|
||||
if [ `basename \"$0\"` = `basename \"${TOASTER}\"` ]; then
|
||||
echo "Error: This script needs to be sourced. Please run as . $TOASTER"
|
if [ -z "$ZSH_NAME" ] && [ `basename \"$0\"` = `basename \"$BASH_SOURCE\"` ]; then
echo "Error: This script needs to be sourced. Please run as 'source toaster [start|stop]'" 1>&2;
exit 1
fi

if [ "$1" = 'restart-bitbake' ] ; then
stop_bitbake
sleep 1
start_bitbake
rc=$?
sleep 1
return $rc
fi

if ! verify_prereq; then
echo "Error: Could not verify that the needed dependencies are installed. Please use virtualenv and pip to install dependencies listed in toaster-requirements.txt" 1>&2
return 1
fi

# We make sure we're running in the current shell and in a good environment
if [ -z "$BUILDDIR" ] || ! which bitbake >/dev/null 2>&1 ; then
echo "Error: Build environment is not setup or bitbake is not in path." 1>&2
if [ -z "$BUILDDIR" ] || [ -z `which bitbake` ]; then
echo "Error: Build environment is not setup or bitbake is not in path." 1>&2;
return 2
fi

# this is the configuration file we are using for toaster
# note default is assuming yocto. Override this if you are
# running in a pure OE environment and use the toasterconf.json
# in meta/conf/toasterconf.json
# note: for future there are a number of relative path assumptions
# in the local layers that currently prevent using an arbitrary
# toasterconf.json
if [ "$TOASTER_CONF" = "" ]; then
TOASTER_CONF="$(dirname $TOASTER)/../../meta-yocto/conf/toasterconf.json"
export TOASTER_CONF=$(python -c "import os; print os.path.realpath('$TOASTER_CONF')")
BBBASEDIR=`dirname ${BASH_SOURCE}`/..

# Verify prerequisites

if ! echo "import django; print (1,5) == django.VERSION[0:2]" | python 2>/dev/null | grep True >/dev/null; then
echo -e "This program needs Django 1.5. Please install with\n\nsudo pip install django==1.5"
return 2
fi
if [ ! -f $TOASTER_CONF ]; then
echo "$TOASTER_CONF configuration file not found. set TOASTER_CONF to specify a path"
return 1

if ! echo "import south; print [0,8,4] == map(int,south.__version__.split(\".\"))" | python 2>/dev/null | grep True >/dev/null; then
echo -e "This program needs South 0.8.4. Please install with\n\nsudo pip install south==0.8.4"
return 2
fi
# this defines the dir toaster will use for
# 1) clones of layers (in _toaster_clones )
# 2) the build dir (in build)
# 3) the sqlite db if that is being used.
# 4) pid's we need to clean up on exit/shutdown
# note: for future. in order to make this an arbitrary directory, we need to
# make sure that the toaster.sqlite file doesn't default to `pwd` like it currently does.
export TOASTER_DIR=`dirname $BUILDDIR`

# Determine the action. If specified by arguments, fine, if not, toggle it
if [ "$1" = 'start' ] || [ "$1" = 'stop' ]; then
if [ "x$1" == "xstart" ] || [ "x$1" == "xstop" ]; then
CMD="$1"
else
if [ -z "$BBSERVER" ]; then
CMD="start"
else
CMD="stop"
fi
fi;
fi

NOTOASTERUI=0
for param in $*; do
case $param in
noui )
NOTOASTERUI=1
;;
esac
done

echo "The system will $CMD."

# Make sure it's safe to run by checking bitbake lock

lock=1
if [ -e $BUILDDIR/bitbake.lock ]; then
python -c "import fcntl; fcntl.flock(open(\"$BUILDDIR/bitbake.lock\"), fcntl.LOCK_EX|fcntl.LOCK_NB)" 2>/dev/null || lock=0
(flock -n 200 ) 200<$BUILDDIR/bitbake.lock || lock=0
fi

if [ ${CMD} = 'start' ] && [ $lock -eq 0 ]; then
echo "Error: bitbake lock state error. File locks show that the system is on." 1>&2
echo "Please wait for the current build to finish, stop and then start the system again." 1>&2
if [ ${CMD} == "start" ] && ( [ $lock -eq 0 ] || [ -e $BUILDDIR/.toastermain.pid ] ); then
echo "Error: bitbake lock state error. File locks show that the system is on." 2>&1
echo "If you see problems, stop and then start the system again." 2>&1
return 3
fi

if [ ${CMD} = 'start' ] && [ -e $BUILDDIR/.toastermain.pid ] && kill -0 `cat $BUILDDIR/.toastermain.pid`; then
echo "Warning: bitbake appears to be dead, but the Toaster web server is running. Something fishy is going on." 1>&2
echo "Cleaning up the web server to start from a clean slate."
webserverKillAll
fi

# Execute the commands

case $CMD in
start )
addtoConfiguration toaster.conf "INHERIT+=\"toaster buildhistory\"" $TOASTER_BRBE
if [ $WEBSERVER -gt 0 ] && ! webserverStartAll; then
start_success=1
addtoConfiguration "INHERIT+=\"toaster buildhistory\"" toaster.conf
if ! webserverStartAll; then
echo "Failed ${CMD}."
return 4
fi
start_bitbake
if [ $? -eq 0 ]; then
python $BBBASEDIR/lib/toaster/manage.py runbuilds & echo $! >${BUILDDIR}/.runbuilds.pid
unset BBSERVER
bitbake --postread conf/toaster.conf --server-only -t xmlrpc -B localhost:8200
if [ $? -ne 0 ]; then
start_success=0
echo "Bitbake server start failed"
else
export BBSERVER=localhost:8200
if [ $NOTOASTERUI == 0 ]; then # we start the TOASTERUI only if not inhibited
bitbake --observe-only -u toasterui >${BUILDDIR}/toaster_ui.log 2>&1 & echo $! >${BUILDDIR}/.toasterui.pid
fi
fi
if [ $start_success -eq 1 ]; then
# set fail safe stop system on terminal exit
trap stop_system SIGHUP
echo "Successful ${CMD}."
return 0
else
# failed start, do stop
stop_system
echo "Failed ${CMD}."
return 1
fi
# stop system on terminal exit
set -o monitor
@@ -381,3 +210,5 @@ case $CMD in
echo "Successful ${CMD}."
;;
esac
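The fragment above is meant to be sourced rather than executed, and it refuses to start while another build holds the bitbake lock. A minimal sketch of both points, assuming the same paths the script itself uses:

    # Sourced invocation, as the script's own error message requires:
    source toaster start

    # Standalone probe of the lock the script checks (bash; fd 9 is arbitrary):
    ( flock -n 9 && echo "bitbake lock is free" ) 9<"$BUILDDIR/bitbake.lock"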
@@ -1,174 +0,0 @@
#!/usr/bin/env python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (C) 2014 Alex Damian
#
# This file re-uses code spread throughout other Bitbake source files.
# As such, all other copyrights belong to their own right holders.
#
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.


# This command takes a filename as a single parameter. The filename is read
# as a build eventlog, and the ToasterUI is used to process events in the file
# and log data in the database

from __future__ import print_function
import os
import sys, logging
import json, pickle

# mangle syspath to allow easy import of modules
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
    'lib'))

import bb.cooker
from bb.ui import toasterui


class FileReadEventsServerConnection():
    """ Emulates a connection to a bitbake server that feeds
        events coming actually read from a saved log file.
    """

    class MockConnection():
        """ fill-in for the proxy to the server. we just return generic data
        """
        def __init__(self, sc):
            self._sc = sc

        def runCommand(self, commandArray):
            """ emulates running a command on the server; only read-only commands are accepted """
            command_name = commandArray[0]

            if command_name == "getVariable":
                if commandArray[1] in self._sc._variables:
                    return (self._sc._variables[commandArray[1]]['v'], None)
                return (None, "Missing variable")

            elif command_name == "getAllKeysWithFlags":
                dump = {}
                flaglist = commandArray[1]
                for k in self._sc._variables.keys():
                    try:
                        if not k.startswith("__"):
                            v = self._sc._variables[k]['v']
                            dump[k] = {
                                'v' : v ,
                                'history' : self._sc._variables[k]['history'],
                            }
                            for d in flaglist:
                                dump[k][d] = self._sc._variables[k][d]
                    except Exception as e:
                        print(e)
                return (dump, None)
            else:
                raise Exception("Command %s not implemented" % commandArray[0])

        def terminateServer(self):
            """ do not do anything """
            pass


    class EventReader():
        def __init__(self, sc):
            self._sc = sc
            self.firstraise = 0

        def _create_event(self, line):
            def _import_class(name):
                assert len(name) > 0
                assert "." in name, name

                components = name.strip().split(".")
                modulename = ".".join(components[:-1])
                moduleklass = components[-1]

                module = __import__(modulename, fromlist=[str(moduleklass)])
                return getattr(module, moduleklass)

            # we build a toaster event out of current event log line
            try:
                event_data = json.loads(line.strip())
                event_class = _import_class(event_data['class'])
                event_object = pickle.loads(json.loads(event_data['vars']))
            except ValueError as e:
                print("Failed loading ", line)
                raise e

            if not isinstance(event_object, event_class):
                raise Exception("Error loading objects %s class %s" % (event_object, event_class))

            return event_object

        def waitEvent(self, timeout):
            nextline = self._sc._eventfile.readline()
            if len(nextline) == 0:
                # the build data ended, while toasterui still waits for events.
                # this happens when the server was abruptly stopped, so we simulate this
                self.firstraise += 1
                if self.firstraise == 1:
                    raise KeyboardInterrupt()
                else:
                    return None
            else:
                self._sc.lineno += 1
                return self._create_event(nextline)


    def _readVariables(self, variableline):
        self._variables = json.loads(variableline.strip())['allvariables']


    def __init__(self, file_name):
        self.connection = FileReadEventsServerConnection.MockConnection(self)
        self._eventfile = open(file_name, "r")

        # we expect to have the variable dump at the start of the file
        self.lineno = 1
        self._readVariables(self._eventfile.readline())

        self.events = FileReadEventsServerConnection.EventReader(self)


class MockConfigParameters():
    """ stand-in for cookerdata.ConfigParameters; as we don't really config a cooker, this
        serves just to supply needed interfaces for the toaster ui to work """
    def __init__(self):
        self.observe_only = True  # we can only read files


# run toaster ui on our mock bitbake class
if __name__ == "__main__":
    if len(sys.argv) < 2:
        print("Usage: %s event.log " % sys.argv[0])
        sys.exit(1)

    file_name = sys.argv[-1]
    mock_connection = FileReadEventsServerConnection(file_name)
    configParams = MockConfigParameters()

    # run the main program and set exit code to the returned value
    sys.exit(toasterui.main(mock_connection.connection, mock_connection.events, configParams))
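The deleted script above expects a variable dump on the first line of the log, followed by one JSON object per line whose 'class' and 'vars' fields are rebuilt into BitBake event objects. A hedged usage sketch; the command name is an assumption, since the file's installed name is not shown in this excerpt:

    # Replay a saved build event log through the Toaster UI (name assumed):
    toaster-eventreplay bitbake_eventlog.json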
@@ -53,6 +53,7 @@ fun! NewBBTemplate()
put ='LICENSE = \"\"'
put ='SECTION = \"\"'
put ='DEPENDS = \"\"'
put ='PR = \"r0\"'
put =''
put ='SRC_URI = \"\"'

@@ -11,7 +11,7 @@
# validate: validates
# clean: removes files
#
# The Makefile generates an HTML version of every document. The
# The Makefile generates an HTML and PDF version of every document. The
# variable DOC indicates the folder name for a given manual.
#
# To build a manual, you must invoke 'make' with the DOC argument.
@@ -21,8 +21,8 @@
#     make DOC=bitbake-user-manual
#     make pdf DOC=bitbake-user-manual
#
# The first example generates the HTML version of the User Manual.
# The second example generates the PDF version of the User Manual.
# The first example generates the HTML and PDF versions of the User Manual.
# The second example generates the HTML version only of the User Manual.
#

ifeq ($(DOC),bitbake-user-manual)
@@ -31,9 +31,9 @@ XSLTOPTS = --stringparam html.stylesheet bitbake-user-manual-style.css \
--stringparam  section.autolabel 1 \
--stringparam  section.label.includes.component.label 1 \
--xinclude
ALLPREQ = html tarball
TARFILES = bitbake-user-manual-style.css bitbake-user-manual.html figures/bitbake-title.png
MANUALS = $(DOC)/$(DOC).html
ALLPREQ = html pdf tarball
TARFILES = bitbake-user-manual-style.css bitbake-user-manual.html bitbake-user-manual.pdf figures/bitbake-title.png
MANUALS = $(DOC)/$(DOC).html $(DOC)/$(DOC).pdf
FIGURES = figures
STYLESHEET = $(DOC)/*.css

@@ -1,25 +1,11 @@
<?xml version='1.0'?>
<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns="http://www.w3.org/1999/xhtml" xmlns:fo="http://www.w3.org/1999/XSL/Format" version="1.0">

<xsl:import href="http://downloads.yoctoproject.org/mirror/docbook-mirror/docbook-xsl-1.76.1/xhtml/docbook.xsl" />

<!--

<xsl:import href="../template/1.76.1/docbook-xsl-1.76.1/xhtml/docbook.xsl" />

<xsl:import href="http://docbook.sourceforge.net/release/xsl/1.76.1/xhtml/docbook.xsl" />

-->

<xsl:include href="../template/permalinks.xsl"/>
<xsl:include href="../template/section.title.xsl"/>
<xsl:include href="../template/component.title.xsl"/>
<xsl:include href="../template/division.title.xsl"/>
<xsl:include href="../template/formal.object.heading.xsl"/>
<xsl:include href="../template/gloss-permalinks.xsl"/>
<xsl:import href="http://docbook.sourceforge.net/release/xsl/current/xhtml/docbook.xsl" />

<xsl:param name="html.stylesheet" select="'user-manual-style.css'" />
<xsl:param name="chapter.autolabel" select="1" />
<!-- <xsl:param name="appendix.autolabel" select="A" /> -->
<xsl:param name="section.autolabel" select="1" />
<xsl:param name="section.label.includes.component.label" select="1" />
<xsl:param name="appendix.autolabel">A</xsl:param>

@@ -6,9 +6,8 @@

<para>
The primary purpose for running BitBake is to produce some kind
of output such as a single installable package, a kernel, a software
development kit, or even a full, board-specific bootable Linux image,
complete with bootloader, kernel, and root filesystem.
of output such as an image, a kernel, or a software development
kit.
Of course, you can execute the <filename>bitbake</filename>
command with options that cause it to execute single tasks,
compile single recipe files, capture or clear data, or simply
@@ -21,48 +20,29 @@
The execution process is launched using the following command
form:
<literallayout class='monospaced'>
$ bitbake <replaceable>target</replaceable>
$ bitbake <target>
</literallayout>
For information on the BitBake command and its options,
see
"<link linkend='bitbake-user-manual-command'>The BitBake Command</link>"
section.
<note>
<para>
Prior to executing BitBake, you should take advantage of available
parallel thread execution on your build host by setting the
<link linkend='var-BB_NUMBER_THREADS'><filename>BB_NUMBER_THREADS</filename></link>
variable in your project's <filename>local.conf</filename>
configuration file.
</para>

<para>
A common method to determine this value for your build host is to run
the following:
<literallayout class='monospaced'>
$ grep processor /proc/cpuinfo
</literallayout>
This command returns the number of processors, which takes into
account hyper-threading.
Thus, a quad-core build host with hyper-threading most likely
shows eight processors, which is the value you would then assign to
<filename>BB_NUMBER_THREADS</filename>.
</para>

<para>
A possibly simpler solution is that some Linux distributions
(e.g. Debian and Ubuntu) provide the <filename>ncpus</filename> command.
</para>
</note>
</para>

<note>
Prior to executing BitBake, you should take advantage of parallel
thread execution by setting the
<link linkend='var-BB_NUMBER_THREADS'><filename>BB_NUMBER_THREADS</filename></link>
variable in your <filename>local.conf</filename>
configuration file.
</note>

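As a small sketch of the advice in the notes above, assuming a Linux build host and a conf/local.conf in the current build directory, the processor count can be captured and written out like this:

    # Count processors (hyper-threading included) and record the result:
    NPROC=$(grep -c '^processor' /proc/cpuinfo)
    echo "BB_NUMBER_THREADS = \"$NPROC\"" >> conf/local.conf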
<section id='parsing-the-base-configuration-metadata'>
<title>Parsing the Base Configuration Metadata</title>

<para>
The first thing BitBake does is parse base configuration
metadata.
Base configuration metadata consists of your project's
Base configuration metadata consists of the
<filename>bblayers.conf</filename> file to determine what
layers BitBake needs to recognize, all necessary
<filename>layer.conf</filename> files (one from each layer),
@@ -91,11 +71,10 @@
and
<link linkend='var-BBFILES'><filename>BBFILES</filename></link>.
<filename>BBPATH</filename> is used to search for
configuration and class files under the
<filename>conf</filename> and <filename>classes</filename>
configuration and class files under
<filename>conf/</filename> and <filename>class/</filename>
directories, respectively.
<filename>BBFILES</filename> is used to locate both recipe
and recipe append files
<filename>BBFILES</filename> is used to find recipe files
(<filename>.bb</filename> and <filename>.bbappend</filename>).
If there is no <filename>bblayers.conf</filename> file,
it is assumed the user has set the <filename>BBPATH</filename>
@@ -103,7 +82,7 @@
</para>

<para>
Next, the <filename>bitbake.conf</filename> file is located
Next, the <filename>bitbake.conf</filename> file is searched
using the <filename>BBPATH</filename> variable that was
just constructed.
The <filename>bitbake.conf</filename> file may also include other
@@ -138,18 +117,18 @@
optional <filename>conf/bblayers.conf</filename> configuration file.
This file is expected to contain a
<link linkend='var-BBLAYERS'><filename>BBLAYERS</filename></link>
variable that is a space-delimited list of 'layer' directories.
variable that is a space delimited list of 'layer' directories.
Recall that if BitBake cannot find a <filename>bblayers.conf</filename>
file, then it is assumed the user has set the <filename>BBPATH</filename>
and <filename>BBFILES</filename> variables directly in the environment.
file then it is assumed the user has set the <filename>BBPATH</filename>
and <filename>BBFILES</filename> directly in the environment.
</para>

<para>
For each directory (layer) in this list, a <filename>conf/layer.conf</filename>
file is located and parsed with the
file is searched for and parsed with the
<link linkend='var-LAYERDIR'><filename>LAYERDIR</filename></link>
variable being set to the directory where the layer was found.
The idea is these files automatically set up
The idea is these files automatically setup
<link linkend='var-BBPATH'><filename>BBPATH</filename></link>
and other variables correctly for a given build directory.
</para>
@@ -164,7 +143,7 @@

<para>
Only variable definitions and include directives are allowed
in BitBake <filename>.conf</filename> files.
in <filename>.conf</filename> files.
Some variables directly influence BitBake's behavior.
These variables might have been set from the environment
depending on the environment variables previously
@@ -187,8 +166,7 @@
Other classes that are specified in the configuration using the
<link linkend='var-INHERIT'><filename>INHERIT</filename></link>
variable are also included.
BitBake searches for class files in a
<filename>classes</filename> subdirectory under
BitBake searches for class files in a "classes" subdirectory under
the paths in <filename>BBPATH</filename> in the same way as
configuration files.
</para>
@@ -211,7 +189,7 @@
If a recipe uses a closing curly brace within the function and
the character has no leading spaces, BitBake produces a parsing
error.
If you use a pair of curly braces in a shell function, the
If you use a pair of curly brace in a shell function, the
closing curly brace must not be located at the start of the line
without leading spaces.
</para>
@@ -283,14 +261,14 @@
One common convention is to use the recipe filename to define
pieces of metadata.
For example, in <filename>bitbake.conf</filename> the recipe
name and version are used to set the variables
name and version set
<link linkend='var-PN'><filename>PN</filename></link> and
<link linkend='var-PV'><filename>PV</filename></link>:
<literallayout class='monospaced'>
PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}"
PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[1] or '1.0'}"
PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[1] or '1.0'}"
PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[0] or 'defaultpkgname'}"
</literallayout>
In this example, a recipe called "something_1.2.3.bb" would set
In this example, a recipe called "something_1.2.3.bb" sets
<filename>PN</filename> to "something" and
<filename>PV</filename> to "1.2.3".
</para>
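The filename convention itself can be sketched in plain shell; this only illustrates the split, it is not BitBake's actual parsing code:

    # something_1.2.3.bb -> PN=something, PV=1.2.3
    f="something_1.2.3.bb"
    base="${f%.bb}"
    echo "PN=${base%_*} PV=${base#*_}"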
@@ -353,55 +331,38 @@
</section>

<section id='bb-bitbake-providers'>
<title>Providers</title>
<title>Preferences and Providers</title>

<para>
Assuming BitBake has been instructed to execute a target
and that all the recipe files have been parsed, BitBake
starts to figure out how to build the target.
BitBake looks through the <filename>PROVIDES</filename> list
for each of the recipes.
A <filename>PROVIDES</filename> list is the list of names by which
the recipe can be known.
Each recipe's <filename>PROVIDES</filename> list is created
implicitly through the recipe's
<link linkend='var-PN'><filename>PN</filename></link> variable
and explicitly through the recipe's
BitBake starts by looking through the
<link linkend='var-PROVIDES'><filename>PROVIDES</filename></link>
variable, which is optional.
set in recipe files.
The default <filename>PROVIDES</filename> for a recipe is its name
(<link linkend='var-PN'><filename>PN</filename></link>),
however, a recipe can provide multiple things.
</para>

<para>
When a recipe uses <filename>PROVIDES</filename>, that recipe's
functionality can be found under an alternative name or names other
than the implicit <filename>PN</filename> name.
As an example, suppose a recipe named <filename>keyboard_1.0.bb</filename>
contained the following:
As an example of adding an extra provider, suppose a recipe named
<filename>foo_1.0.bb</filename> contained the following:
<literallayout class='monospaced'>
PROVIDES += "fullkeyboard"
PROVIDES += "virtual/bar_1.0"
</literallayout>
The <filename>PROVIDES</filename> list for this recipe becomes
"keyboard", which is implicit, and "fullkeyboard", which is explicit.
Consequently, the functionality found in
<filename>keyboard_1.0.bb</filename> can be found under two
different names.
</para>
</section>

<section id='bb-bitbake-preferences'>
<title>Preferences</title>

<para>
The <filename>PROVIDES</filename> list is only part of the solution
for figuring out a target's recipes.
Because targets might have multiple providers, BitBake needs
to prioritize providers by determining provider preferences.
The recipe now provides both "foo_1.0" and "virtual/bar_1.0".
The "virtual/" namespace is often used to denote cases where
multiple providers are expected with the user choosing between
them.
Kernels and toolchain components are common cases of this in
OpenEmbedded.
</para>

<para>
A common example in which a target has multiple providers
is "virtual/kernel", which is on the
<filename>PROVIDES</filename> list for each kernel recipe.
Sometimes a target might have multiple providers.
A common example is "virtual/kernel", which is provided by each
kernel recipe.
Each machine often selects the best kernel provider by using a
line similar to the following in the machine configuration file:
<literallayout class='monospaced'>
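The hunk ends just before the example line itself. A typical line of this kind, shown here with an illustrative provider name since the cut-off text does not give one:

    # Recipe name is an assumption; machines pick their own kernel provider:
    echo 'PREFERRED_PROVIDER_virtual/kernel = "linux-yocto"' >> conf/machine/mymachine.conf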
@@ -416,7 +377,7 @@

<para>
Understanding how providers are chosen is made complicated by the fact
that multiple versions might exist for a given provider.
that multiple versions might exist.
BitBake defaults to the highest version of a provider.
Version comparisons are made using the same method as Debian.
You can use the
@@ -425,19 +386,13 @@
You can influence the order by using the
<link linkend='var-DEFAULT_PREFERENCE'><filename>DEFAULT_PREFERENCE</filename></link>
variable.
</para>

<para>
By default, files have a preference of "0".
Setting <filename>DEFAULT_PREFERENCE</filename> to "-1" makes the
Setting the <filename>DEFAULT_PREFERENCE</filename> to "-1" makes the
recipe unlikely to be used unless it is explicitly referenced.
Setting <filename>DEFAULT_PREFERENCE</filename> to "1" makes it
likely the recipe is used.
<filename>PREFERRED_VERSION</filename> overrides any
<filename>DEFAULT_PREFERENCE</filename> setting.
<filename>DEFAULT_PREFERENCE</filename> is often used to mark newer
and more experimental recipe versions until they have undergone
sufficient testing to be considered stable.
Setting the <filename>DEFAULT_PREFERENCE</filename> to "1" makes it likely the recipe is used.
<filename>PREFERRED_VERSION</filename> overrides any <filename>DEFAULT_PREFERENCE</filename> setting.
<filename>DEFAULT_PREFERENCE</filename> is often used to mark newer and more experimental recipe
versions until they have undergone sufficient testing to be considered stable.
</para>

<para>
@@ -446,16 +401,18 @@
version, unless otherwise specified.
If the recipe in question has a
<link linkend='var-DEFAULT_PREFERENCE'><filename>DEFAULT_PREFERENCE</filename></link>
set lower than the other recipes (default is 0), then
it will not be selected.
set lower than
the other recipes (default is 0), then it will not be
selected.
This allows the person or persons maintaining
the repository of recipe files to specify
their preference for the default selected version.
Additionally, the user can specify their preferred version.
In addition, the user can specify their preferred version.
</para>

<para>
If the first recipe is named <filename>a_1.1.bb</filename>, then the
If the first recipe is named <filename>a_1.1.bb</filename>,
then the
<link linkend='var-PN'><filename>PN</filename></link> variable
will be set to “a”, and the
<link linkend='var-PV'><filename>PV</filename></link>
@@ -463,38 +420,19 @@
</para>

<para>
Thus, if a recipe named <filename>a_1.2.bb</filename> exists, BitBake
If we then have a recipe named <filename>a_1.2.bb</filename>, BitBake
will choose 1.2 by default.
However, if you define the following variable in a
<filename>.conf</filename> file that BitBake parses, you
can change that preference:
However, if we define the following variable in a
<filename>.conf</filename> file that BitBake parses, we
can change that.
<literallayout class='monospaced'>
PREFERRED_VERSION_a = "1.1"
</literallayout>
</para>

<note>
<para>
It is common for a recipe to provide two versions -- a stable,
numbered (and preferred) version, and a version that is
automatically checked out from a source code repository that
is considered more "bleeding edge" but can be selected only
explicitly.
</para>

<para>
For example, in the OpenEmbedded codebase, there is a standard,
versioned recipe file for BusyBox,
<filename>busybox_1.22.1.bb</filename>,
but there is also a Git-based version,
<filename>busybox_git.bb</filename>, which explicitly contains the line
<literallayout class='monospaced'>
DEFAULT_PREFERENCE = "-1"
</literallayout>
to ensure that the numbered, stable version is always preferred
unless the developer selects otherwise.
</para>
</note>
<para>
In summary, BitBake has created a list of providers, which is prioritized, for each target.
</para>
</section>

<section id='bb-bitbake-dependencies'>
@@ -557,7 +495,7 @@
As each task completes, a timestamp is written to the directory specified by the
<link linkend='var-STAMP'><filename>STAMP</filename></link> variable.
On subsequent runs, BitBake looks in the build directory within
<filename>tmp/stamps</filename> and does not rerun
<filename>tmp/stamps</filename>and does not rerun
tasks that are already completed unless a timestamp is found to be invalid.
Currently, invalid timestamps are only considered on a per
recipe file basis.
@@ -597,7 +535,7 @@
<title>Executing Tasks</title>

<para>
Tasks can be either a shell task or a Python task.
Tasks can either be a shell task or a Python task.
For shell tasks, BitBake writes a shell script to
<filename>${</filename><link linkend='var-T'><filename>T</filename></link><filename>}/run.do_taskname.pid</filename>
and then executes the script.
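After a build, the generated per-task script can be inspected under the recipe's temporary directory; the exact path varies by recipe and machine, so this is indicative only:

    # T usually resolves to the recipe's temp directory, for example:
    cat tmp/work/*/somerecipe/*/temp/run.do_compile*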
@@ -784,13 +722,13 @@
make some dependency and hash information available to the build.
This information includes:
<itemizedlist>
<listitem><para><filename>BB_BASEHASH_task-</filename><replaceable>taskname</replaceable>:
<listitem><para><filename>BB_BASEHASH_task-<taskname></filename>:
The base hashes for each task in the recipe.
</para></listitem>
<listitem><para><filename>BB_BASEHASH_</filename><replaceable>filename</replaceable><filename>:</filename><replaceable>taskname</replaceable>:
<listitem><para><filename>BB_BASEHASH_<filename:taskname></filename>:
The base hashes for each dependent task.
</para></listitem>
<listitem><para><filename>BBHASHDEPS_</filename><replaceable>filename</replaceable><filename>:</filename><replaceable>taskname</replaceable>:
<listitem><para><filename>BBHASHDEPS_<filename:taskname></filename>:
The task dependencies for each task.
</para></listitem>
<listitem><para><filename>BB_TASKHASH</filename>:
@@ -817,9 +755,9 @@
to determine the stamps and delta where these two
stamp trees diverge.
<note>
It is likely that future versions of BitBake will
It is likely that future versions of BitBake with
provide other signature handlers triggered through
additional "-S" parameters.
additional "-S" paramters.
</note>
</para>

@@ -8,14 +8,14 @@
BitBake's fetch module is a standalone piece of library code
that deals with the intricacies of downloading source code
and files from remote systems.
Fetching source code is one of the cornerstones of building software.
Fetching source code is one of the corner stones of building software.
As such, this module forms an important part of BitBake.
</para>

<para>
The current fetch module is called "fetch2" and refers to the
fact that it is the second major version of the API.
The original version is obsolete and has been removed from the codebase.
The original version is obsolete and removed from the codebase.
Thus, in all cases, "fetch" refers to "fetch2" in this
manual.
</para>
@@ -60,19 +60,17 @@
<note>
For convenience, the naming in these examples matches
the variables used by OpenEmbedded.
If you want to see the above code in action, examine
the OpenEmbedded class file <filename>base.bbclass</filename>.
</note>
The <filename>SRC_URI</filename> and <filename>WORKDIR</filename>
variables are not hardcoded into the fetcher, since those fetcher
methods can be (and are) called with different variable names.
variables are not coded into the fetcher.
They variables can (and are) called with different variable names.
In OpenEmbedded for example, the shared state (sstate) code uses
the fetch module to fetch the sstate files.
</para>

<para>
When the <filename>download()</filename> method is called,
BitBake tries to resolve the URLs by looking for source files
BitBake tries to fulfill the URLs by looking for source files
in a specific search order:
<itemizedlist>
<listitem><para><emphasis>Pre-mirror Sites:</emphasis>
@@ -86,7 +84,7 @@
<filename>SRC_URI</filename>).
</para></listitem>
<listitem><para><emphasis>Mirror Sites:</emphasis>
If fetch failures occur, BitBake next uses mirror locations as
If fetch failures occur, BitBake next uses mirror location as
defined by the
<link linkend='var-MIRRORS'><filename>MIRRORS</filename></link>
variable.
@@ -151,14 +149,14 @@
<para>
File integrity is of key importance for reproducing builds.
For non-local archive downloads, the fetcher code can verify
SHA-256 and MD5 checksums to ensure the archives have been
sha256 and md5 checksums to ensure the archives have been
downloaded correctly.
You can specify these checksums by using the
<filename>SRC_URI</filename> variable with the appropriate
varflags as follows:
<literallayout class='monospaced'>
SRC_URI[md5sum] = "<replaceable>value</replaceable>"
SRC_URI[sha256sum] = "<replaceable>value</replaceable>"
SRC_URI[md5sum] = "value"
SRC_URI[sha256sum] = "value"
</literallayout>
You can also specify the checksums as parameters on the
<filename>SRC_URI</filename> as shown below:
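The checksum values themselves come from standard tools run against the fetched archive; a quick sketch with an illustrative archive name:

    # Produce the values for SRC_URI[md5sum] and SRC_URI[sha256sum]:
    md5sum foo-1.0.tar.gz
    sha256sum foo-1.0.tar.gz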
@@ -262,8 +260,8 @@
<para>
This submodule handles URLs that begin with
<filename>file://</filename>.
The filename you specify within the URL can be
either an absolute or relative path to a file.
The filename you specify with in the URL can
either be an absolute or relative path to a file.
If the filename is relative, the contents of the
<link linkend='var-FILESPATH'><filename>FILESPATH</filename></link>
variable is used in the same way
@@ -288,64 +286,15 @@
</para>

<para>
Here are a couple of example URLs, the first relative and
the second absolute:
Here are some example URLs:
<literallayout class='monospaced'>
SRC_URI = "file://relativefile.patch"
SRC_URI = "file://relativefile.patch;this=ignored"
SRC_URI = "file:///Users/ich/very_important_software"
</literallayout>
</para>
</section>

<section id='http-ftp-fetcher'>
<title>HTTP/FTP wget fetcher (<filename>http://</filename>, <filename>ftp://</filename>, <filename>https://</filename>)</title>

<para>
This fetcher obtains files from web and FTP servers.
Internally, the fetcher uses the wget utility.
</para>

<para>
The executable and parameters used are specified by the
<filename>FETCHCMD_wget</filename> variable, which defaults
to sensible values.
The fetcher supports a parameter "downloadfilename" that
allows the name of the downloaded file to be specified.
Specifying the name of the downloaded file is useful
for avoiding collisions in
<link linkend='var-DL_DIR'><filename>DL_DIR</filename></link>
when dealing with multiple files that have the same name.
</para>

<para>
Some example URLs are as follows:
<literallayout class='monospaced'>
SRC_URI = "http://oe.handhelds.org/not_there.aac"
SRC_URI = "ftp://oe.handhelds.org/not_there_as_well.aac"
SRC_URI = "ftp://you@oe.handhelds.org/home/you/secret.plan"
</literallayout>
</para>
<note>
Because URL parameters are delimited by semi-colons, this can
introduce ambiguity when parsing URLs that also contain semi-colons,
for example:
<literallayout class='monospaced'>
SRC_URI = "http://abc123.org/git/?p=gcc/gcc.git;a=snapshot;h=a5dd47"
</literallayout>
Such URLs should be modified by replacing semi-colons with '&' characters:
<literallayout class='monospaced'>
SRC_URI = "http://abc123.org/git/?p=gcc/gcc.git&a=snapshot&h=a5dd47"
</literallayout>
In most cases this should work. Treating semi-colons and '&' in queries
identically is recommended by the World Wide Web Consortium (W3C).
Note that due to the nature of the URL, you may have to specify the name
of the downloaded file as well:
<literallayout class='monospaced'>
SRC_URI = "http://abc123.org/git/?p=gcc/gcc.git&a=snapshot&h=a5dd47;downloadfilename=myfile.bz2"
</literallayout>
</note>
</section>

<section id='cvs-fetcher'>
<title>CVS fetcher (<filename>(cvs://</filename>)</title>

@@ -364,7 +313,7 @@
A special value of "now" causes the checkout to
be updated on every build.
</para></listitem>
<listitem><para><emphasis><link linkend='var-CVSDIR'><filename>CVSDIR</filename></link>:</emphasis>
<listitem><para><emphasis><filename>CVSDIR</filename>:</emphasis>
Specifies where a temporary checkout is saved.
The location is often <filename>DL_DIR/cvs</filename>.
</para></listitem>
@@ -385,7 +334,7 @@
The supported parameters are as follows:
<itemizedlist>
<listitem><para><emphasis>"method":</emphasis>
The protocol over which to communicate with the CVS server.
The protocol over which to communicate with the cvs server.
By default, this protocol is "pserver".
If "method" is set to "ext", BitBake examines the
"rsh" parameter and sets <filename>CVS_RSH</filename>.
@@ -413,8 +362,7 @@
Effectively, you are renaming the output directory
to which the module is unpacked.
You are forcing the module into a special
directory relative to
<link linkend='var-CVSDIR'><filename>CVSDIR</filename></link>.
directory relative to <filename>CVSDIR</filename>.
</para></listitem>
<listitem><para><emphasis>"rsh"</emphasis>
Used in conjunction with the "method" parameter.
@@ -446,6 +394,36 @@
</para>
</section>

<section id='http-ftp-fetcher'>
<title>HTTP/FTP wget fetcher (<filename>http://</filename>, <filename>ftp://</filename>, <filename>https://</filename>)</title>

<para>
This fetcher obtains files from web and FTP servers.
Internally, the fetcher uses the wget utility.
</para>

<para>
The executable and parameters used are specified by the
<filename>FETCHCMD_wget</filename> variable, which defaults
to a sensible values.
The fetcher supports a parameter "downloadfilename" that
allows the name of the downloaded file to be specified.
Specifying the name of the downloaded file is useful
for avoiding collisions in
<link linkend='var-DL_DIR'><filename>DL_DIR</filename></link>
when dealing with multiple files that have the same name.
</para>

<para>
Some example URLs are as follows:
<literallayout class='monospaced'>
SRC_URI = "http://oe.handhelds.org/not_there.aac"
SRC_URI = "ftp://oe.handhelds.org/not_there_as_well.aac"
SRC_URI = "ftp://you@oe.handheld.sorg/home/you/secret.plan"
</literallayout>
</para>
</section>

<section id='svn-fetcher'>
<title>Subversion (SVN) Fetcher (<filename>svn://</filename>)</title>

@@ -455,9 +433,9 @@
The executable used is specified by
<filename>FETCHCMD_svn</filename>, which defaults
to "svn".
The fetcher's temporary working directory is set by
<link linkend='var-SVNDIR'><filename>SVNDIR</filename></link>,
which is usually <filename>DL_DIR/svn</filename>.
The fetcher's temporary working directory is set
by <filename>SVNDIR</filename>, which is usually
<filename>DL_DIR/svn</filename>.
</para>

<para>
@@ -488,13 +466,6 @@
compile-time when set to "keep".
By default, these directories are removed.
</para></listitem>
<listitem><para><emphasis>"transportuser":</emphasis>
When required, sets the username for the transport.
By default, this parameter is empty.
The transport username is different than the username
used in the main URL, which is passed to the subversion
command.
</para></listitem>
</itemizedlist>
Following are two examples using svn:
<literallayout class='monospaced'>
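The two example lines were cut off by the hunk boundary; illustrative completions, with host, module, and revision as assumptions, would read:

    # SRC_URI = "svn://svn.example.org/trunk;module=proj;protocol=http;rev=667"
    # SRC_URI = "svn://svn.example.org/trunk;module=proj;protocol=svn+ssh"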
@@ -505,15 +476,14 @@
</section>

<section id='git-fetcher'>
<title>Git Fetcher (<filename>git://</filename>)</title>
<title>GIT Fetcher (<filename>git://</filename>)</title>

<para>
This fetcher submodule fetches code from the Git
source control system.
The fetcher works by creating a bare clone of the
remote into
<link linkend='var-GITDIR'><filename>GITDIR</filename></link>,
which is usually <filename>DL_DIR/git2</filename>.
remote into <filename>GITDIR</filename>, which is
usually <filename>DL_DIR/git</filename>.
This bare clone is then cloned into the work directory during the
unpack stage when a specific tree is checked out.
This is done using alternates and by reference to
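The sentence above is cut short by the hunk boundary, but the clone-then-reference flow it describes can be sketched roughly as follows; the commands and paths are illustrative, not the fetcher's exact implementation:

    # 1) Bare mirror clone into the download cache:
    git clone --bare git://git.example.org/proj "$DL_DIR/git2/git.example.org.proj"
    # 2) Working checkout in WORKDIR, sharing objects with the bare clone:
    git clone -s -n "$DL_DIR/git2/git.example.org.proj" "$WORKDIR/git"
    git -C "$WORKDIR/git" checkout SRCREV_HASH   # placeholder revision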
@@ -581,7 +551,7 @@
network.
For that reason, tags are often not used.
As far as Git is concerned, the "tag" parameter behaves
effectively the same as the "rev" parameter.
effectively the same as the "revision" parameter.
</para></listitem>
<listitem><para><emphasis>"subpath":</emphasis>
Limits the checkout to a specific subpath of the tree.
@@ -600,122 +570,6 @@
</para>
</section>

<section id='gitsm-fetcher'>
<title>Git Submodule Fetcher (<filename>gitsm://</filename>)</title>

<para>
This fetcher submodule inherits from the
<link linkend='git-fetcher'>Git fetcher</link> and extends
that fetcher's behavior by fetching a repository's submodules.
<link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>
is passed to the Git fetcher as described in the
"<link linkend='git-fetcher'>Git Fetcher (<filename>git://</filename>)</link>"
section.
<note>
<title>Notes and Warnings</title>
<para>
You must clean a recipe when switching between
'<filename>git://</filename>' and
'<filename>gitsm://</filename>' URLs.
</para>

<para>
The Git Submodules fetcher is not a complete fetcher
implementation.
The fetcher has known issues where it does not use the
normal source mirroring infrastructure properly.
</para>
</note>
</para>
</section>

<section id='clearcase-fetcher'>
<title>ClearCase Fetcher (<filename>ccrc://</filename>)</title>

<para>
This fetcher submodule fetches code from a
<ulink url='http://en.wikipedia.org/wiki/Rational_ClearCase'>ClearCase</ulink>
repository.
</para>

<para>
To use this fetcher, make sure your recipe has proper
<link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>,
<link linkend='var-SRCREV'><filename>SRCREV</filename></link>, and
<link linkend='var-PV'><filename>PV</filename></link> settings.
Here is an example:
<literallayout class='monospaced'>
SRC_URI = "ccrc://cc.example.org/ccrc;vob=/example_vob;module=/example_module"
SRCREV = "EXAMPLE_CLEARCASE_TAG"
PV = "${@d.getVar("SRCREV", False).replace("/", "+")}"
</literallayout>
The fetcher uses the <filename>rcleartool</filename> or
<filename>cleartool</filename> remote client, depending on
which one is available.
</para>

<para>
Following are options for the <filename>SRC_URI</filename>
statement:
<itemizedlist>
<listitem><para><emphasis><filename>vob</filename></emphasis>:
The name, which must include the
prepending "/" character, of the ClearCase VOB.
This option is required.
</para></listitem>
<listitem><para><emphasis><filename>module</filename></emphasis>:
The module, which must include the
prepending "/" character, in the selected VOB.
<note>
The <filename>module</filename> and <filename>vob</filename>
options are combined to create the <filename>load</filename> rule in
the view config spec.
As an example, consider the <filename>vob</filename> and
<filename>module</filename> values from the
<filename>SRC_URI</filename> statement at the start of this section.
Combining those values results in the following:
<literallayout class='monospaced'>
load /example_vob/example_module
</literallayout>
</note>
</para></listitem>
<listitem><para><emphasis><filename>proto</filename></emphasis>:
The protocol, which can be either <filename>http</filename> or
<filename>https</filename>.
</para></listitem>
</itemizedlist>
</para>

<para>
By default, the fetcher creates a configuration specification.
If you want this specification written to an area other than the default,
use the <filename>CCASE_CUSTOM_CONFIG_SPEC</filename> variable
in your recipe to define where the specification is written.
<note>
The <filename>SRCREV</filename> loses its functionality if you
specify this variable.
However, <filename>SRCREV</filename> is still used to label the
archive after a fetch even though it does not define what is
fetched.
</note>
</para>

<para>
Here are a couple of other behaviors worth mentioning:
<itemizedlist>
<listitem><para>
When using <filename>cleartool</filename>, the login of
<filename>cleartool</filename> is handled by the system.
The login requires no special steps.
</para></listitem>
<listitem><para>
In order to use <filename>rcleartool</filename> with authenticated
users, an "rcleartool login" is necessary before using the fetcher.
</para></listitem>
</itemizedlist>
</para>
</section>

<section id='other-fetchers'>
<title>Other Fetchers</title>

@@ -728,6 +582,9 @@
<listitem><para>
Perforce (<filename>p4://</filename>)
</para></listitem>
<listitem><para>
Git Submodules (<filename>gitsm://</filename>)
</para></listitem>
<listitem><para>
Trees using Git Annex (<filename>gitannex://</filename>)
</para></listitem>

@@ -221,7 +221,7 @@
<para>From your shell, enter the following commands to set and
export the <filename>BBPATH</filename> variable:
<literallayout class='monospaced'>
$ BBPATH="<replaceable>projectdirectory</replaceable>"
$ BBPATH="<projectdirectory>"
$ export BBPATH
</literallayout>
Use your actual project directory in the command.
@@ -471,7 +471,7 @@ ERROR: Unable to parse base: ParseError in configuration INHERITs: Could not inh
Time: 00:00:00
Parsing of 1 .bb files complete (0 cached, 1 parsed). 1 targets, 0 skipped, 0 masked, 0 errors.
NOTE: Resolving any missing task queue dependencies
NOTE: Preparing RunQueue
NOTE: Preparing runqueue
NOTE: Executing RunQueue Tasks
********************
*                  *

@@ -8,9 +8,9 @@
Welcome to the BitBake User Manual.
This manual provides information on the BitBake tool.
The information attempts to be as independent as possible regarding
systems that use BitBake, such as OpenEmbedded and the
Yocto Project.
In some cases, scenarios or examples within the context of
systems that use BitBake, such as the Yocto Project and
OpenEmbedded.
In some cases, scenarios or examples that within the context of
a build system are used in the manual to help with understanding.
For these cases, the manual clearly states the context.
</para>
@@ -35,31 +35,28 @@
<listitem><para>
BitBake executes tasks according to provided
metadata that builds up the tasks.
Metadata is stored in recipe (<filename>.bb</filename>)
and related recipe "append" (<filename>.bbappend</filename>)
files, configuration (<filename>.conf</filename>) and
underlying include (<filename>.inc</filename>) files, and
in class (<filename>.bbclass</filename>) files.
The metadata provides
Metadata is stored in recipe (<filename>.bb</filename>),
configuration (<filename>.conf</filename>), and class
(<filename>.bbclass</filename>) files and provides
BitBake with instructions on what tasks to run and
the dependencies between those tasks.
</para></listitem>
<listitem><para>
BitBake includes a fetcher library for obtaining source
code from various places such as local files, source control
systems, or websites.
code from various places such as source control
systems or websites.
</para></listitem>
<listitem><para>
The instructions for each unit to be built (e.g. a piece
of software) are known as "recipe" files and
of software) are known as recipe files and
contain all the information about the unit
(dependencies, source file locations, checksums, description
and so on).
</para></listitem>
<listitem><para>
BitBake includes a client/server abstraction and can
be used from a command line or used as a service over
XML-RPC and has several different user interfaces.
be used from a command line or used as a service over XMLRPC and
has several different user interfaces.
</para></listitem>
</itemizedlist>
</para>
@@ -72,7 +69,7 @@
BitBake was originally a part of the OpenEmbedded project.
It was inspired by the Portage package management system
used by the Gentoo Linux distribution.
On December 7, 2004, OpenEmbedded project team member
On December 7, 2004, OpenEmbedded project team member,
Chris Larson split the project into two distinct pieces:
<itemizedlist>
<listitem><para>BitBake, a generic task executor</para></listitem>
@@ -82,11 +79,8 @@
Today, BitBake is the primary basis of the
<ulink url="http://www.openembedded.org/">OpenEmbedded</ulink>
project, which is being used to build and maintain Linux
distributions such as the
<ulink url='http://www.angstrom-distribution.org/'>Angstrom Distribution</ulink>,
and which is also being used as the build tool for Linux projects
such as the
<ulink url='http://www.yoctoproject.org'>Yocto Project</ulink>.
distributions such as the Angstrom Distribution and which is used
as the build tool for Linux projects such as the Yocto Project.
</para>

<para>
@@ -94,7 +88,7 @@
an aspiring embedded Linux distribution.
All of the build systems used by traditional desktop Linux
distributions lacked important functionality, and none of the
ad hoc Buildroot-based systems, prevalent in the
ad-hoc Buildroot-based systems, prevalent in the
embedded space, were scalable or maintainable.
</para>

@@ -144,7 +138,7 @@
projects for their builds.
</para></listitem>
<listitem><para>
Provide an inheritance mechanism to share
Provide an inheritance mechanism that share
common metadata between many packages.
</para></listitem>
</itemizedlist>
@@ -157,7 +151,7 @@
</para></listitem>
<listitem><para>
Split metadata into layers and allow layers
to enhance or override other layers.
to override each other.
</para></listitem>
<listitem><para>
Allow representation of a given set of input variables
@@ -184,14 +178,14 @@
what tasks are required to run, and executes those tasks.
Similar to GNU Make, BitBake controls how software is
built.
GNU Make achieves its control through "makefiles", while
GNU Make achieves its control through "makefiles".
BitBake uses "recipes".
</para>

<para>
BitBake extends the capabilities of a simple
tool like GNU Make by allowing for the definition of much more
complex tasks, such as assembling entire embedded Linux
tool like GNU Make by allowing for much more complex tasks
to be completed, such as assembling entire embedded Linux
distributions.
</para>

@@ -209,20 +203,14 @@
<filename>.bb</filename>, are the most basic metadata files.
These recipe files provide BitBake with the following:
<itemizedlist>
<listitem><para>Descriptive information about the
package (author, homepage, license, and so on)</para></listitem>
<listitem><para>Descriptive information about the package</para></listitem>
<listitem><para>The version of the recipe</para></listitem>
<listitem><para>Existing dependencies (both build
and runtime dependencies)</para></listitem>
<listitem><para>Where the source code resides and
how to fetch it</para></listitem>
<listitem><para>Whether the source code requires
any patches, where to find them, and how to apply
them</para></listitem>
<listitem><para>How to configure and compile the
source code</para></listitem>
<listitem><para>Existing Dependencies</para></listitem>
<listitem><para>Where the source code resides</para></listitem>
<listitem><para>Whether the source code requires any patches</para></listitem>
<listitem><para>How to compile the source code</para></listitem>
<listitem><para>Where on the target machine to install the
package or packages created</para></listitem>
package being compiled</para></listitem>
</itemizedlist>
</para>
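A minimal, purely illustrative recipe touching the items in the list above might read as follows; every name and URL here is an assumption:

    # example_1.0.bb (illustrative):
    #   SUMMARY = "Example unit of software"
    #   LICENSE = "MIT"
    #   DEPENDS = "zlib"
    #   SRC_URI = "http://example.com/example-1.0.tar.gz"
    #   SRC_URI[sha256sum] = "(checksum value)"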
@@ -234,11 +222,7 @@
|
||||
The term "package" is also commonly used to describe recipes.
|
||||
However, since the same word is used to describe packaged
|
||||
output from a project, it is best to maintain a single
|
||||
descriptive term - "recipes".
|
||||
Put another way, a single "recipe" file is quite capable
|
||||
of generating a number of related but separately installable
|
||||
"packages".
|
||||
In fact, that ability is fairly common.
|
||||
descriptive term, "recipes".
|
||||
</note>
|
||||
</para>
|
||||
</section>
|
||||
@@ -273,7 +257,7 @@
|
||||
called <filename>base.bbclass</filename>.
|
||||
You can find this file in the
|
||||
<filename>classes</filename> directory.
|
||||
The <filename>base.bbclass</filename> class files is special since it
|
||||
The <filename>base.bbclass</filename> is special since it
|
||||
is always included automatically for all recipes
|
||||
and classes.
|
||||
This class contains definitions for standard basic tasks such
|
||||
@@ -300,8 +284,7 @@
|
||||
To illustrate how you can use layers to keep things modular,
|
||||
consider customizations you might make to support a specific target machine.
|
||||
These types of customizations typically reside in a special layer,
|
||||
rather than a general layer, called a Board Support Package (BSP)
|
||||
Layer.
|
||||
rather than a general layer, called a Board Specific Package (BSP) Layer.
|
||||
Furthermore, the machine customizations should be isolated from
|
||||
recipes and metadata that support a new GUI environment, for
|
||||
example.
|
||||
@@ -321,8 +304,9 @@
|
||||
|
||||
<para>
|
||||
Append files, which are files that have the
|
||||
<filename>.bbappend</filename> file extension, extend or
|
||||
override information in an existing recipe file.
|
||||
<filename>.bbappend</filename> file extension, add or
|
||||
extend build information to an existing
|
||||
recipe file.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
@@ -335,9 +319,8 @@
</para>

<para>
Information in append files extends or
overrides the information in the underlying,
similarly-named recipe files.
Information in append files overrides the information in the
similarly-named recipe file.
</para>

<para>
@@ -362,12 +345,6 @@
However, if you named the append file
<filename>busybox_1.%.bbappend</filename>, then you would have a match.
</para>

<para>
In the most general case, you could name the append file something as
simple as <filename>busybox_%.bbappend</filename> to be entirely
version independent.
</para>
</section>
</section>
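As a sketch of what such a version-independent append file might contain (the patch name is hypothetical), a busybox_%.bbappend could add a layer-local patch like this:

    FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"
    SRC_URI += "file://local-tweak.patch"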
@@ -436,20 +413,6 @@
you have a directory entitled
<filename>bitbake-1.17.0</filename>.
</para></listitem>
<listitem><para><emphasis>Using the BitBake that Comes With Your
Build Checkout:</emphasis>
A final possibility for getting a copy of BitBake is that it
already comes with your checkout of a larger BitBake-based build
system, such as Poky or the Yocto Project.
Rather than manually checking out individual layers and
gluing them together yourself, you can check
out an entire build system.
The checkout will already include a version of BitBake that
has been thoroughly tested for compatibility with the other
components.
For information on how to check out a particular BitBake-based
build system, consult that build system's supporting documentation.
</para></listitem>
</itemizedlist>
</para>
</section>
@@ -508,16 +471,14 @@
-D, --debug           Increase the debug level. You can specify this more
                      than once.
-n, --dry-run         Don't execute, just go through the motions.
-S SIGNATURE_HANDLER, --dump-signatures=SIGNATURE_HANDLER
-S DUMP_SIGNATURES, --dump-signatures=DUMP_SIGNATURES
                      Dump out the signature construction information, with
                      no task execution. The SIGNATURE_HANDLER parameter is
                      passed to the handler. Two common values are none and
                      printdiff but the handler may define more/less. none
                      means only dump the signature, printdiff means compare
                      the dumped signature with the cached one.
                      no task execution. Parameters are passed to the
                      signature handling code, use 'none' if no specific
                      handler is required.
-p, --parse-only      Quit after parsing the BB recipes.
-s, --show-versions   Show current and preferred versions of all recipes.
-e, --environment     Show the global or per-recipe environment complete
-e, --environment     Show the global or per-package environment complete
                      with information about where variables were
                      set/changed.
-g, --graphviz        Save dependency tree information for the specified
@@ -532,8 +493,6 @@
-u UI, --ui=UI        The user interface to use (e.g. knotty, hob, depexp).
-t SERVERTYPE, --servertype=SERVERTYPE
                      Choose which server to use, process or xmlrpc.
--token=XMLRPCTOKEN   Specify the connection token to be used when
                      connecting to a remote server.
--revisions-changed   Set the exit code depending on whether upstream
                      floating revisions have changed or not.
--server-only         Run bitbake without a UI, only starting a server
@@ -600,14 +559,14 @@
when one wants to manage multiple <filename>.bb</filename>
files.
Clearly there needs to be a way to tell BitBake what
files are available and, of those, which you
files are available, and of those, which you
want to execute.
There also needs to be a way for each recipe
to express its dependencies, both for build-time and
runtime.
There must be a way for you to express recipe preferences
when multiple recipes provide the same functionality, or when
there are multiple versions of a recipe.
there are multiple versions of a recipe.
</para>

<para>
@@ -159,10 +159,6 @@
using the "+=" and "=+" operators.
These operators insert a space between the current
value and prepended or appended value.
</para>

<para>
These operators take immediate effect during parsing.
Here are some examples:
<literallayout class='monospaced'>
B = "bval"
@@ -182,10 +178,6 @@
<para>
If you want to append or prepend values without an
inserted space, use the ".=" and "=." operators.
</para>

<para>
These operators take immediate effect during parsing.
Here are some examples:
<literallayout class='monospaced'>
B = "bval"
@@ -206,13 +198,6 @@
You can also append and prepend a variable's value
using an override style syntax.
When you use this syntax, no spaces are inserted.
</para>

<para>
These operators differ from the ":=", ".=", "=.", "+=", and "=+"
operators in that their effects are deferred
until after parsing completes rather than being immediately
applied.
Here are some examples:
<literallayout class='monospaced'>
B = "bval"
@@ -232,6 +217,13 @@
override syntax.
</note>
</para>

<para>
The operators "_append" and "_prepend" differ from
the operators ".=" and "=." in that they are deferred
until after parsing completes rather than being immediately
applied.
</para>
</section>
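A minimal sketch of the difference in timing described above:

    A = "1"
    A .= "2"         # immediate: A is "12" as soon as this line is parsed
    B = "1"
    B_append = "2"   # deferred: recorded now, applied after parsing completes
    B = "3"          # final value of B is "32", because the _append runs last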
<section id='removing-override-style-syntax'>
@@ -291,18 +283,7 @@
The variable <filename>FOO</filename> has two flags:
<filename>a</filename> and <filename>b</filename>.
The flags are immediately set to "abc" and "123", respectively.
The <filename>a</filename> flag becomes "abc 456".
</para>

<para>
No need exists to pre-define variable flags.
You can simply start using them.
One extremely common application
is to attach some brief documentation to a BitBake variable as
follows:
<literallayout class='monospaced'>
CACHE[doc] = "The directory holding the cache of the metadata."
</literallayout>
The <filename>a</filename> flag becomes "abc456".
</para>
</section>
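The assignments that the paragraph refers to sit above this hunk; a sketch consistent with the described results (assumed here, not shown in the diff) would be:

    FOO[a] = "abc"
    FOO[b] = "123"
    FOO[a] += "456"   # "+=" inserts a space, giving FOO[a] the value "abc 456"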
@@ -317,19 +298,7 @@
DATE = "${@time.strftime('%Y%m%d',time.gmtime())}"
</literallayout>
This example results in the <filename>DATE</filename>
variable being set to the current date.
</para>

<para>
Probably the most common use of this feature is to extract
the value of variables from BitBake's internal data dictionary,
<filename>d</filename>.
The following lines select the values of a package name
and its version number, respectively:
<literallayout class='monospaced'>
PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}"
PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[1] or '1.0'}"
</literallayout>
variable becoming the current date.
</para>
</section>
@@ -403,25 +372,6 @@
You select the os-specific version of the <filename>TEST</filename>
variable by appending the "os" override to the variable
(i.e. <filename>TEST_os</filename>).
</para>

<para>
To better understand this, consider a practical example
that assumes an OpenEmbedded metadata-based Linux
kernel recipe file.
The following lines from the recipe file first set
the kernel branch variable <filename>KBRANCH</filename>
to a default value, then conditionally override that
value based on the architecture of the build:
<literallayout class='monospaced'>
KBRANCH = "standard/base"
KBRANCH_qemuarm = "standard/arm-versatile-926ejs"
KBRANCH_qemumips = "standard/mti-malta32"
KBRANCH_qemuppc = "standard/qemuppc"
KBRANCH_qemux86 = "standard/common-pc/base"
KBRANCH_qemux86-64 = "standard/common-pc-64/base"
KBRANCH_qemumips64 = "standard/mti-malta64"
</literallayout>
</para></listitem>
<listitem><para><emphasis>Appending and Prepending:</emphasis>
BitBake also supports append and prepend operations to
@@ -435,19 +385,6 @@
</literallayout>
In this example, <filename>DEPENDS</filename> becomes
"glibc ncurses libmad".
</para>

<para>
Again, using an OpenEmbedded metadata-based
kernel recipe file as an example, the
following lines will conditionally append to the
<filename>KERNEL_FEATURES</filename> variable based
on the architecture:
<literallayout class='monospaced'>
KERNEL_FEATURES_append = " ${KERNEL_EXTRA_FEATURES}"
KERNEL_FEATURES_append_qemux86=" cfg/sound.scc cfg/paravirt_kvm.scc"
KERNEL_FEATURES_append_qemux86-64=" cfg/sound.scc cfg/paravirt_kvm.scc"
</literallayout>
</para></listitem>
</itemizedlist>
</para>
@@ -952,7 +889,7 @@
<listitem><para>
The class needs to define the function as follows:
<literallayout class='monospaced'>
<replaceable>classname</replaceable><filename>_</filename><replaceable>functionname</replaceable>
<classname>_<functionname>
</literallayout>
For example, if you have a class file
<filename>bar.bbclass</filename> and a function named
@@ -966,7 +903,7 @@
The class needs to contain the <filename>EXPORT_FUNCTIONS</filename>
statement as follows:
<literallayout class='monospaced'>
EXPORT_FUNCTIONS <replaceable>functionname</replaceable>
EXPORT_FUNCTIONS <functionname>
</literallayout>
For example, continuing with the same example, the
statement in the <filename>bar.bbclass</filename> would be
@@ -1065,41 +1002,13 @@
<title>Deleting a Task</title>

<para>
As well as being able to add tasks, you can delete them.
Simply use the <filename>deltask</filename> command to
delete a task.
As well as being able to add tasks, tasks can also be deleted.
This is done simply with <filename>deltask</filename> command.
For example, to delete the example task used in the previous
sections, you would use:
<literallayout class='monospaced'>
deltask printdate
</literallayout>
If you delete a task using the <filename>deltask</filename>
command and the task has dependencies, the dependencies are
not reconnected.
For example, suppose you have three tasks named
<filename>do_a</filename>, <filename>do_b</filename>, and
<filename>do_c</filename>.
Furthermore, <filename>do_c</filename> is dependent on
<filename>do_b</filename>, which in turn is dependent on
<filename>do_a</filename>.
Given this scenario, if you use <filename>deltask</filename>
to delete <filename>do_b</filename>, the implicit dependency
relationship between <filename>do_c</filename> and
<filename>do_a</filename> through <filename>do_b</filename>
no longer exists, and <filename>do_c</filename> dependencies
are not updated to include <filename>do_a</filename>.
Thus, <filename>do_c</filename> is free to run before
<filename>do_a</filename>.
</para>

<para>
If you want dependencies such as these to remain intact, use
the <filename>noexec</filename> varflag to disable the task
instead of using the <filename>deltask</filename> command to
delete it:
<literallayout class='monospaced'>
do_b[noexec] = "1"
</literallayout>
</para>
</section>
@@ -1163,11 +1072,13 @@
<para>
The <filename>BB_ORIGENV</filename> variable returns a datastore
object that can be queried using the standard datastore operators
such as <filename>getVar(, False)</filename>.
such as <filename>getVar()</filename>.
The datastore object is useful, for example, to find the original
<filename>DISPLAY</filename> variable.
Here is an example:
<literallayout class='monospaced'>
BB_ORIGENV - add example?

origenv = d.getVar("BB_ORIGENV", False)
bar = origenv.getVar("BAR", False)
</literallayout>
@@ -1192,7 +1103,7 @@
BitBake reads and writes varflags to the datastore using the following
command forms:
<literallayout class='monospaced'>
<replaceable>variable</replaceable> = d.getVarFlags("<replaceable>variable</replaceable>")
<variable> = d.getVarFlags("<variable>")
self.d.setVarFlags("FOO", {"func": True})
</literallayout>
</para>
@@ -1213,36 +1124,11 @@
Tasks support a number of these flags which control various
functionality of the task:
<itemizedlist>
<listitem><para><emphasis>cleandirs:</emphasis>
Empty directories that should be created before the task runs.
</para></listitem>
<listitem><para><emphasis>depends:</emphasis>
Controls inter-task dependencies.
See the
<link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
variable and the
"<link linkend='inter-task-dependencies'>Inter-Task Dependencies</link>"
section for more information.
</para></listitem>
<listitem><para><emphasis>deptask:</emphasis>
Controls task build-time dependencies.
See the
<link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
variable and the
"<link linkend='build-dependencies'>Build Dependencies</link>"
section for more information.
</para></listitem>
<listitem><para><emphasis>dirs:</emphasis>
Directories that should be created before the task runs.
</para></listitem>
<listitem><para><emphasis>lockfiles:</emphasis>
Specifies one or more lockfiles to lock while the task
executes.
Only one task may hold a lockfile, and any task that
attempts to lock an already locked file will block until
the lock is released.
You can use this variable flag to accomplish mutual
exclusion.
<listitem><para><emphasis>cleandirs:</emphasis>
Empty directories that should be created before the task runs.
</para></listitem>
<listitem><para><emphasis>noexec:</emphasis>
Marks the task as being empty, with no execution required.
@@ -1254,20 +1140,21 @@
Tells BitBake to not generate a stamp file for a task,
which implies the task should always be executed.
</para></listitem>
<listitem><para><emphasis>postfuncs:</emphasis>
List of functions to call after the completion of the task.
<listitem><para><emphasis>fakeroot:</emphasis>
Causes a task to be run in a fakeroot environment,
obtained by adding the variables in
<link linkend='var-FAKEROOTENV'><filename>FAKEROOTENV</filename></link>
to the environment.
</para></listitem>
<listitem><para><emphasis>prefuncs:</emphasis>
List of functions to call before the task executes.
<listitem><para><emphasis>umask:</emphasis>
The umask to run the task under.
</para></listitem>
<listitem><para><emphasis>rdepends:</emphasis>
Controls inter-task runtime dependencies.
<listitem><para><emphasis>deptask:</emphasis>
Controls task build-time dependencies.
See the
<link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link>
variable, the
<link linkend='var-RRECOMMENDS'><filename>RRECOMMENDS</filename></link>
variable, and the
"<link linkend='inter-task-dependencies'>Inter-Task Dependencies</link>"
<link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
variable and the
"<link linkend='build-dependencies'>Build Dependencies</link>"
section for more information.
</para></listitem>
<listitem><para><emphasis>rdeptask:</emphasis>
@@ -1280,11 +1167,6 @@
"<link linkend='runtime-dependencies'>Runtime Dependencies</link>"
section for more information.
</para></listitem>
<listitem><para><emphasis>recideptask:</emphasis>
When set in conjunction with
<filename>recrdeptask</filename>, specifies a task that
should be inspected for additional dependencies.
</para></listitem>
<listitem><para><emphasis>recrdeptask:</emphasis>
Controls task recursive runtime dependencies.
See the
@@ -1295,14 +1177,35 @@
"<link linkend='recursive-dependencies'>Recursive Dependencies</link>"
section for more information.
</para></listitem>
<listitem><para><emphasis>depends:</emphasis>
Controls inter-task dependencies.
See the
<link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
variable and the
"<link linkend='inter-task-dependencies'>Inter-Task Dependencies</link>"
section for more information.
</para></listitem>
<listitem><para><emphasis>rdepends:</emphasis>
Controls inter-task runtime dependencies.
See the
<link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link>
variable, the
<link linkend='var-RRECOMMENDS'><filename>RRECOMMENDS</filename></link>
variable, and the
"<link linkend='inter-task-dependencies'>Inter-Task Dependencies</link>"
section for more information.
</para></listitem>
<listitem><para><emphasis>postfuncs:</emphasis>
List of functions to call after the completion of the task.
</para></listitem>
<listitem><para><emphasis>prefuncs:</emphasis>
List of functions to call before the task executes.
</para></listitem>
<listitem><para><emphasis>stamp-extra-info:</emphasis>
Extra stamp information to append to the task's stamp.
As an example, OpenEmbedded uses this flag to allow
machine-specific tasks.
</para></listitem>
<listitem><para><emphasis>umask:</emphasis>
The umask to run the task under.
</para></listitem>
</itemizedlist>
</para>
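As a sketch of how several of these flags combine on one task (the task and function names here are hypothetical):

    do_deploy[dirs] = "${WORKDIR}/deploy"             # created before the task runs
    do_deploy[lockfiles] = "${WORKDIR}/deploy.lock"   # mutual exclusion across tasks
    do_deploy[umask] = "022"
    do_deploy[postfuncs] += "report_deploy"           # hypothetical function run afterwards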
@@ -1322,16 +1225,16 @@
does not allow BitBake to automatically determine
that the variable is referred to.
</para></listitem>
<listitem><para><emphasis>vardepsexclude:</emphasis>
Specifies a space-separated list of variables
that should be excluded from a variable's dependencies
for the purposes of calculating its signature.
</para></listitem>
<listitem><para><emphasis>vardepvalue:</emphasis>
If set, instructs BitBake to ignore the actual
value of the variable and instead use the specified
value when calculating the variable's signature.
</para></listitem>
<listitem><para><emphasis>vardepsexclude:</emphasis>
Specifies a space-separated list of variables
that should be excluded from a variable's dependencies
for the purposes of calculating its signature.
</para></listitem>
<listitem><para><emphasis>vardepvalueexclude:</emphasis>
Specifies a pipe-separated list of strings to exclude
from the variable's value when calculating the
@@ -1348,8 +1251,8 @@
BitBake allows installation of event handlers within
recipe and class files.
Events are triggered at certain points during operation,
such as the beginning of an operation against a given recipe
(<filename>*.bb</filename> file), the start of a given task,
such as the beginning of operation against a given
<filename>.bb</filename>, the start of a given task,
task failure, task success, and so forth.
The intent is to make it easy to do things like email
notification on build failure.
@@ -1377,27 +1280,6 @@
the name of the triggered event.
</para>

<para>
Because you probably are only interested in a subset of events,
you would likely use the <filename>[eventmask]</filename> flag
for your event handler to be sure that only certain events
trigger the handler.
Given the previous example, suppose you only wanted the
<filename>bb.build.TaskFailed</filename> event to trigger that
event handler.
Use the flag as follows:
<literallayout class='monospaced'>
addhandler myclass_eventhandler
myclass_eventhandler[eventmask] = "bb.build.TaskFailed"
python myclass_eventhandler() {
    from bb.event import getName
    from bb import data
    print("The name of the Event is %s" % getName(e))
    print("The file we run for is %s" % data.getVar('FILE', e.data, True))
}
</literallayout>
</para>

<para>
During a standard build, the following common events might occur:
<itemizedlist>
@@ -1584,9 +1466,9 @@
complete before that task can be executed.
Here is an example:
<literallayout class='monospaced'>
do_configure[deptask] = "do_populate_sysroot"
do_configure[deptask] = "do_populate_staging"
</literallayout>
In this example, the <filename>do_populate_sysroot</filename>
In this example, the <filename>do_populate_staging</filename>
task of each item in <filename>DEPENDS</filename> must complete before
<filename>do_configure</filename> can execute.
</para>
@@ -1612,11 +1494,11 @@
item runtime dependency which must have completed before that
task can be executed.
<literallayout class='monospaced'>
do_package_qa[rdeptask] = "do_packagedata"
do_package_write[rdeptask] = "do_package"
</literallayout>
In the previous example, the <filename>do_packagedata</filename>
In the previous example, the <filename>do_package</filename>
task of each item in <filename>RDEPENDS</filename> must have
completed before <filename>do_package_qa</filename> can execute.
completed before <filename>do_package_write</filename> can execute.
</para>
</section>
@@ -1661,9 +1543,9 @@
the data in <filename>DEPENDS</filename>.
Here is an example:
<literallayout class='monospaced'>
do_patch[depends] = "quilt-native:do_populate_sysroot"
do_patch[depends] = "quilt-native:do_populate_staging"
</literallayout>
In this example, the <filename>do_populate_sysroot</filename>
In this example, the <filename>do_populate_staging</filename>
task of the target <filename>quilt-native</filename>
must have completed before the
<filename>do_patch</filename> task can execute.
@@ -43,8 +43,8 @@
<link linkend='var-DEFAULT_PREFERENCE'>D</link>
<link linkend='var-EXCLUDE_FROM_WORLD'>E</link>
<link linkend='var-FAKEROOT'>F</link>
<link linkend='var-GITDIR'>G</link>
<link linkend='var-HGDIR'>H</link>
<!-- <link linkend='var-GROUPADD_PARAM'>G</link> -->
<link linkend='var-HOMEPAGE'>H</link>
<!-- <link linkend='var-ICECC_DISABLED'>I</link> -->
<!-- <link linkend='var-glossary-j'>J</link> -->
<!-- <link linkend='var-KARCH'>K</link> -->
@@ -102,56 +102,6 @@
</glossdef>
</glossentry>
<glossentry id='var-BB_ALLOWED_NETWORKS'><glossterm>BB_ALLOWED_NETWORKS</glossterm>
<glossdef>
<para>
Specifies a space-delimited list of hosts that the fetcher
is allowed to use to obtain the required source code.
Following are considerations surrounding this variable:
<itemizedlist>
<listitem><para>
This host list is only used if
<link linkend='var-BB_NO_NETWORK'><filename>BB_NO_NETWORK</filename></link>
is either not set or set to "0".
</para></listitem>
<listitem><para>
Limited support for wildcard matching against the
beginning of host names exists.
For example, the following setting matches
<filename>git.gnu.org</filename>,
<filename>ftp.gnu.org</filename>, and
<filename>foo.git.gnu.org</filename>.
<literallayout class='monospaced'>
BB_ALLOWED_NETWORKS = "*.gnu.org"
</literallayout>
</para></listitem>
<listitem><para>
Mirrors not in the host list are skipped and
logged in debug.
</para></listitem>
<listitem><para>
Attempts to access networks not in the host list
cause a failure.
</para></listitem>
</itemizedlist>
Using <filename>BB_ALLOWED_NETWORKS</filename> in
conjunction with
<link linkend='var-PREMIRRORS'><filename>PREMIRRORS</filename></link>
is very useful.
Adding the host you want to use to
<filename>PREMIRRORS</filename> results in the source code
being fetched from an allowed location and avoids raising
an error when a host that is not allowed is in a
<link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>
statement.
This is because the fetcher does not attempt to use the
host listed in <filename>SRC_URI</filename> after a
successful fetch from the
<filename>PREMIRRORS</filename> occurs.
</para>
</glossdef>
</glossentry>
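A sketch of that combination (the mirror host name is an illustrative assumption):

    BB_ALLOWED_NETWORKS = "*.gnu.org mirror.example.com"
    PREMIRRORS_prepend = "git://.*/.* http://mirror.example.com/sources/ \n"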
<glossentry id='var-BB_CONSOLELOG'><glossterm>BB_CONSOLELOG</glossterm>
<glossdef>
<para>
@@ -856,56 +806,6 @@
</glossdef>
</glossentry>
<glossentry id='var-BB_TASK_IONICE_LEVEL'><glossterm>BB_TASK_IONICE_LEVEL</glossterm>
<glossdef>
<para>
Allows adjustment of a task's Input/Output priority.
During Autobuilder testing, random failures can occur
for tasks due to I/O starvation.
These failures occur during various QEMU runtime timeouts.
You can use the <filename>BB_TASK_IONICE_LEVEL</filename>
variable to adjust the I/O priority of these tasks.
<note>
This variable works similarly to the
<link linkend='var-BB_TASK_NICE_LEVEL'><filename>BB_TASK_NICE_LEVEL</filename></link>
variable except with a task's I/O priorities.
</note>
</para>

<para>
Set the variable as follows:
<literallayout class='monospaced'>
BB_TASK_IONICE_LEVEL = "<replaceable>class</replaceable>.<replaceable>prio</replaceable>"
</literallayout>
For <replaceable>class</replaceable>, the default value is
"2", which is best effort.
You can use "1" for realtime and "3" for idle.
If you want to use realtime, you must have superuser
privileges.
</para>

<para>
For <replaceable>prio</replaceable>, you can use any
value from "0", which is the highest priority, to "7",
which is the lowest.
The default value is "4".
You do not need any special privileges to use this range
of priority values.
<note>
In order for your I/O priority settings to take effect,
you need the Completely Fair Queuing (CFQ) Scheduler
selected for the backing block device.
To select the scheduler, use the following command form
where <replaceable>device</replaceable> is the device
(e.g. sda, sdb, and so forth):
<literallayout class='monospaced'>
$ sudo sh -c "echo cfq > /sys/block/<replaceable>device</replaceable>/queue/scheduler"
</literallayout>
</note>
</para>
</glossdef>
</glossentry>
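For example, the documented defaults could be written out explicitly as:

    BB_TASK_IONICE_LEVEL = "2.4"   # best-effort class, priority 4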
<glossentry id='var-BB_TASK_NICE_LEVEL'><glossterm>BB_TASK_NICE_LEVEL</glossterm>
<glossdef>
<para>
@@ -972,7 +872,7 @@
that run on the target <filename>MACHINE</filename>;
"nativesdk", which targets the SDK machine instead of
<filename>MACHINE</filename>; and "multilibs" in the form
"<filename>multilib:</filename><replaceable>multilib_name</replaceable>".
"<filename>multilib:<multilib_name></filename>".
</para>

<para>
@@ -984,7 +884,7 @@
metadata:
<literallayout class='monospaced'>
BBCLASSEXTEND =+ "native nativesdk"
BBCLASSEXTEND =+ "multilib:<replaceable>multilib_name</replaceable>"
BBCLASSEXTEND =+ "multilib:<multilib_name>"
</literallayout>
</para>
</glossdef>
@@ -1116,20 +1016,6 @@
</glossdef>
</glossentry>

<glossentry id='var-BBLAYERS_FETCH_DIR'><glossterm>BBLAYERS_FETCH_DIR</glossterm>
<glossdef>
<para>
Sets the base location where layers are stored.
By default, this location is set to
<filename>${COREBASE}</filename>.
This setting is used in conjunction with
<filename>bitbake-layers layerindex-fetch</filename> and
tells <filename>bitbake-layers</filename> where to place
the fetched layers.
</para>
</glossdef>
</glossentry>

<glossentry id='var-BBMASK'><glossterm>BBMASK</glossterm>
<glossdef>
<para>
@@ -1205,9 +1091,9 @@
Set the variable as you would any environment variable
and then run BitBake:
<literallayout class='monospaced'>
$ BBPATH="<replaceable>build_directory</replaceable>"
$ BBPATH="<build_directory>"
$ export BBPATH
$ bitbake <replaceable>target</replaceable>
$ bitbake <target>
</literallayout>
</para>
</glossdef>
@@ -1223,15 +1109,6 @@
</glossdef>
</glossentry>

<glossentry id='var-BBTARGETS'><glossterm>BBTARGETS</glossterm>
<glossdef>
<para>
Allows you to use a configuration file to add to the list
of command-line target recipes you want to build.
</para>
</glossdef>
</glossentry>

<glossentry id='var-BBVERSIONS'><glossterm>BBVERSIONS</glossterm>
<glossdef>
<para>
@@ -1277,15 +1154,6 @@
</glossdef>
</glossentry>

<glossentry id='var-BZRDIR'><glossterm>BZRDIR</glossterm>
<glossdef>
<para>
The directory in which files checked out of a Bazaar
system are stored.
</para>
</glossdef>
</glossentry>

</glossdiv>

<glossdiv id='var-glossary-c'><title>C</title>
@@ -1300,15 +1168,6 @@
</glossdef>
</glossentry>

<glossentry id='var-CVSDIR'><glossterm>CVSDIR</glossterm>
<glossdef>
<para>
The directory in which files checked out under the
CVS system are stored.
</para>
</glossdef>
</glossentry>

</glossdiv>

<glossdiv id='var-glossary-d'><title>D</title>
@@ -1553,32 +1412,13 @@

</glossdiv>

<!--
<glossdiv id='var-glossary-g'><title>G</title>

<glossentry id='var-GITDIR'><glossterm>GITDIR</glossterm>
<glossdef>
<para>
The directory in which a local copy of a Git repository
is stored when it is cloned.
</para>
</glossdef>
</glossentry>

</glossdiv>

-->

<glossdiv id='var-glossary-h'><title>H</title>

<glossentry id='var-HGDIR'><glossterm>HGDIR</glossterm>
<glossdef>
<para>
The directory in which files checked out of a Mercurial
system are stored.
</para>
</glossdef>
</glossentry>

<glossentry id='var-HOMEPAGE'><glossterm>HOMEPAGE</glossterm>
<glossdef>
<para>Website where more information about the software the recipe is building
@@ -1720,16 +1560,8 @@
BitBake uses <filename>OVERRIDES</filename> to control
what variables are overridden after BitBake parses
recipes and configuration files.
</para>

<para>
Following is a simple example that uses an overrides
list based on machine architectures:
<literallayout class='monospaced'>
OVERRIDES = "arm:x86:mips:powerpc"
</literallayout>
You can find information on how to use
<filename>OVERRIDES</filename> in the
You can find more information on how overrides are handled
in the
"<link linkend='conditional-syntax-overrides'>Conditional Syntax (Overrides)</link>"
section.
</para>
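A small sketch of how a value listed in OVERRIDES selects a conditional variable:

    OVERRIDES = "arm:x86"
    TEST = "default"
    TEST_arm = "arm-specific"
    # Because "arm" is listed in OVERRIDES, TEST evaluates to "arm-specific".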
@@ -1912,28 +1744,16 @@
<glossentry id='var-PROVIDES'><glossterm>PROVIDES</glossterm>
<glossdef>
<para>
A list of aliases by which a particular recipe can be
known.
By default, a recipe's own
<filename><link linkend='var-PN'>PN</link></filename>
is implicitly already in its <filename>PROVIDES</filename>
list.
If a recipe uses <filename>PROVIDES</filename>, the
additional aliases are synonyms for the recipe and can
be useful satisfying dependencies of other recipes during
the build as specified by
<filename><link linkend='var-DEPENDS'>DEPENDS</link></filename>.
</para>

<para>
Consider the following example
<filename>PROVIDES</filename> statement from a recipe
file <filename>libav_0.8.11.bb</filename>:
<literallayout class='monospaced'>
PROVIDES += "libpostproc"
</literallayout>
The <filename>PROVIDES</filename> statement results in
the "libav" recipe also being known as "libpostproc".
A list of aliases that a recipe also provides.
These aliases are useful for satisfying dependencies of
other recipes during the build (as specified by
<filename><link linkend='var-DEPENDS'>DEPENDS</link></filename>).
<note>
A recipe's own
<filename><link linkend='var-PN'>PN</link></filename>
is implicitly already in its
<filename>PROVIDES</filename> list.
</note>
</para>
</glossdef>
</glossentry>
@@ -2011,7 +1831,7 @@
Here is the general syntax to specify versions with
the <filename>RDEPENDS</filename> variable:
<literallayout class='monospaced'>
RDEPENDS_${PN} = "<replaceable>package</replaceable> (<replaceable>operator</replaceable> <replaceable>version</replaceable>)"
RDEPENDS_${PN} = "<package> (<operator> <version>)"
</literallayout>
For <filename>operator</filename>, you can specify the
following:
@@ -2077,7 +1897,7 @@
Here is the general syntax to specify versions with
the <filename>RRECOMMENDS</filename> variable:
<literallayout class='monospaced'>
RRECOMMENDS_${PN} = "<replaceable>package</replaceable> (<replaceable>operator</replaceable> <replaceable>version</replaceable>)"
RRECOMMENDS_${PN} = "<package> (<operator> <version>)"
</literallayout>
For <filename>operator</filename>, you can specify the
following:
@@ -2260,15 +2080,6 @@
</glossdef>
</glossentry>

<glossentry id='var-SVNDIR'><glossterm>SVNDIR</glossterm>
<glossdef>
<para>
The directory in which files checked out of a Subversion
system are stored.
</para>
</glossdef>
</glossentry>

</glossdiv>

<glossdiv id='var-glossary-t'><title>T</title>
@@ -313,13 +313,6 @@ a:hover {
  /*font-weight: bold;*/
}

/* This style defines how the permalink character
   appears by itself and when hovered over with
   the mouse. */

[alt='Permalink'] { color: #eee; }
[alt='Permalink']:hover { color: black; }


div.informalfigure,
div.informalexample,
@@ -800,6 +793,7 @@ div.sect2 .titlepage .title {

h1.title {
  background-color: transparent;
  background-image: url("figures/yocto-project-bw.png");
  background-repeat: no-repeat;
  height: 256px;
  text-indent: -9000px;
@@ -56,7 +56,7 @@
-->

<copyright>
<year>2004-2015</year>
<year>2004-2014</year>
<holder>Richard Purdie</holder>
<holder>Chris Larson</holder>
<holder>and Phil Blundell</holder>

@@ -89,7 +89,7 @@ quit after parsing the BB files (developers only)
show current and preferred versions of all packages
.TP
.B \-e, \-\-environment
show the global or per-recipe environment (this is what used to be bbread)
show the global or per-package environment (this is what used to be bbread)
.TP
.B \-g, \-\-graphviz
emit the dependency trees of the specified packages in the dot syntax
bitbake/doc/template/component.title.xsl (vendored, 39 lines deleted)
@@ -1,39 +0,0 @@
|
||||
<xsl:stylesheet version="1.0"
|
||||
xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
|
||||
xmlns:d="http://docbook.org/ns/docbook"
|
||||
xmlns="http://www.w3.org/1999/xhtml"
|
||||
exclude-result-prefixes="d">
|
||||
|
||||
<xsl:template name="component.title">
|
||||
<xsl:param name="node" select="."/>
|
||||
|
||||
<xsl:variable name="level">
|
||||
<xsl:choose>
|
||||
<xsl:when test="ancestor::d:section">
|
||||
<xsl:value-of select="count(ancestor::d:section)+1"/>
|
||||
</xsl:when>
|
||||
<xsl:when test="ancestor::d:sect5">6</xsl:when>
|
||||
<xsl:when test="ancestor::d:sect4">5</xsl:when>
|
||||
<xsl:when test="ancestor::d:sect3">4</xsl:when>
|
||||
<xsl:when test="ancestor::d:sect2">3</xsl:when>
|
||||
<xsl:when test="ancestor::d:sect1">2</xsl:when>
|
||||
<xsl:otherwise>1</xsl:otherwise>
|
||||
</xsl:choose>
|
||||
</xsl:variable>
|
||||
<xsl:element name="h{$level+1}" namespace="http://www.w3.org/1999/xhtml">
|
||||
<xsl:attribute name="class">title</xsl:attribute>
|
||||
<xsl:if test="$generate.id.attributes = 0">
|
||||
<xsl:call-template name="anchor">
|
||||
<xsl:with-param name="node" select="$node"/>
|
||||
<xsl:with-param name="conditional" select="0"/>
|
||||
</xsl:call-template>
|
||||
</xsl:if>
|
||||
<xsl:apply-templates select="$node" mode="object.title.markup">
|
||||
<xsl:with-param name="allow-anchors" select="1"/>
|
||||
</xsl:apply-templates>
|
||||
<xsl:call-template name="permalink">
|
||||
<xsl:with-param name="node" select="$node"/>
|
||||
</xsl:call-template>
|
||||
</xsl:element>
|
||||
</xsl:template>
|
||||
</xsl:stylesheet>
|
||||
bitbake/doc/template/division.title.xsl (vendored, 25 lines deleted)
@@ -1,25 +0,0 @@
|
||||
<xsl:stylesheet version="1.0"
|
||||
xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
|
||||
xmlns:d="http://docbook.org/ns/docbook"
|
||||
xmlns="http://www.w3.org/1999/xhtml"
|
||||
exclude-result-prefixes="d">
|
||||
|
||||
<xsl:template name="division.title">
|
||||
<xsl:param name="node" select="."/>
|
||||
|
||||
<h1>
|
||||
<xsl:attribute name="class">title</xsl:attribute>
|
||||
<xsl:call-template name="anchor">
|
||||
<xsl:with-param name="node" select="$node"/>
|
||||
<xsl:with-param name="conditional" select="0"/>
|
||||
</xsl:call-template>
|
||||
<xsl:apply-templates select="$node" mode="object.title.markup">
|
||||
<xsl:with-param name="allow-anchors" select="1"/>
|
||||
</xsl:apply-templates>
|
||||
<xsl:call-template name="permalink">
|
||||
<xsl:with-param name="node" select="$node"/>
|
||||
</xsl:call-template>
|
||||
</h1>
|
||||
</xsl:template>
|
||||
</xsl:stylesheet>
|
||||
|
||||
bitbake/doc/template/formal.object.heading.xsl (vendored, 21 lines deleted)
@@ -1,21 +0,0 @@
|
||||
<xsl:stylesheet version="1.0"
|
||||
xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
|
||||
xmlns:d="http://docbook.org/ns/docbook"
|
||||
xmlns="http://www.w3.org/1999/xhtml"
|
||||
exclude-result-prefixes="d">
|
||||
|
||||
<xsl:template name="formal.object.heading">
|
||||
<xsl:param name="object" select="."/>
|
||||
<xsl:param name="title">
|
||||
<xsl:apply-templates select="$object" mode="object.title.markup">
|
||||
<xsl:with-param name="allow-anchors" select="1"/>
|
||||
</xsl:apply-templates>
|
||||
</xsl:param>
|
||||
<p class="title">
|
||||
<b><xsl:copy-of select="$title"/></b>
|
||||
<xsl:call-template name="permalink">
|
||||
<xsl:with-param name="node" select="$object"/>
|
||||
</xsl:call-template>
|
||||
</p>
|
||||
</xsl:template>
|
||||
</xsl:stylesheet>
|
||||
bitbake/doc/template/gloss-permalinks.xsl (vendored, 14 lines deleted)
@@ -1,14 +0,0 @@
|
||||
<xsl:stylesheet version="1.0"
|
||||
xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
|
||||
xmlns:d="http://docbook.org/ns/docbook"
|
||||
xmlns="http://www.w3.org/1999/xhtml">
|
||||
|
||||
<xsl:template match="glossentry/glossterm">
|
||||
<xsl:apply-imports/>
|
||||
<xsl:if test="$generate.permalink != 0">
|
||||
<xsl:call-template name="permalink">
|
||||
<xsl:with-param name="node" select=".."/>
|
||||
</xsl:call-template>
|
||||
</xsl:if>
|
||||
</xsl:template>
|
||||
</xsl:stylesheet>
|
||||
bitbake/doc/template/permalinks.xsl (vendored, 25 lines deleted)
@@ -1,25 -0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<xsl:stylesheet version="1.0"
|
||||
xmlns="http://www.w3.org/1999/xhtml"
|
||||
xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
|
||||
|
||||
<xsl:param name="generate.permalink" select="1"/>
|
||||
<xsl:param name="permalink.text">¶</xsl:param>
|
||||
|
||||
<xsl:template name="permalink">
|
||||
<xsl:param name="node"/>
|
||||
|
||||
<xsl:if test="$generate.permalink != '0'">
|
||||
<span class="permalink">
|
||||
<a alt="Permalink" title="Permalink">
|
||||
<xsl:attribute name="href">
|
||||
<xsl:call-template name="href.target">
|
||||
<xsl:with-param name="object" select="$node"/>
|
||||
</xsl:call-template>
|
||||
</xsl:attribute>
|
||||
<xsl:copy-of select="$permalink.text"/>
|
||||
</a>
|
||||
</span>
|
||||
</xsl:if>
|
||||
</xsl:template>
|
||||
</xsl:stylesheet>
|
||||
bitbake/doc/template/section.title.xsl (vendored, 55 lines deleted)
@@ -1,55 +0,0 @@
|
||||
<xsl:stylesheet version="1.0"
|
||||
xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
|
||||
xmlns:d="http://docbook.org/ns/docbook"
|
||||
xmlns="http://www.w3.org/1999/xhtml" exclude-result-prefixes="d">
|
||||
|
||||
<xsl:template name="section.title">
|
||||
<xsl:variable name="section"
|
||||
select="(ancestor::section |
|
||||
ancestor::simplesect|
|
||||
ancestor::sect1|
|
||||
ancestor::sect2|
|
||||
ancestor::sect3|
|
||||
ancestor::sect4|
|
||||
ancestor::sect5)[last()]"/>
|
||||
|
||||
<xsl:variable name="renderas">
|
||||
<xsl:choose>
|
||||
<xsl:when test="$section/@renderas = 'sect1'">1</xsl:when>
|
||||
<xsl:when test="$section/@renderas = 'sect2'">2</xsl:when>
|
||||
<xsl:when test="$section/@renderas = 'sect3'">3</xsl:when>
|
||||
<xsl:when test="$section/@renderas = 'sect4'">4</xsl:when>
|
||||
<xsl:when test="$section/@renderas = 'sect5'">5</xsl:when>
|
||||
<xsl:otherwise><xsl:value-of select="''"/></xsl:otherwise>
|
||||
</xsl:choose>
|
||||
</xsl:variable>
|
||||
|
||||
<xsl:variable name="level">
|
||||
<xsl:choose>
|
||||
<xsl:when test="$renderas != ''">
|
||||
<xsl:value-of select="$renderas"/>
|
||||
</xsl:when>
|
||||
<xsl:otherwise>
|
||||
<xsl:call-template name="section.level">
|
||||
<xsl:with-param name="node" select="$section"/>
|
||||
</xsl:call-template>
|
||||
</xsl:otherwise>
|
||||
</xsl:choose>
|
||||
</xsl:variable>
|
||||
|
||||
<xsl:call-template name="section.heading">
|
||||
<xsl:with-param name="section" select="$section"/>
|
||||
<xsl:with-param name="level" select="$level"/>
|
||||
<xsl:with-param name="title">
|
||||
<xsl:apply-templates select="$section" mode="object.title.markup">
|
||||
<xsl:with-param name="allow-anchors" select="1"/>
|
||||
</xsl:apply-templates>
|
||||
<xsl:if test="$level > 0">
|
||||
<xsl:call-template name="permalink">
|
||||
<xsl:with-param name="node" select="$section"/>
|
||||
</xsl:call-template>
|
||||
</xsl:if>
|
||||
</xsl:with-param>
|
||||
</xsl:call-template>
|
||||
</xsl:template>
|
||||
</xsl:stylesheet>
|
||||
@@ -21,7 +21,7 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

__version__ = "1.28.0"
__version__ = "1.22.0"

import sys
if sys.version_info < (2, 7, 3):
@@ -94,16 +94,17 @@ def note(*args):
def warn(*args):
    logger.warn(''.join(args))

def error(*args, **kwargs):
    logger.error(''.join(args), extra=kwargs)
def error(*args):
    logger.error(''.join(args))

def fatal(*args):
    logger.critical(''.join(args))
    sys.exit(1)

def fatal(*args, **kwargs):
    logger.critical(''.join(args), extra=kwargs)
    raise BBHandledException()

def deprecated(func, name=None, advice=""):
    """This is a decorator which can be used to mark functions
    as deprecated. It will result in a warning being emitted
    as deprecated. It will result in a warning being emmitted
    when the function is used."""
    import warnings
@@ -23,7 +23,7 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
#Based on functions from the base bb module, Copyright 2003 Holger Schurig

import os
import sys
@@ -31,7 +31,6 @@ import logging
import shlex
import glob
import time
import stat
import bb
import bb.msg
import bb.process
@@ -43,22 +42,9 @@ logger = logging.getLogger('BitBake.Build')

NULL = open(os.devnull, 'r+')

__mtime_cache = {}

def cached_mtime_noerror(f):
    if f not in __mtime_cache:
        try:
            __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
        except OSError:
            return 0
    return __mtime_cache[f]

def reset_cache():
    global __mtime_cache
    __mtime_cache = {}

# When we execute a Python function, we'd like certain things
# in all namespaces, hence we add them to __builtins__.
# When we execute a python function we'd like certain things
# in all namespaces, hence we add them to __builtins__
# If we do not do this and use the exec globals, they will
# not be available to subfunctions.
__builtins__['bb'] = bb
@@ -157,9 +143,9 @@ class LogTee(object):
        self.outfile.flush()

def exec_func(func, d, dirs = None):
    """Execute a BB 'function'"""
    """Execute an BB 'function'"""

    body = d.getVar(func, False)
    body = d.getVar(func)
    if not body:
    if body is None:
        logger.warn("Function %s doesn't exist", func)
@@ -242,7 +228,7 @@ def exec_func_python(func, d, runfile, cwd=None):
    code = _functionfmt.format(function=func, body=d.getVar(func, True))
    bb.utils.mkdirhier(os.path.dirname(runfile))
    with open(runfile, 'w') as script:
        bb.data.emit_func_python(func, script, d)
        script.write(code)

    if cwd:
        try:
@@ -256,9 +242,10 @@ def exec_func_python(func, d, runfile, cwd=None):
    try:
        comp = utils.better_compile(code, func, bbfile)
        utils.better_exec(comp, {"d": d}, code, bbfile)
    except (bb.parse.SkipRecipe, bb.build.FuncFailed):
        raise
    except:
        if sys.exc_info()[0] in (bb.parse.SkipPackage, bb.build.FuncFailed):
            raise

        raise FuncFailed(func, None)
    finally:
        bb.debug(2, "Python function %s finished" % func)
@@ -313,7 +300,7 @@ def exec_func_shell(func, d, runfile, cwd=None):
# cleanup
ret=$?
trap '' 0
exit $ret
exit $?
''')

    os.chmod(runfile, 0775)
@@ -329,52 +316,14 @@ exit $ret
    else:
        logfile = sys.stdout

    def readfifo(data):
        lines = data.split('\0')
        for line in lines:
            splitval = line.split(' ', 1)
            cmd = splitval[0]
            if len(splitval) > 1:
                value = splitval[1]
            else:
                value = ''
            if cmd == 'bbplain':
                bb.plain(value)
            elif cmd == 'bbnote':
                bb.note(value)
            elif cmd == 'bbwarn':
                bb.warn(value)
            elif cmd == 'bberror':
                bb.error(value)
            elif cmd == 'bbfatal':
                # The caller will call exit themselves, so bb.error() is
                # what we want here rather than bb.fatal()
                bb.error(value)
            elif cmd == 'bbfatal_log':
                bb.error(value, forcelog=True)
            elif cmd == 'bbdebug':
                splitval = value.split(' ', 1)
                level = int(splitval[0])
                value = splitval[1]
                bb.debug(level, value)
    bb.debug(2, "Executing shell function %s" % func)

    tempdir = d.getVar('T', True)
    fifopath = os.path.join(tempdir, 'fifo.%s' % os.getpid())
    if os.path.exists(fifopath):
        os.unlink(fifopath)
    os.mkfifo(fifopath)
    with open(fifopath, 'r+') as fifo:
        try:
            bb.debug(2, "Executing shell function %s" % func)

            try:
                with open(os.devnull, 'r+') as stdin:
                    bb.process.run(cmd, shell=False, stdin=stdin, log=logfile, extrafiles=[(fifo,readfifo)])
            except bb.process.CmdError:
                logfn = d.getVar('BB_LOGFILE', True)
                raise FuncFailed(func, logfn)
        finally:
            os.unlink(fifopath)
    try:
        with open(os.devnull, 'r+') as stdin:
            bb.process.run(cmd, shell=False, stdin=stdin, log=logfile)
    except bb.process.CmdError:
        logfn = d.getVar('BB_LOGFILE', True)
        raise FuncFailed(func, logfn)

    bb.debug(2, "Shell function %s finished" % func)
@@ -413,13 +362,6 @@ def _exec_task(fn, task, d, quieterr):
        nice = int(nice) - curnice
        newnice = os.nice(nice)
        logger.debug(1, "Renice to %s " % newnice)
    ionice = localdata.getVar("BB_TASK_IONICE_LEVEL", True)
    if ionice:
        try:
            cls, prio = ionice.split(".", 1)
            bb.utils.ioprio_set(os.getpid(), int(cls), int(prio))
        except:
            bb.warn("Invalid ionice level %s" % ionice)

    bb.utils.mkdirhier(tempdir)
@@ -455,10 +397,7 @@ def _exec_task(fn, task, d, quieterr):
            self.triggered = False
            logging.Handler.__init__(self, logging.ERROR)
        def emit(self, record):
            if getattr(record, 'forcelog', False):
                self.triggered = False
            else:
                self.triggered = True
            self.triggered = True

    # Handle logfiles
    si = open('/dev/null', 'r')
@@ -479,7 +418,7 @@ def _exec_task(fn, task, d, quieterr):
    os.dup2(logfile.fileno(), oso[1])
    os.dup2(logfile.fileno(), ose[1])

    # Ensure Python logging goes to the logfile
    # Ensure python logging goes to the logfile
    handler = logging.StreamHandler(logfile)
    handler.setFormatter(logformatter)
    # Always enable full debug output into task logfiles
@@ -568,7 +507,7 @@ def exec_task(fn, task, d, profile = False):
        event.fire(failedevent, d)
        return 1

def stamp_internal(taskname, d, file_name, baseonly=False):
def stamp_internal(taskname, d, file_name):
    """
    Internal stamp helper function
    Makes sure the stamp directory exists
@@ -589,16 +528,13 @@ def stamp_internal(taskname, d, file_name, baseonly=False):
        file_name = d.getVar('BB_FILENAME', True)
    extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info', True) or ""

    if baseonly:
        return stamp

    if not stamp:
        return

    stamp = bb.parse.siggen.stampfile(stamp, file_name, taskname, extrainfo)

    stampdir = os.path.dirname(stamp)
    if cached_mtime_noerror(stampdir) == 0:
    if bb.parse.cached_mtime_noerror(stampdir) == 0:
        bb.utils.mkdirhier(stampdir)

    return stamp
@@ -656,9 +592,8 @@ def make_stamp(task, d, file_name = None):
    # If we're in task context, write out a signature file for each task
    # as it completes
    if not task.endswith("_setscene") and task != "do_setscene" and not file_name:
        stampbase = stamp_internal(task, d, None, True)
        file_name = d.getVar('BB_FILENAME', True)
        bb.parse.siggen.dump_sigtask(file_name, task, stampbase, True)
        bb.parse.siggen.dump_sigtask(file_name, task, d.getVar('STAMP', True), True)

def del_stamp(task, d, file_name = None):
    """
@@ -693,8 +628,8 @@ def stampfile(taskname, d, file_name = None):
    """
    return stamp_internal(taskname, d, file_name)

def add_tasks(tasklist, d):
    task_deps = d.getVar('_task_deps', False)
def add_tasks(tasklist, deltasklist, d):
    task_deps = d.getVar('_task_deps')
    if not task_deps:
        task_deps = {}
    if not 'tasks' in task_deps:
@@ -705,6 +640,9 @@ def add_tasks(tasklist, d):
    for task in tasklist:
        task = d.expand(task)

        if task in deltasklist:
            continue

        d.setVarFlag(task, 'task', 1)

        if not task in task_deps['tasks']:
@@ -741,8 +679,8 @@ def addtask(task, before, after, d):
        task = "do_" + task

    d.setVarFlag(task, "task", 1)
    bbtasks = d.getVar('__BBTASKS', False) or []
    if task not in bbtasks:
    bbtasks = d.getVar('__BBTASKS') or []
    if not task in bbtasks:
        bbtasks.append(task)
    d.setVar('__BBTASKS', bbtasks)

@@ -764,14 +702,8 @@ def deltask(task, d):
    if task[:3] != "do_":
        task = "do_" + task

    bbtasks = d.getVar('__BBTASKS', False) or []
    if task in bbtasks:
        bbtasks.remove(task)
        d.setVar('__BBTASKS', bbtasks)
    bbtasks = d.getVar('__BBDELTASKS') or []
    if not task in bbtasks:
        bbtasks.append(task)
    d.setVar('__BBDELTASKS', bbtasks)

    d.delVarFlag(task, 'deps')
    for bbtask in d.getVar('__BBTASKS', False) or []:
        deps = d.getVarFlag(bbtask, 'deps') or []
        if task in deps:
            deps.remove(task)
            d.setVarFlag(bbtask, 'deps', deps)
@@ -43,7 +43,7 @@ except ImportError:
    logger.info("Importing cPickle failed. "
                "Falling back to a very slow implementation.")

__cache_version__ = "148"
__cache_version__ = "147"

def getCacheFile(path, filename, data_hash):
    return os.path.join(path, filename + "." + data_hash)
@@ -85,8 +85,8 @@ class RecipeInfoCommon(object):
        return out_dict

    @classmethod
    def getvar(cls, var, metadata, expand = True):
        return metadata.getVar(var, expand) or ''
    def getvar(cls, var, metadata):
        return metadata.getVar(var, True) or ''


class CoreRecipeInfo(RecipeInfoCommon):
@@ -142,7 +142,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
        self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)
        self.rdepends_pkg = self.pkgvar('RDEPENDS', self.packages, metadata)
        self.rrecommends_pkg = self.pkgvar('RRECOMMENDS', self.packages, metadata)
        self.inherits = self.getvar('__inherit_cache', metadata, expand=False)
        self.inherits = self.getvar('__inherit_cache', metadata)
        self.fakerootenv = self.getvar('FAKEROOTENV', metadata)
        self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata)
        self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata)
@@ -225,16 +225,14 @@ class CoreRecipeInfo(RecipeInfoCommon):
        for package in self.packages_dynamic:
            cachedata.packages_dynamic[package].append(fn)

        # Build hash of runtime depends and recommends
        # Build hash of runtime depends and rececommends
        for package in self.packages + [self.pn]:
            cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
            cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]

        # Collect files we may need for possible world-dep
        # calculations
        if self.not_world:
            logger.debug(1, "EXCLUDE FROM WORLD: %s", fn)
        else:
        if not self.not_world:
            cachedata.possible_world.append(fn)

        # create a collection of all targets for sanity checking
@@ -261,7 +259,7 @@ class Cache(object):
|
||||
|
||||
def __init__(self, data, data_hash, caches_array):
|
||||
# Pass caches_array information into Cache Constructor
|
||||
# It will be used later for deciding whether we
|
||||
# It will be used in later for deciding whether we
|
||||
# need extra cache file dump/load support
|
||||
self.caches_array = caches_array
|
||||
self.cachedir = data.getVar("CACHE", True)
|
||||
@@ -528,25 +526,9 @@ class Cache(object):
|
||||
|
||||
if hasattr(info_array[0], 'file_checksums'):
|
||||
for _, fl in info_array[0].file_checksums.items():
|
||||
fl = fl.strip()
|
||||
while fl:
|
||||
# A .split() would be simpler but means spaces or colons in filenames would break
|
||||
a = fl.find(":True")
|
||||
b = fl.find(":False")
|
||||
if ((a < 0) and b) or ((b > 0) and (b < a)):
|
||||
f = fl[:b+6]
|
||||
fl = fl[b+7:]
|
||||
elif ((b < 0) and a) or ((a > 0) and (a < b)):
|
||||
f = fl[:a+5]
|
||||
fl = fl[a+6:]
|
||||
else:
|
||||
break
|
||||
fl = fl.strip()
|
||||
if "*" in f:
|
||||
continue
|
||||
f, exist = f.split(":")
|
||||
if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
|
||||
logger.debug(2, "Cache: %s's file checksum list file %s changed",
|
||||
for f in fl.split():
|
||||
if not ('*' in f or os.path.exists(f)):
|
||||
logger.debug(2, "Cache: %s's file checksum list file %s was removed",
|
||||
fn, f)
|
||||
self.remove(fn)
|
||||
return False
|
||||
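Note: the hunk above replaces a naive `.split()` of the file checksum list with a scan for the ":True"/":False" markers, so filenames containing spaces survive. A hedged, standalone sketch of that scan (the function name and sample paths are illustrative only):

    # Split "path a:True path-b:False" into entries without breaking on
    # spaces in filenames; assumes every entry ends in ":True" or ":False".
    def split_checksum_list(fl):
        entries = []
        fl = fl.strip()
        while fl:
            a = fl.find(":True")
            b = fl.find(":False")
            if a < 0 and b < 0:
                break
            if b >= 0 and (a < 0 or b < a):
                entries.append(fl[:b + 6])   # include ":False"
                fl = fl[b + 6:].strip()
            else:
                entries.append(fl[:a + 5])   # include ":True"
                fl = fl[a + 5:].strip()
        return entries

    print(split_checksum_list("/tmp/a file:True /tmp/b:False"))
    # ['/tmp/a file:True', '/tmp/b:False']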
@@ -636,13 +618,10 @@ class Cache(object):
    def mtime(cachefile):
        return bb.parse.cached_mtime_noerror(cachefile)

    def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
    def add_info(self, filename, info_array, cacheData, parsed=None):
        if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
            cacheData.add_from_recipeinfo(filename, info_array)

            if watcher:
                watcher(info_array[0].file_depends)

        if not self.has_cache:
            return

@@ -672,25 +651,25 @@ class Cache(object):
        """
        chdir_back = False

        from bb import parse
        from bb import data, parse

        # expand tmpdir to include this topdir
        config.setVar('TMPDIR', config.getVar('TMPDIR', True) or "")
        data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config)
        bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
        oldpath = os.path.abspath(os.getcwd())
        parse.cached_mtime_noerror(bbfile_loc)
        bb_data = config.createCopy()
        bb_data = data.init_db(config)
        # The ConfHandler first looks if there is a TOPDIR and if not
        # then it would call getcwd().
        # Previously, we chdir()ed to bbfile_loc, called the handler
        # and finally chdir()ed back, a couple of thousand times. We now
        # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
        if not bb_data.getVar('TOPDIR', False):
        if not data.getVar('TOPDIR', bb_data):
            chdir_back = True
            bb_data.setVar('TOPDIR', bbfile_loc)
            data.setVar('TOPDIR', bbfile_loc, bb_data)
        try:
            if appends:
                bb_data.setVar('__BBAPPEND', " ".join(appends))
                data.setVar('__BBAPPEND', " ".join(appends), bb_data)
            bb_data = parse.handle(bbfile, bb_data)
            if chdir_back:
                os.chdir(oldpath)
@@ -713,7 +692,7 @@ def init(cooker):

    * Its mtime
    * The mtimes of all its dependencies
    * Whether it caused a parse.SkipRecipe exception
    * Whether it caused a parse.SkipPackage exception

    Files causing parsing errors are evicted from the cache.

@@ -783,6 +762,16 @@ class MultiProcessCache(object):

        self.cachedata = data

    def internSet(self, items):
        new = set()
        for i in items:
            new.add(intern(i))
        return new

    def compress_keys(self, data):
        # Override in subclasses if desired
        return

    def create_cachedata(self):
        data = [{}]
        return data
@@ -823,7 +812,15 @@ class MultiProcessCache(object):

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        data = self.cachedata
        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except (IOError, EOFError):
            data, version = None, None

        if version != self.__class__.CACHE_VERSION:
            data = self.create_cachedata()

        for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
            f = os.path.join(os.path.dirname(self.cachefile), f)
@@ -832,16 +829,16 @@ class MultiProcessCache(object):
                    p = pickle.Unpickler(fd)
                    extradata, version = p.load()
            except (IOError, EOFError):
                os.unlink(f)
                continue
                extradata, version = self.create_cachedata(), None

            if version != self.__class__.CACHE_VERSION:
                os.unlink(f)
                continue

            self.merge_data(extradata, data)
            os.unlink(f)

        self.compress_keys(data)

        with open(self.cachefile, "wb") as f:
            p = pickle.Pickler(f, -1)
            p.dump([data, self.__class__.CACHE_VERSION])
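Note: the MultiProcessCache hunk above shows the save/merge pattern: each worker writes a "<cachefile>-suffix" extras file, and the saver takes a lock, loads the main pickle, folds the extras in, and rewrites the main file. A minimal modern-Python sketch of that merge step, assuming the same [data, version] pickle layout; merge_data() stands in for the subclass hook and locking is elided:

    import glob, os, pickle

    def save_merge(cachefile, fresh, version, merge_data):
        data = fresh
        try:
            with open(cachefile, "rb") as f:
                data, ver = pickle.load(f)
                if ver != version:
                    data = fresh   # stale cache format, start over
        except (IOError, EOFError):
            pass
        for extra in glob.glob(cachefile + "-*"):
            try:
                with open(extra, "rb") as f:
                    extradata, ver = pickle.load(f)
                if ver == version:
                    merge_data(extradata, data)
            except (IOError, EOFError):
                pass            # corrupt extras are simply discarded
            os.unlink(extra)
        with open(cachefile, "wb") as f:
            pickle.dump([data, version], f, -1)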
bitbake/lib/bb/codeparser.py

@@ -33,87 +33,9 @@ def check_indent(codestr):
    return codestr


# Basically pickle, in python 2.7.3 at least, does badly with data duplication
# upon pickling and unpickling. Combine this with duplicate objects and things
# are a mess.
#
# When the sets are originally created, python calls intern() on the set keys
# which significantly improves memory usage. Sadly the pickle/unpickle process
# doesn't call intern() on the keys and results in the same strings being duplicated
# in memory. This also means pickle will save the same string multiple times in
# the cache file.
#
# By having shell and python cacheline objects with setstate/getstate, we force
# the object creation through our own routine where we can call intern (via internSet).
#
# We also use hashable frozensets and ensure we use references to these so that
# duplicates can be removed, both in memory and in the resulting pickled data.
#
# By playing these games, the size of the cache file shrinks dramatically
# meaning faster load times and the reloaded cache files also consume much less
# memory. Smaller cache files, faster load times and lower memory usage is good.
#
# A custom getstate/setstate using tuples is actually worth 15% cachesize by
# avoiding duplication of the attribute names!

class SetCache(object):
    def __init__(self):
        self.setcache = {}

    def internSet(self, items):

        new = []
        for i in items:
            new.append(intern(i))
        s = frozenset(new)
        if hash(s) in self.setcache:
            return self.setcache[hash(s)]
        self.setcache[hash(s)] = s
        return s

codecache = SetCache()

class pythonCacheLine(object):
    def __init__(self, refs, execs, contains):
        self.refs = codecache.internSet(refs)
        self.execs = codecache.internSet(execs)
        self.contains = {}
        for c in contains:
            self.contains[c] = codecache.internSet(contains[c])

    def __getstate__(self):
        return (self.refs, self.execs, self.contains)

    def __setstate__(self, state):
        (refs, execs, contains) = state
        self.__init__(refs, execs, contains)
    def __hash__(self):
        l = (hash(self.refs), hash(self.execs))
        for c in sorted(self.contains.keys()):
            l = l + (c, hash(self.contains[c]))
        return hash(l)
    def __repr__(self):
        return " ".join([str(self.refs), str(self.execs), str(self.contains)])


class shellCacheLine(object):
    def __init__(self, execs):
        self.execs = codecache.internSet(execs)

    def __getstate__(self):
        return (self.execs)

    def __setstate__(self, state):
        (execs) = state
        self.__init__(execs)
    def __hash__(self):
        return hash(self.execs)
    def __repr__(self):
        return str(self.execs)
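Note: the comment block above explains the frozenset-interning trick this diff adds. A minimal sketch of the core idea, in modern Python (sys.intern replaces Python 2's intern(); keying by hash mirrors the original code):

    import sys

    class SetCache(object):
        def __init__(self):
            self.setcache = {}

        def internSet(self, items):
            # Canonicalize equal sets to one shared frozenset so both
            # memory and the pickled cache store each set (and string) once.
            s = frozenset(sys.intern(i) for i in items)
            return self.setcache.setdefault(hash(s), s)

    cache = SetCache()
    a = cache.internSet(["do_fetch", "do_unpack"])
    b = cache.internSet(["do_unpack", "do_fetch"])
    assert a is b   # duplicates collapse to one shared object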
class CodeParserCache(MultiProcessCache):
    cache_file_name = "bb_codeparser.dat"
    CACHE_VERSION = 7
    CACHE_VERSION = 6

    def __init__(self):
        MultiProcessCache.__init__(self)
@@ -122,38 +44,32 @@ class CodeParserCache(MultiProcessCache):
        self.pythoncacheextras = self.cachedata_extras[0]
        self.shellcacheextras = self.cachedata_extras[1]

        # To avoid duplication in the codeparser cache, keep
        # a lookup of hashes of objects we already have
        self.pythoncachelines = {}
        self.shellcachelines = {}

    def newPythonCacheLine(self, refs, execs, contains):
        cacheline = pythonCacheLine(refs, execs, contains)
        h = hash(cacheline)
        if h in self.pythoncachelines:
            return self.pythoncachelines[h]
        self.pythoncachelines[h] = cacheline
        return cacheline

    def newShellCacheLine(self, execs):
        cacheline = shellCacheLine(execs)
        h = hash(cacheline)
        if h in self.shellcachelines:
            return self.shellcachelines[h]
        self.shellcachelines[h] = cacheline
        return cacheline

    def init_cache(self, d):
        # Check if we already have the caches
        if self.pythoncache:
            return

        MultiProcessCache.init_cache(self, d)

        # cachedata gets re-assigned in the parent
        self.pythoncache = self.cachedata[0]
        self.shellcache = self.cachedata[1]

    def compress_keys(self, data):
        # When the dicts are originally created, python calls intern() on the set keys
        # which significantly improves memory usage. Sadly the pickle/unpickle process
        # doesn't call intern() on the keys and results in the same strings being duplicated
        # in memory. This also means pickle will save the same string multiple times in
        # the cache file. By interning the data here, the cache file shrinks dramatically
        # meaning faster load times and the reloaded cache files also consume much less
        # memory. This is worth any performance hit from this loops and the use of the
        # intern() data storage.
        # Python 3.x may behave better in this area
        for h in data[0]:
            data[0][h]["refs"] = self.internSet(data[0][h]["refs"])
            data[0][h]["execs"] = self.internSet(data[0][h]["execs"])
            for k in data[0][h]["contains"]:
                data[0][h]["contains"][k] = self.internSet(data[0][h]["contains"][k])
        for h in data[1]:
            data[1][h]["execs"] = self.internSet(data[1][h]["execs"])
        return

    def create_cachedata(self):
        data = [{}, {}]
        return data
@@ -187,7 +103,7 @@ class BufferedLogger(Logger):

class PythonParser():
    getvars = (".getVar", ".appendVar", ".prependVar")
    containsfuncs = ("bb.utils.contains", "base_contains", "bb.utils.contains_any")
    containsfuncs = ("bb.utils.contains", "base_contains", "oe.utils.contains")
    execfuncs = ("bb.build.exec_func", "bb.build.exec_task")

    def warn(self, func, arg):
@@ -249,25 +165,18 @@ class PythonParser():
        self.unhandled_message = "while parsing %s, %s" % (name, self.unhandled_message)

    def parse_python(self, node):
        if not node or not node.strip():
            return

        h = hash(str(node))

        if h in codeparsercache.pythoncache:
            self.references = set(codeparsercache.pythoncache[h].refs)
            self.execs = set(codeparsercache.pythoncache[h].execs)
            self.contains = {}
            for i in codeparsercache.pythoncache[h].contains:
                self.contains[i] = set(codeparsercache.pythoncache[h].contains[i])
            self.references = codeparsercache.pythoncache[h]["refs"]
            self.execs = codeparsercache.pythoncache[h]["execs"]
            self.contains = codeparsercache.pythoncache[h]["contains"]
            return

        if h in codeparsercache.pythoncacheextras:
            self.references = set(codeparsercache.pythoncacheextras[h].refs)
            self.execs = set(codeparsercache.pythoncacheextras[h].execs)
            self.contains = {}
            for i in codeparsercache.pythoncacheextras[h].contains:
                self.contains[i] = set(codeparsercache.pythoncacheextras[h].contains[i])
            self.references = codeparsercache.pythoncacheextras[h]["refs"]
            self.execs = codeparsercache.pythoncacheextras[h]["execs"]
            self.contains = codeparsercache.pythoncacheextras[h]["contains"]
            return

        code = compile(check_indent(str(node)), "<string>", "exec",
@@ -279,7 +188,10 @@ class PythonParser():

        self.execs.update(self.var_execs)

        codeparsercache.pythoncacheextras[h] = codeparsercache.newPythonCacheLine(self.references, self.execs, self.contains)
        codeparsercache.pythoncacheextras[h] = {}
        codeparsercache.pythoncacheextras[h]["refs"] = self.references
        codeparsercache.pythoncacheextras[h]["execs"] = self.execs
        codeparsercache.pythoncacheextras[h]["contains"] = self.contains

class ShellParser():
    def __init__(self, name, log):
@@ -298,17 +210,18 @@ class ShellParser():
        h = hash(str(value))

        if h in codeparsercache.shellcache:
            self.execs = set(codeparsercache.shellcache[h].execs)
            self.execs = codeparsercache.shellcache[h]["execs"]
            return self.execs

        if h in codeparsercache.shellcacheextras:
            self.execs = set(codeparsercache.shellcacheextras[h].execs)
            self.execs = codeparsercache.shellcacheextras[h]["execs"]
            return self.execs

        self._parse_shell(value)
        self.execs = set(cmd for cmd in self.allexecs if cmd not in self.funcdefs)

        codeparsercache.shellcacheextras[h] = codeparsercache.newShellCacheLine(self.execs)
        codeparsercache.shellcacheextras[h] = {}
        codeparsercache.shellcacheextras[h]["execs"] = self.execs

        return self.execs
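Note: both parsers above consult a two-level cache keyed by a hash of the source text: a read-only table loaded from disk plus a per-run "extras" table for new entries, which the save step merges back. A generic sketch of that lookup (parse() is a hypothetical stand-in for the real AST/shell parsing):

    cache, extras = {}, {}

    def parse_cached(src, parse):
        h = hash(src)
        for table in (cache, extras):
            if h in table:
                return table[h]          # cache hit, skip parsing
        result = parse(src)
        extras[h] = result               # merged into the main cache on save
        return result

    print(parse_cached("echo hi", lambda s: {"execs": {s.split()[0]}}))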
bitbake/lib/bb/command.py

@@ -68,12 +68,10 @@ class Command:
            if not hasattr(command_method, 'readonly') or False == getattr(command_method, 'readonly'):
                return None, "Not able to execute not readonly commands in readonly mode"
        try:
            if getattr(command_method, 'needconfig', False):
                self.cooker.updateCacheSync()
            result = command_method(self, commandline)
        except CommandError as exc:
            return None, exc.args[0]
        except (Exception, SystemExit):
        except Exception:
            import traceback
            return None, traceback.format_exc()
        else:
@@ -88,10 +86,7 @@ class Command:

    def runAsyncCommand(self):
        try:
            if self.cooker.state in (bb.cooker.state.error, bb.cooker.state.shutdown, bb.cooker.state.forceshutdown):
                # updateCache will trigger a shutdown of the parser
                # and then raise BBHandledException triggering an exit
                self.cooker.updateCache()
            if self.cooker.state == bb.cooker.state.error:
                return False
            if self.currentAsyncCommand is not None:
                (command, options) = self.currentAsyncCommand
@@ -125,11 +120,11 @@ class Command:

    def finishAsyncCommand(self, msg=None, code=None):
        if msg or msg == "":
            bb.event.fire(CommandFailed(msg), self.cooker.expanded_data)
            bb.event.fire(CommandFailed(msg), self.cooker.event_data)
        elif code:
            bb.event.fire(CommandExit(code), self.cooker.expanded_data)
            bb.event.fire(CommandExit(code), self.cooker.event_data)
        else:
            bb.event.fire(CommandCompleted(), self.cooker.expanded_data)
            bb.event.fire(CommandCompleted(), self.cooker.event_data)
        self.currentAsyncCommand = None
        self.cooker.finishcommand()

@@ -181,16 +176,6 @@ class CommandsSync:
        value = str(params[1])
        command.cooker.data.setVar(varname, value)

    def getSetVariable(self, command, params):
        """
        Read the value of a variable from data and set it into the datastore
        which effectively expands and locks the value.
        """
        varname = params[0]
        result = self.getVariable(command, params)
        command.cooker.data.setVar(varname, result)
        return result

    def setConfig(self, command, params):
        """
        Set the value of variable in configuration
@@ -216,7 +201,6 @@ class CommandsSync:
        postfiles = params[1].split()
        command.cooker.configuration.prefile = prefiles
        command.cooker.configuration.postfile = postfiles
    setPrePostConfFiles.needconfig = False

    def getCpuCount(self, command, params):
        """
@@ -224,12 +208,10 @@ class CommandsSync:
        """
        return bb.utils.cpu_count()
    getCpuCount.readonly = True
    getCpuCount.needconfig = False

    def matchFile(self, command, params):
        fMatch = params[0]
        return command.cooker.matchFile(fMatch)
    matchFile.needconfig = False

    def generateNewImage(self, command, params):
        image = params[0]
@@ -243,7 +225,6 @@ class CommandsSync:
    def ensureDir(self, command, params):
        directory = params[0]
        bb.utils.mkdirhier(directory)
    ensureDir.needconfig = False

    def setVarFile(self, command, params):
        """
@@ -254,7 +235,6 @@ class CommandsSync:
        default_file = params[2]
        op = params[3]
        command.cooker.modifyConfigurationVar(var, val, default_file, op)
    setVarFile.needconfig = False

    def removeVarFile(self, command, params):
        """
@@ -262,7 +242,6 @@ class CommandsSync:
        """
        var = params[0]
        command.cooker.removeConfigurationVar(var)
    removeVarFile.needconfig = False

    def createConfigFile(self, command, params):
        """
@@ -270,7 +249,6 @@ class CommandsSync:
        """
        name = params[0]
        command.cooker.createConfigFile(name)
    createConfigFile.needconfig = False

    def setEventMask(self, command, params):
        handlerNum = params[0]
@@ -278,7 +256,6 @@ class CommandsSync:
        debug_domains = params[2]
        mask = params[3]
        return bb.event.set_UIHmask(handlerNum, llevel, debug_domains, mask)
    setEventMask.needconfig = False

    def setFeatures(self, command, params):
        """
@@ -286,17 +263,11 @@ class CommandsSync:
        """
        features = params[0]
        command.cooker.setFeatures(features)
    setFeatures.needconfig = False

    # although we change the internal state of the cooker, this is transparent since
    # we always take and leave the cooker in state.initial
    setFeatures.readonly = True

    def updateConfig(self, command, params):
        options = params[0]
        environment = params[1]
        command.cooker.updateConfigOpts(options, environment)
    updateConfig.needconfig = False

class CommandsAsync:
    """
    A class of asynchronous commands
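Note: the readonly/needconfig markers added above are plain function attributes that the command dispatcher inspects before running a method. A hedged sketch of that convention (the command name and dispatcher are illustrative, not the real BitBake command table):

    class CommandsSync:
        def getCpuCount(self, command, params):
            import multiprocessing
            return multiprocessing.cpu_count()
        getCpuCount.readonly = True      # safe in read-only server mode
        getCpuCount.needconfig = False   # no parsed configuration required

    def run(cmds, name, readonly_mode):
        method = getattr(cmds, name)
        if readonly_mode and not getattr(method, 'readonly', False):
            return None, "Not able to execute not readonly commands in readonly mode"
        if getattr(method, 'needconfig', True):
            pass  # a real server would sync the cache here first
        return method(None, []), None

    print(run(CommandsSync(), "getCpuCount", readonly_mode=True))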
bitbake/lib/bb/cooker.py

@@ -35,13 +35,10 @@ from contextlib import closing
from functools import wraps
from collections import defaultdict
import bb, bb.exceptions, bb.command
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue
import Queue
import signal
import subprocess
import errno
import prserv.serv
import pyinotify

logger = logging.getLogger("BitBake")
collectlog = logging.getLogger("BitBake.Collection")
@@ -114,49 +111,22 @@ class BBCooker:
    Manages one bitbake build run
    """

    def __init__(self, configuration, featureSet=None):
    def __init__(self, configuration, featureSet = []):
        self.recipecache = None
        self.skiplist = {}
        self.featureset = CookerFeatures()
        if featureSet:
            for f in featureSet:
                self.featureset.setFeature(f)
        for f in featureSet:
            self.featureset.setFeature(f)

        self.configuration = configuration

        self.configwatcher = pyinotify.WatchManager()
        self.configwatcher.bbseen = []
        self.configwatcher.bbwatchedfiles = []
        self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
        self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
                         pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
                         pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
        self.watcher = pyinotify.WatchManager()
        self.watcher.bbseen = []
        self.watcher.bbwatchedfiles = []
        self.notifier = pyinotify.Notifier(self.watcher, self.notifications)


        self.initConfigurationData()

        self.inotify_modified_files = []

        def _process_inotify_updates(server, notifier_list, abort):
            for n in notifier_list:
                if n.check_events(timeout=0):
                    # read notified events and enqeue them
                    n.read_events()
                    n.process_events()
            return 1.0

        self.configuration.server_register_idlecallback(_process_inotify_updates, [self.confignotifier, self.notifier])

        self.baseconfig_valid = True
        self.parsecache_valid = False

        # Take a lock so only one copy of bitbake can run against a given build
        # directory at a time
        if not self.lockBitbake():
        lockfile = self.data.expand("${TOPDIR}/bitbake.lock")
        self.lock = bb.utils.lockfile(lockfile, False, False)
        if not self.lock:
            bb.fatal("Only one copy of bitbake should be run against a build directory")
        try:
            self.lock.seek(0)
@@ -183,71 +153,20 @@ class BBCooker:
        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)

    def config_notifications(self, event):
        if not event.pathname in self.configwatcher.bbwatchedfiles:
            return
        if not event.path in self.inotify_modified_files:
            self.inotify_modified_files.append(event.path)
        self.baseconfig_valid = False

    def notifications(self, event):
        if not event.path in self.inotify_modified_files:
            self.inotify_modified_files.append(event.path)
        self.parsecache_valid = False

    def add_filewatch(self, deps, watcher=None):
        if not watcher:
            watcher = self.watcher
        for i in deps:
            watcher.bbwatchedfiles.append(i[0])
            f = os.path.dirname(i[0])
            if f in watcher.bbseen:
                continue
            watcher.bbseen.append(f)
            watchtarget = None
            while True:
                # We try and add watches for files that don't exist but if they did, would influence
                # the parser. The parent directory of these files may not exist, in which case we need
                # to watch any parent that does exist for changes.
                try:
                    watcher.add_watch(f, self.watchmask, quiet=False)
                    if watchtarget:
                        watcher.bbwatchedfiles.append(watchtarget)
                    break
                except pyinotify.WatchManagerError as e:
                    if 'ENOENT' in str(e):
                        watchtarget = f
                        f = os.path.dirname(f)
                        if f in watcher.bbseen:
                            break
                        watcher.bbseen.append(f)
                        continue
                    if 'ENOSPC' in str(e):
                        providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
                        providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
                        providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
                        providerlog.error("Root privilege is required to modify max_user_watches.")
                    raise
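Note: add_filewatch() above wants notifications for files that do not exist yet, so on ENOENT it walks up to the nearest existing parent directory and watches that instead. A small sketch of just that walk, with plain os.path logic and an illustrative path:

    import os

    def nearest_existing_dir(path):
        # Watching this directory catches creation of the missing file.
        d = os.path.dirname(os.path.abspath(path))
        while d and not os.path.isdir(d):
            parent = os.path.dirname(d)
            if parent == d:
                break
            d = parent
        return d

    print(nearest_existing_dir("/tmp/nonexistent/layer/conf/local.conf"))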
    def sigterm_exception(self, signum, stackframe):
        if signum == signal.SIGTERM:
            bb.warn("Cooker recieved SIGTERM, shutting down...")
        elif signum == signal.SIGHUP:
            bb.warn("Cooker recieved SIGHUP, shutting down...")
        bb.warn("Cooker recieved SIGTERM, shutting down...")
        self.state = state.forceshutdown

    def setFeatures(self, features):
        # we only accept a new feature set if we're in state initial, so we can reset without problems
        if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
        if self.state != state.initial:
            raise Exception("Illegal state for feature set change")
        original_featureset = list(self.featureset)
        for feature in features:
            self.featureset.setFeature(feature)
        bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
        if (original_featureset != list(self.featureset)) and self.state != state.error:
        if (original_featureset != list(self.featureset)):
            self.reset()

    def initConfigurationData(self):
@@ -255,11 +174,6 @@ class BBCooker:
        self.state = state.initial
        self.caches_array = []

        # Need to preserve BB_CONSOLELOG over resets
        consolelog = None
        if hasattr(self, "data"):
            consolelog = self.data.getVar("BB_CONSOLELOG", True)

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.enableDataTracking()

@@ -286,92 +200,16 @@ class BBCooker:
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash

        if consolelog:
            self.data.setVar("BB_CONSOLELOG", consolelog)

        # we log all events to a file if so directed
        if self.configuration.writeeventlog:
            import json, pickle
            DEFAULT_EVENTFILE = self.configuration.writeeventlog
            class EventLogWriteHandler():

                class EventWriter():
                    def __init__(self, cooker):
                        self.file_inited = None
                        self.cooker = cooker
                        self.event_queue = []

                    def init_file(self):
                        try:
                            # delete the old log
                            os.remove(DEFAULT_EVENTFILE)
                        except:
                            pass

                        # write current configuration data
                        with open(DEFAULT_EVENTFILE, "w") as f:
                            f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))

                    def write_event(self, event):
                        with open(DEFAULT_EVENTFILE, "a") as f:
                            try:
                                f.write("%s\n" % json.dumps({"class":event.__module__ + "." + event.__class__.__name__, "vars":json.dumps(pickle.dumps(event)) }))
                            except Exception as e:
                                import traceback
                                print(e, traceback.format_exc(e))


                    def send(self, event):
                        event_class = event.__module__ + "." + event.__class__.__name__

                        # init on bb.event.BuildStarted
                        if self.file_inited is None:
                            if event_class == "bb.event.BuildStarted":
                                self.init_file()
                                self.file_inited = True

                                # write pending events
                                for e in self.event_queue:
                                    self.write_event(e)

                                # also write the current event
                                self.write_event(event)

                            else:
                                # queue all events until the file is inited
                                self.event_queue.append(event)

                        else:
                            # we have the file, just write the event
                            self.write_event(event)

                # set our handler's event processor
                event = EventWriter(self) # self is the cooker here


            # set up cooker features for this mock UI handler

            # we need to write the dependency tree in the log
            self.featureset.setFeature(CookerFeatures.SEND_DEPENDS_TREE)
            # register the log file writer as UI Handler
            bb.event.register_UIHhandler(EventLogWriteHandler())

|
||||
# Copy of the data store which has been expanded.
|
||||
# Used for firing events and accessing variables where expansion needs to be accounted for
|
||||
# Special updated configuration we use for firing events
|
||||
#
|
||||
self.expanded_data = bb.data.createCopy(self.data)
|
||||
bb.data.update_data(self.expanded_data)
|
||||
bb.parse.init_parser(self.expanded_data)
|
||||
self.event_data = bb.data.createCopy(self.data)
|
||||
bb.data.update_data(self.event_data)
|
||||
bb.parse.init_parser(self.event_data)
|
||||
|
||||
if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
|
||||
self.disableDataTracking()
|
||||
|
||||
self.data.renameVar("__depends", "__base_depends")
|
||||
self.add_filewatch(self.data.getVar("__base_depends", False), self.configwatcher)
|
||||
|
||||
|
||||
def enableDataTracking(self):
|
||||
self.configuration.tracking = True
|
||||
if hasattr(self, "data"):
|
||||
@@ -402,14 +240,14 @@ class BBCooker:
|
||||
f.write(total)
|
||||
|
||||
#add to history
|
||||
loginfo = {"op":"append", "file":default_file, "line":total.count("\n")}
|
||||
loginfo = {"op":append, "file":default_file, "line":total.count("\n")}
|
||||
self.data.appendVar(var, val, **loginfo)
|
||||
|
||||
def saveConfigurationVar(self, var, val, default_file, op):
|
||||
|
||||
replaced = False
|
||||
#do not save if nothing changed
|
||||
if str(val) == self.data.getVar(var, False):
|
||||
if str(val) == self.data.getVar(var):
|
||||
return
|
||||
|
||||
conf_files = self.data.varhistory.get_variable_files(var)
|
||||
@@ -421,7 +259,7 @@ class BBCooker:
|
||||
listval += "%s " % value
|
||||
val = listval
|
||||
|
||||
topdir = self.data.getVar("TOPDIR", False)
|
||||
topdir = self.data.getVar("TOPDIR")
|
||||
|
||||
#comment or replace operations made on var
|
||||
for conf_file in conf_files:
|
||||
@@ -471,12 +309,12 @@ class BBCooker:
|
||||
f.write(total)
|
||||
|
||||
#add to history
|
||||
loginfo = {"op":"set", "file":default_file, "line":total.count("\n")}
|
||||
loginfo = {"op":set, "file":default_file, "line":total.count("\n")}
|
||||
self.data.setVar(var, val, **loginfo)
|
||||
|
||||
def removeConfigurationVar(self, var):
|
||||
conf_files = self.data.varhistory.get_variable_files(var)
|
||||
topdir = self.data.getVar("TOPDIR", False)
|
||||
topdir = self.data.getVar("TOPDIR")
|
||||
|
||||
for conf_file in conf_files:
|
||||
if topdir in conf_file:
|
||||
@@ -516,7 +354,7 @@ class BBCooker:
|
||||
|
||||
def parseConfiguration(self):
|
||||
# Set log file verbosity
|
||||
verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", False))
|
||||
verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", "0"))
|
||||
if verboselogs:
|
||||
bb.msg.loggerVerboseLogs = True
|
||||
|
||||
@@ -533,37 +371,6 @@ class BBCooker:
|
||||
|
||||
self.handleCollections( self.data.getVar("BBFILE_COLLECTIONS", True) )
|
||||
|
||||
def updateConfigOpts(self, options, environment):
|
||||
clean = True
|
||||
for o in options:
|
||||
if o in ['prefile', 'postfile']:
|
||||
clean = False
|
||||
server_val = getattr(self.configuration, "%s_server" % o)
|
||||
if not options[o] and server_val:
|
||||
# restore value provided on server start
|
||||
setattr(self.configuration, o, server_val)
|
||||
continue
|
||||
setattr(self.configuration, o, options[o])
|
||||
for k in bb.utils.approved_variables():
|
||||
if k in environment and k not in self.configuration.env:
|
||||
logger.debug(1, "Updating environment variable %s to %s" % (k, environment[k]))
|
||||
self.configuration.env[k] = environment[k]
|
||||
clean = False
|
||||
if k in self.configuration.env and k not in environment:
|
||||
logger.debug(1, "Updating environment variable %s (deleted)" % (k))
|
||||
del self.configuration.env[k]
|
||||
clean = False
|
||||
if k not in self.configuration.env and k not in environment:
|
||||
continue
|
||||
if environment[k] != self.configuration.env[k]:
|
||||
logger.debug(1, "Updating environment variable %s to %s" % (k, environment[k]))
|
||||
self.configuration.env[k] = environment[k]
|
||||
clean = False
|
||||
if not clean:
|
||||
logger.debug(1, "Base environment change, triggering reparse")
|
||||
self.baseconfig_valid = False
|
||||
self.reset()
|
||||
|
||||
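Note: the core of updateConfigOpts() above is a three-way diff of an allowed variable set between the stored and incoming environments, used to decide whether a reparse is needed. A minimal sketch of that diff, with illustrative names:

    def env_changed(allowed, stored, incoming):
        changed = False
        for k in allowed:
            if k in incoming and k not in stored:
                stored[k] = incoming[k]; changed = True      # added
            elif k in stored and k not in incoming:
                del stored[k]; changed = True                # deleted
            elif k in stored and stored[k] != incoming[k]:
                stored[k] = incoming[k]; changed = True      # modified
        return changed

    # True: a change here would trigger a configuration reparse
    print(env_changed({"http_proxy"}, {}, {"http_proxy": "http://proxy:3128"}))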
    def runCommands(self, server, data, abort):
        """
        Run any queued asynchronous command
@@ -593,14 +400,12 @@ class BBCooker:

            logger.plain("%-35s %25s %25s", p, lateststr, prefstr)

    def showEnvironment(self, buildfile=None, pkgs_to_build=None):
    def showEnvironment(self, buildfile = None, pkgs_to_build = []):
        """
        Show the outer or per-recipe environment
        Show the outer or per-package environment
        """
        fn = None
        envdata = None
        if not pkgs_to_build:
            pkgs_to_build = []

        if buildfile:
            # Parse the configuration here. We need to do it explicitly here since
@@ -611,11 +416,11 @@ class BBCooker:
            fn = self.matchFile(fn)
            fn = bb.cache.Cache.realfn2virtual(fn, cls)
        elif len(pkgs_to_build) == 1:
            ignore = self.expanded_data.getVar("ASSUME_PROVIDED", True) or ""
            ignore = self.data.getVar("ASSUME_PROVIDED", True) or ""
            if pkgs_to_build[0] in set(ignore.split()):
                bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

            taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)
            taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, None, self.configuration.abort)

            targetid = taskdata.getbuild_id(pkgs_to_build[0])
            fnid = taskdata.build_targets[targetid][0]
@@ -645,10 +450,10 @@ class BBCooker:
        data.expandKeys(envdata)
        for e in envdata.keys():
            if data.getVarFlag( e, 'python', envdata ):
                logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, True))
                logger.plain("\npython %s () {\n%s}\n", e, data.getVar(e, envdata, 1))


    def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False):
    def buildTaskData(self, pkgs_to_build, task, abort):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """
@@ -663,7 +468,7 @@ class BBCooker:
        localdata = data.createCopy(self.data)
        bb.data.update_data(localdata)
        bb.data.expandKeys(localdata)
        taskdata = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)
        taskdata = bb.taskdata.TaskData(abort, skiplist=self.skiplist)

        current = 0
        runlist = []
@@ -675,9 +480,7 @@ class BBCooker:
            ktask = k2[1]
            taskdata.add_provider(localdata, self.recipecache, k)
            current += 1
            if not ktask.startswith("do_"):
                ktask = "do_%s" % ktask
            runlist.append([k, ktask])
            runlist.append([k, "do_%s" % ktask])
            bb.event.fire(bb.event.TreeDataPreparationProgress(current, len(fulltargetlist)), self.data)
        taskdata.add_unresolved(localdata, self.recipecache)
        bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
@@ -690,10 +493,10 @@ class BBCooker:

        # We set abort to False here to prevent unbuildable targets raising
        # an exception when we're just generating data
        taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)
        taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, task, False)

        return runlist, taskdata


    ######## WARNING : this function requires cache_extra to be enabled ########

    def generateTaskDepTreeData(self, pkgs_to_build, task):
@@ -933,27 +736,17 @@ class BBCooker:
        print("}", file=tdepends_file)
        logger.info("Task dependencies saved to 'task-depends.dot'")

    def show_appends_with_no_recipes(self):
        # Determine which bbappends haven't been applied

        # First get list of recipes, including skipped
        recipefns = self.recipecache.pkg_fn.keys()
        recipefns.extend(self.skiplist.keys())

        # Work out list of bbappends that have been applied
        applied_appends = []
        for fn in recipefns:
            applied_appends.extend(self.collection.get_file_appends(fn))

        appends_without_recipes = []
        for _, appendfn in self.collection.bbappends:
            if not appendfn in applied_appends:
                appends_without_recipes.append(appendfn)

    def show_appends_with_no_recipes( self ):
        appends_without_recipes = [self.collection.appendlist[recipe]
                                   for recipe in self.collection.appendlist
                                   if recipe not in self.collection.appliedappendlist]
        if appends_without_recipes:
            msg = 'No recipes available for:\n  %s' % '\n  '.join(appends_without_recipes)
            warn_only = self.data.getVar("BB_DANGLINGAPPENDS_WARNONLY", \
                                         False) or "no"
            appendlines = ('  %s' % append
                           for appends in appends_without_recipes
                           for append in appends)
            msg = 'No recipes available for:\n%s' % '\n'.join(appendlines)
            warn_only = data.getVar("BB_DANGLINGAPPENDS_WARNONLY", \
                        self.data, False) or "no"
            if warn_only.lower() in ("1", "yes", "true"):
                bb.warn(msg)
            else:
@@ -1000,8 +793,8 @@ class BBCooker:
        # Generate a list of parsed configuration files by searching the files
        # listed in the __depends and __base_depends variables with a .conf suffix.
        conffiles = []
        dep_files = self.data.getVar('__base_depends', False) or []
        dep_files = dep_files + (self.data.getVar('__depends', False) or [])
        dep_files = self.data.getVar('__base_depends') or []
        dep_files = dep_files + (self.data.getVar('__depends') or [])

        for f in dep_files:
            if f[0].endswith(".conf"):
@@ -1025,6 +818,7 @@ class BBCooker:
        or to find all machine configuration files one could call:
        findFilesMatchingInDir(self, 'conf/machines', 'conf')
        """
        import re

        matches = []
        p = re.compile(re.escape(filepattern))
@@ -1077,13 +871,13 @@ class BBCooker:

        return pkg_list

    def generateTargetsTree(self, klass=None, pkgs=None):
    def generateTargetsTree(self, klass=None, pkgs=[]):
        """
        Generate a dependency tree of buildable targets
        Generate an event with the result
        """
        # if the caller hasn't specified a pkgs list default to universe
        if not pkgs:
        if not len(pkgs):
            pkgs = ['universe']
        # if inherited_class passed ensure all recipes which inherit the
        # specified class are included in pkgs
@@ -1155,30 +949,42 @@ class BBCooker:
            # Check dependencies and store information for priority calculation
            deps = self.data.getVar("LAYERDEPENDS_%s" % c, True)
            if deps:
                try:
                    deplist = bb.utils.explode_dep_versions2(deps)
                except bb.utils.VersionStringException as vse:
                    bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                for dep, oplist in deplist.iteritems():
                depnamelist = []
                deplist = deps.split()
                for dep in deplist:
                    depsplit = dep.split(':')
                    if len(depsplit) > 1:
                        try:
                            depver = int(depsplit[1])
                        except ValueError:
                            parselog.error("invalid version value in LAYERDEPENDS_%s: \"%s\"", c, dep)
                            errors = True
                            continue
                    else:
                        depver = None
                    dep = depsplit[0]
                    depnamelist.append(dep)

                    if dep in collection_list:
                        for opstr in oplist:
                        if depver:
                            layerver = self.data.getVar("LAYERVERSION_%s" % dep, True)
                            (op, depver) = opstr.split()
                            if layerver:
                                try:
                                    res = bb.utils.vercmp_string_op(layerver, depver, op)
                                except bb.utils.VersionStringException as vse:
                                    bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
                                if not res:
                                    parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
                                    lver = int(layerver)
                                except ValueError:
                                    parselog.error("invalid value for LAYERVERSION_%s: \"%s\"", c, layerver)
                                    errors = True
                                    continue
                                if lver != depver:
                                    parselog.error("Layer '%s' depends on version %d of layer '%s', but version %d is enabled in your configuration", c, depver, dep, lver)
                                    errors = True
                            else:
                                parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
                                parselog.error("Layer '%s' depends on version %d of layer '%s', which exists in your configuration but does not specify a version", c, depver, dep)
                                errors = True
                    else:
                        parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
                        errors = True
                collection_depends[c] = deplist.keys()
                collection_depends[c] = depnamelist
            else:
                collection_depends[c] = []
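Note: the hunk above contrasts two LAYERDEPENDS syntaxes: the versioned-dependency form handled by bb.utils.explode_dep_versions2 ("name (= 3)") and the older colon form ("name:3"). A tiny parser for the older colon form, as the diff describes it (illustrative only; real LAYERVERSION checks are integer comparisons as shown above):

    def parse_layerdepends(value):
        deps = {}
        for dep in value.split():
            name, _, ver = dep.partition(':')
            deps[name] = int(ver) if ver else None
        return deps

    print(parse_layerdepends("core yocto:3"))  # {'core': None, 'yocto': 3}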
@@ -1218,12 +1024,9 @@ class BBCooker:
        """
        Setup any variables needed before starting a build
        """
        t = time.gmtime()
        if not self.data.getVar("BUILDNAME", False):
            self.data.setVar("BUILDNAME", "${DATE}${TIME}")
        self.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
        self.data.setVar("DATE", time.strftime('%Y%m%d', t))
        self.data.setVar("TIME", time.strftime('%H%M%S', t))
        if not self.data.getVar("BUILDNAME"):
            self.data.setVar("BUILDNAME", time.strftime('%Y%m%d%H%M'))
        self.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', time.gmtime()))

    def matchFiles(self, bf):
        """
@@ -1233,7 +1036,7 @@ class BBCooker:
        bf = os.path.abspath(bf)

        self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities)
        filelist, masked = self.collection.collect_bbfiles(self.data, self.expanded_data)
        filelist, masked = self.collection.collect_bbfiles(self.data, self.event_data)
        try:
            os.stat(bf)
            bf = os.path.abspath(bf)
@@ -1316,36 +1119,29 @@ class BBCooker:
        # Invalidate task for target if force mode active
        if self.configuration.force:
            logger.verbose("Invalidate task %s, %s", task, fn)
            if not task.startswith("do_"):
                task = "do_%s" % task
            bb.parse.siggen.invalidate_task(task, self.recipecache, fn)
            bb.parse.siggen.invalidate_task('do_%s' % task, self.recipecache, fn)

        # Setup taskdata structure
        taskdata = bb.taskdata.TaskData(self.configuration.abort)
        taskdata.add_provider(self.data, self.recipecache, item)

        buildname = self.data.getVar("BUILDNAME", True)
        bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.expanded_data)
        buildname = self.data.getVar("BUILDNAME")
        bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.event_data)

        # Execute the runqueue
        if not task.startswith("do_"):
            task = "do_%s" % task
        runlist = [[item, task]]
        runlist = [[item, "do_%s" % task]]

        rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)

        def buildFileIdle(server, rq, abort):

            msg = None
            interrupted = 0
            if abort or self.state == state.forceshutdown:
                rq.finish_runqueue(True)
                msg = "Forced shutdown"
                interrupted = 2
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
                msg = "Stopped build"
                interrupted = 1
            failures = 0
            try:
                retval = rq.execute_runqueue()
@@ -1357,7 +1153,7 @@ class BBCooker:
                return False

            if not retval:
                bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, item, failures, interrupted), self.expanded_data)
                bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, item, failures), self.event_data)
                self.command.finishAsyncCommand(msg)
                return False
            if retval is True:
@@ -1373,15 +1169,12 @@ class BBCooker:

        def buildTargetsIdle(server, rq, abort):
            msg = None
            interrupted = 0
            if abort or self.state == state.forceshutdown:
                rq.finish_runqueue(True)
                msg = "Forced shutdown"
                interrupted = 2
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
                msg = "Stopped build"
                interrupted = 1
            failures = 0
            try:
                retval = rq.execute_runqueue()
@@ -1393,38 +1186,19 @@ class BBCooker:
                return False

            if not retval:
                bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, targets, failures, interrupted), self.data)
                bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, targets, failures), self.data)
                self.command.finishAsyncCommand(msg)
                return False
            if retval is True:
                return True
            return retval

        build.reset_cache()
        self.buildSetVars()

        # If we are told to do the None task then query the default task
        if (task == None):
            task = self.configuration.cmd

        if not task.startswith("do_"):
            task = "do_%s" % task

        taskdata, runlist, fulltargetlist = self.buildTaskData(targets, task, self.configuration.abort)

        buildname = self.data.getVar("BUILDNAME", False)

        # make targets to always look as <target>:do_<task>
        ntargets = []
        for target in fulltargetlist:
            if ":" in target:
                if ":do_" not in target:
                    target = "%s:do_%s" % tuple(target.split(":", 1))
            else:
                target = "%s:%s" % (target, task)
            ntargets.append(target)

        bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.data)
        buildname = self.data.getVar("BUILDNAME")
        bb.event.fire(bb.event.BuildStarted(buildname, fulltargetlist), self.data)

        rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)
        if 'universe' in targets:
@@ -1454,6 +1228,7 @@ class BBCooker:
        '''
        Create a new image with a "require"/"inherit" base_image statement
        '''
        import re
        if timestamp:
            image_name = os.path.splitext(image)[0]
            timestr = time.strftime("-%Y%m%d-%H%M%S")
@@ -1477,7 +1252,7 @@ class BBCooker:
                if base_image is None:
                    imagefile.write("inherit core-image\n")
                else:
                    topdir = self.data.getVar("TOPDIR", False)
                    topdir = self.data.getVar("TOPDIR")
                    if topdir in base_image:
                        base_image = require_line.split()[1]
                    imagefile.write("require " + base_image + "\n")
@@ -1499,52 +1274,34 @@ class BBCooker:
        if timestamp:
            return timestr

    def updateCacheSync(self):
        if self.state == state.running:
            return

        # reload files for which we got notifications
        for p in self.inotify_modified_files:
            bb.parse.update_cache(p)
        self.inotify_modified_files = []

        if not self.baseconfig_valid:
            logger.debug(1, "Reloading base configuration data")
            self.initConfigurationData()
            self.baseconfig_valid = True
            self.parsecache_valid = False

    # This is called for all async commands when self.state != running
    def updateCache(self):
        if self.state == state.running:
            return

        if self.state in (state.shutdown, state.forceshutdown, state.error):
        if self.state in (state.shutdown, state.forceshutdown):
            if hasattr(self.parser, 'shutdown'):
                self.parser.shutdown(clean=False, force = True)
            raise bb.BBHandledException()

        if self.state != state.parsing:
            self.updateCacheSync()

        if self.state != state.parsing and not self.parsecache_valid:
            self.parseConfiguration ()
            if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
                bb.event.fire(bb.event.SanityCheck(False), self.data)

            ignore = self.expanded_data.getVar("ASSUME_PROVIDED", True) or ""
            ignore = self.data.getVar("ASSUME_PROVIDED", True) or ""
            self.recipecache.ignored_dependencies = set(ignore.split())

            for dep in self.configuration.extra_assume_provided:
                self.recipecache.ignored_dependencies.add(dep)

            self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities)
            (filelist, masked) = self.collection.collect_bbfiles(self.data, self.expanded_data)
            (filelist, masked) = self.collection.collect_bbfiles(self.data, self.event_data)

            self.data.renameVar("__depends", "__base_depends")

            self.parser = CookerParser(self, filelist, masked)
            self.parsecache_valid = True

            self.state = state.parsing
        self.state = state.parsing

        if not self.parser.parse_next():
            collectlog.debug(1, "parsing complete")
@@ -1552,13 +1309,8 @@ class BBCooker:
                raise bb.BBHandledException()
            self.show_appends_with_no_recipes()
            self.handlePrefProviders()
            self.recipecache.bbfile_priority = self.collection.collection_priorities(self.recipecache.pkg_fn, self.data)
            self.recipecache.bbfile_priority = self.collection.collection_priorities(self.recipecache.pkg_fn)
            self.state = state.running

            # Send an event listing all stamps reachable after parsing
            # which the metadata may use to clean up stale data
            event = bb.event.ReachableStamps(self.recipecache.stamp)
            bb.event.fire(event, self.expanded_data)
            return None

        return True
@@ -1571,7 +1323,7 @@ class BBCooker:
        if len(pkgs_to_build) == 0:
            raise NothingToBuild

        ignore = (self.expanded_data.getVar("ASSUME_PROVIDED", True) or "").split()
        ignore = (self.data.getVar("ASSUME_PROVIDED", True) or "").split()
        for pkg in pkgs_to_build:
            if pkg in ignore:
                parselog.warn("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
@@ -1601,41 +1353,13 @@ class BBCooker:
        try:
            self.prhost = prserv.serv.auto_start(self.data)
        except prserv.serv.PRServiceConfigError:
            bb.event.fire(CookerExit(), self.expanded_data)
            bb.event.fire(CookerExit(), self.event_data)
            self.state = state.error
        return

    def post_serve(self):
        prserv.serv.auto_shutdown(self.data)
        bb.event.fire(CookerExit(), self.expanded_data)
        lockfile = self.lock.name
        self.lock.close()
        self.lock = None

        while not self.lock:
            with bb.utils.timeout(3):
                self.lock = bb.utils.lockfile(lockfile, shared=False, retry=False, block=True)
                if not self.lock:
                    # Some systems may not have lsof available
                    procs = None
                    try:
                        procs = subprocess.check_output(["lsof", '-w', lockfile], stderr=subprocess.STDOUT)
                    except OSError as e:
                        if e.errno != errno.ENOENT:
                            raise
                    if procs is None:
                        # Fall back to fuser if lsof is unavailable
                        try:
                            procs = subprocess.check_output(["fuser", '-v', lockfile], stderr=subprocess.STDOUT)
                        except OSError as e:
                            if e.errno != errno.ENOENT:
                                raise

                    msg = "Delaying shutdown due to active processes which appear to be holding bitbake.lock"
                    if procs:
                        msg += ":\n%s" % str(procs)
                    print(msg)

        bb.event.fire(CookerExit(), self.event_data)
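Note: post_serve() above diagnoses who still holds bitbake.lock by trying lsof and falling back to fuser when lsof is not installed (a missing binary surfaces as OSError with errno ENOENT). A self-contained sketch of that fallback, under those assumptions:

    import errno, subprocess

    def who_holds(lockfile):
        for tool in (["lsof", "-w", lockfile], ["fuser", "-v", lockfile]):
            try:
                return subprocess.check_output(tool, stderr=subprocess.STDOUT)
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise               # real failure, not a missing tool
            except subprocess.CalledProcessError:
                return None             # tool ran; nothing holds the file
        return None                     # neither lsof nor fuser available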
    def shutdown(self, force = False):
        if force:
@@ -1649,19 +1373,6 @@ class BBCooker:
    def reset(self):
        self.initConfigurationData()

    def lockBitbake(self):
        if not hasattr(self, 'lock'):
            self.lock = None
            if self.data:
                lockfile = self.data.expand("${TOPDIR}/bitbake.lock")
                if lockfile:
                    self.lock = bb.utils.lockfile(lockfile, False, False)
        return self.lock

    def unlockBitbake(self):
        if hasattr(self, 'lock') and self.lock:
            bb.utils.unlockfile(self.lock)

def server_main(cooker, func, *args):
    cooker.pre_serve()

@@ -1696,7 +1407,9 @@ class CookerExit(bb.event.Event):

class CookerCollectFiles(object):
    def __init__(self, priorities):
        self.appendlist = {}
        self.bbappends = []
        self.appliedappendlist = []
        self.bbfile_config_priorities = priorities

    def calc_bbfile_priority( self, filename, matched = None ):
@@ -1725,7 +1438,7 @@ class CookerCollectFiles(object):
            for ignored in ('SCCS', 'CVS', '.svn'):
                if ignored in dirs:
                    dirs.remove(ignored)
            found += [os.path.join(dir, f) for f in files if (f.endswith(['.bb', '.bbappend']))]
            found += [os.path.join(dir, f) for f in files if (f.endswith('.bb') or f.endswith('.bbappend'))]

        return found

@@ -1791,6 +1504,10 @@ class CookerCollectFiles(object):
        for f in bbappend:
            base = os.path.basename(f).replace('.bbappend', '.bb')
            self.bbappends.append((base, f))
            if not base in self.appendlist:
                self.appendlist[base] = []
            if f not in self.appendlist[base]:
                self.appendlist[base].append(f)

        # Find overlayed recipes
        # bbfiles will be in priority order which makes this easy
@@ -1815,10 +1532,11 @@ class CookerCollectFiles(object):
        for b in self.bbappends:
            (bbappend, filename) = b
            if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
                self.appliedappendlist.append(bbappend)
                filelist.append(filename)
        return filelist

    def collection_priorities(self, pkgfns, d):
    def collection_priorities(self, pkgfns):

        priorities = {}

@@ -1827,10 +1545,10 @@ class CookerCollectFiles(object):
        for p in pkgfns:
            realfn, cls = bb.cache.Cache.virtualfn2realfn(p)
            priorities[p] = self.calc_bbfile_priority(realfn, matched)


        # Don't show the warning if the BBFILE_PATTERN did match .bbappend files
        unmatched = set()
        for _, _, regex, pri in self.bbfile_config_priorities:
        for _, _, regex, pri in self.bbfile_config_priorities:
            if not regex in matched:
                unmatched.add(regex)

@@ -1847,8 +1565,7 @@ class CookerCollectFiles(object):

        for collection, pattern, regex, _ in self.bbfile_config_priorities:
            if regex in unmatched:
                if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection, True) != '1':
                    collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
                collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))

        return priorities

@@ -1914,6 +1631,8 @@ class Parser(multiprocessing.Process):
            finally:
                logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
                prof.dump_stats(logfile)
                bb.utils.process_profilelog(logfile)
                print("Raw profiling information saved to %s and processed statistics to %s.processed" % (logfile, logfile))

    def realrun(self):
        if self.init:
@@ -1983,7 +1702,6 @@ class CookerParser(object):
        self.current = 0
        self.num_processes = int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or
                                 multiprocessing.cpu_count())
        self.process_names = []

        self.bb_cache = bb.cache.Cache(self.cfgdata, self.cfghash, cooker.caches_array)
        self.fromcache = []
@@ -2019,7 +1737,6 @@ class CookerParser(object):
            for i in range(0, self.num_processes):
                parser = Parser(self.jobs, self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
                parser.start()
                self.process_names.append(parser.name)
                self.processes.append(parser)

            self.results = itertools.chain(self.results, self.parse_generator())
@@ -2063,16 +1780,6 @@ class CookerParser(object):
            multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
            bb.codeparser.parser_cache_savemerge(self.cooker.data)
            bb.fetch.fetcher_parse_done(self.cooker.data)
        if self.cooker.configuration.profile:
            profiles = []
            for i in self.process_names:
                logfile = "profile-parse-%s.log" % i
                if os.path.exists(logfile):
                    profiles.append(logfile)

            pout = "profile-parse.log.processed"
            bb.utils.process_profilelog(profiles, pout = pout)
            print("Processed parsing statistics saved to %s" % (pout))

    def load_cached(self):
        for filename, appends in self.fromcache:
@@ -2158,7 +1865,7 @@ class CookerParser(object):
                self.skipped += 1
                self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
            self.bb_cache.add_info(virtualfn, info_array, self.cooker.recipecache,
                                   parsed=parsed, watcher = self.cooker.add_filewatch)
                                   parsed=parsed)
        return True

    def reparse(self, filename):
@@ -33,8 +33,8 @@ logger = logging.getLogger("BitBake")
parselog = logging.getLogger("BitBake.Parsing")

class ConfigParameters(object):
def __init__(self, argv=sys.argv):
self.options, targets = self.parseCommandLine(argv)
def __init__(self):
self.options, targets = self.parseCommandLine()
self.environment = self.parseEnvironment()

self.options.pkgs_to_build = targets or []
@@ -46,7 +46,7 @@ class ConfigParameters(object):
for key, val in self.options.__dict__.items():
setattr(self, key, val)

def parseCommandLine(self, argv=sys.argv):
def parseCommandLine(self):
raise Exception("Caller must implement commandline option parsing")

def parseEnvironment(self):
@@ -63,24 +63,12 @@ class ConfigParameters(object):
raise Exception("Unable to set configuration option 'cmd' on the server: %s" % error)

if not self.options.pkgs_to_build:
bbpkgs, error = server.runCommand(["getVariable", "BBTARGETS"])
bbpkgs, error = server.runCommand(["getVariable", "BBPKGS"])
if error:
raise Exception("Unable to get the value of BBTARGETS from the server: %s" % error)
raise Exception("Unable to get the value of BBPKGS from the server: %s" % error)
if bbpkgs:
self.options.pkgs_to_build.extend(bbpkgs.split())

def updateToServer(self, server, environment):
options = {}
for o in ["abort", "tryaltconfigs", "force", "invalidate_stamp",
"verbose", "debug", "dry_run", "dump_signatures",
"debug_domains", "extra_assume_provided", "profile",
"prefile", "postfile"]:
options[o] = getattr(self.options, o)

ret, error = server.runCommand(["updateConfig", options, environment])
if error:
raise Exception("Unable to update the server configuration with local parameters: %s" % error)

def parseActions(self):
# Parse any commandline into actions
action = {'action':None, 'msg':None}
@@ -129,8 +117,6 @@ class CookerConfiguration(object):
self.extra_assume_provided = []
self.prefile = []
self.postfile = []
self.prefile_server = []
self.postfile_server = []
self.debug = 0
self.cmd = None
self.abort = True
@@ -142,7 +128,6 @@ class CookerConfiguration(object):
self.dry_run = False
self.tracking = False
self.interface = []
self.writeeventlog = False

self.env = {}

@@ -176,23 +161,11 @@ def catch_parse_error(func):
def wrapped(fn, *args):
try:
return func(fn, *args)
except IOError as exc:
except (IOError, bb.parse.ParseError, bb.data_smart.ExpansionError) as exc:
import traceback
parselog.critical(traceback.format_exc())
parselog.critical( traceback.format_exc())
parselog.critical("Unable to parse %s: %s" % (fn, exc))
sys.exit(1)
except (bb.parse.ParseError, bb.data_smart.ExpansionError) as exc:
import traceback

bbdir = os.path.dirname(__file__) + os.sep
exc_class, exc, tb = sys.exc_info()
for tb in iter(lambda: tb.tb_next, None):
# Skip frames in bitbake itself, we only want the metadata
fn, _, _, _ = traceback.extract_tb(tb, 1)[0]
if not fn.startswith(bbdir):
break
parselog.critical("Unable to parse %s", fn, exc_info=(exc_class, exc, tb))
sys.exit(1)
return wrapped

@catch_parse_error
@@ -254,13 +227,10 @@ class CookerDataBuilder(object):
try:
self.parseConfigurationFiles(self.prefiles, self.postfiles)
except SyntaxError:
raise bb.BBHandledException
except bb.data_smart.ExpansionError as e:
logger.error(str(e))
raise bb.BBHandledException
sys.exit(1)
except Exception:
logger.exception("Error parsing configuration files")
raise bb.BBHandledException
sys.exit(1)

def _findLayerConf(self, data):
return findConfigFile("bblayers.conf", data)
@@ -284,11 +254,8 @@ class CookerDataBuilder(object):
layers = (data.getVar('BBLAYERS', True) or "").split()

data = bb.data.createCopy(data)
approved = bb.utils.approved_variables()
for layer in layers:
parselog.debug(2, "Adding layer %s", layer)
if 'HOME' in approved and '~' in layer:
layer = os.path.expanduser(layer)
data.setVar('LAYERDIR', layer)
data = parse_config_file(os.path.join(layer, "conf", "layer.conf"), data)
data.expandVarref('LAYERDIR')
@@ -316,15 +283,15 @@ class CookerDataBuilder(object):

# Nomally we only register event handlers at the end of parsing .bb files
# We register any handlers we've found so far here...
for var in data.getVar('__BBHANDLERS', False) or []:
bb.event.register(var, data.getVar(var, False), (data.getVarFlag(var, "eventmask", True) or "").split())
for var in data.getVar('__BBHANDLERS') or []:
bb.event.register(var, data.getVar(var), (data.getVarFlag(var, "eventmask", True) or "").split())

if data.getVar("BB_WORKERCONTEXT", False) is None:
bb.fetch.fetcher_init(data)
bb.codeparser.parser_cache_init(data)
bb.event.fire(bb.event.ConfigParsed(), data)

if data.getVar("BB_INVALIDCONF", False) is True:
if data.getVar("BB_INVALIDCONF") is True:
data.setVar("BB_INVALIDCONF", False)
self.parseConfigurationFiles(self.prefiles, self.postfiles)
return

@@ -1,5 +1,5 @@
"""
Python Daemonizing helper
Python Deamonizing helper

Configurable daemon behaviors:

@@ -12,11 +8,8 @@ A failed call to fork() now raises an exception.

References:
1) Advanced Programming in the Unix Environment: W. Richard Stevens
http://www.apuebook.com/apue3e.html
2) The Linux Programming Interface: Michael Kerrisk
http://man7.org/tlpi/index.html
3) Unix Programming Frequently Asked Questions:
http://www.faqs.org/faqs/unix-faq/programmer/faq/
2) Unix Programming Frequently Asked Questions:
http://www.erlenstar.demon.co.uk/unix/faq_toc.html

Modified to allow a function to be daemonized and return for
bitbake use by Richard Purdie
@@ -28,7 +25,7 @@ __version__ = "0.2"

# Standard Python modules.
import os # Miscellaneous OS interfaces.
import sys # System-specific parameters and functions.
import sys # System-specific parameters and functions.

# Default daemon parameters.
# File mode creation mask of the daemon.
@@ -131,7 +128,7 @@ def createDaemon(function, logfile):
# of methods to accomplish this task. Three are listed below.
#
# Try the system configuration variable, SC_OPEN_MAX, to obtain the maximum
# number of open file descriptors to close. If it doesn't exist, use
# number of open file descriptors to close. If it doesn't exists, use
# the default value (configurable).
#
# try:
@@ -149,7 +146,7 @@ def createDaemon(function, logfile):
# OR
#
# Use the getrlimit method to retrieve the maximum file descriptor number
# that can be opened by this process. If there is no limit on the
# that can be opened by this process. If there is not limit on the
# resource, use the default value.
#
import resource # Resource usage information.

@@ -6,7 +6,7 @@ BitBake 'Data' implementations
Functions for interacting with the data structure used by the
BitBake build tools.

The expandKeys and update_data are the most expensive
The expandData and update_data are the most expensive
operations. At night the cookie monster came by and
suggested 'give me cookies on setting the variables and
things will work out'. Taking this suggestion into account
@@ -15,7 +15,7 @@ Analyse von Algorithmen' lecture and the cookie
monster seems to be right. We will track setVar more carefully
to have faster update_data and expandKeys operations.

This is a trade-off between speed and memory again but
This is a treade-off between speed and memory again but
the speed is more critical here.
"""

@@ -35,7 +35,7 @@ the speed is more critical here.
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
#Based on functions from the base bb module, Copyright 2003 Holger Schurig

import sys, os, re
if sys.argv[0][-5:] == "pydoc":
@@ -84,7 +84,7 @@ def setVar(var, value, d):
d.setVar(var, value)


def getVar(var, d, exp = False):
def getVar(var, d, exp = 0):
"""Gets the value of a variable"""
return d.getVar(var, exp)

@@ -159,12 +159,13 @@ def expandKeys(alterdata, readdata = None):

# These two for loops are split for performance to maximise the
# usefulness of the expand cache
for key in sorted(todolist):

for key in todolist:
ekey = todolist[key]
newval = alterdata.getVar(ekey, False)
if newval is not None:
val = alterdata.getVar(key, False)
if val is not None:
newval = alterdata.getVar(ekey, 0)
if newval:
val = alterdata.getVar(key, 0)
if val is not None and newval is not None:
bb.warn("Variable key %s (%s) replaces original key %s (%s)." % (key, val, ekey, newval))
alterdata.renameVar(key, ekey)

@@ -174,7 +175,7 @@ def inheritFromOS(d, savedenv, permitted):
for s in savedenv.keys():
if s in permitted:
try:
d.setVar(s, savedenv.getVar(s, True), op = 'from env')
d.setVar(s, getVar(s, savedenv, True), op = 'from env')
if s in exportlist:
d.setVarFlag(s, "export", True, op = 'auto env export')
except TypeError:
@@ -182,49 +183,42 @@ def inheritFromOS(d, savedenv, permitted):

def emit_var(var, o=sys.__stdout__, d = init(), all=False):
"""Emit a variable to be sourced by a shell."""
if d.getVarFlag(var, "python"):
return False
if getVarFlag(var, "python", d):
return 0

export = d.getVarFlag(var, "export")
unexport = d.getVarFlag(var, "unexport")
func = d.getVarFlag(var, "func")
export = getVarFlag(var, "export", d)
unexport = getVarFlag(var, "unexport", d)
func = getVarFlag(var, "func", d)
if not all and not export and not unexport and not func:
return False
return 0

try:
if all:
oval = d.getVar(var, False)
val = d.getVar(var, True)
oval = getVar(var, d, 0)
val = getVar(var, d, 1)
except (KeyboardInterrupt, bb.build.FuncFailed):
raise
except Exception as exc:
o.write('# expansion of %s threw %s: %s\n' % (var, exc.__class__.__name__, str(exc)))
return False
return 0

if all:
d.varhistory.emit(var, oval, val, o, d)
d.varhistory.emit(var, oval, val, o)

if (var.find("-") != -1 or var.find(".") != -1 or var.find('{') != -1 or var.find('}') != -1 or var.find('+') != -1) and not all:
return False
return 0

varExpanded = d.expand(var)
varExpanded = expand(var, d)

if unexport:
o.write('unset %s\n' % varExpanded)
return False
return 0

if val is None:
return False
return 0

val = str(val)

if varExpanded.startswith("BASH_FUNC_"):
varExpanded = varExpanded[10:-2]
val = val[3:] # Strip off "() "
o.write("%s() %s\n" % (varExpanded, val))
o.write("export -f %s\n" % (varExpanded))
return True

if func:
# NOTE: should probably check for unbalanced {} within the var
o.write("%s() {\n%s\n}\n" % (varExpanded, val))
@@ -239,7 +233,7 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):
alter = re.sub('\n', ' \\\n', alter)
alter = re.sub('\\$', '\\\\$', alter)
o.write('%s="%s"\n' % (varExpanded, alter))
return False
return 0

def emit_env(o=sys.__stdout__, d = init(), all=False):
"""Emits all items in the data store in a format such that it can be sourced by a shell."""
@@ -271,9 +265,8 @@ def emit_func(func, o=sys.__stdout__, d = init()):

keys = (key for key in d.keys() if not key.startswith("__") and not d.getVarFlag(key, "func"))
for key in keys:
emit_var(key, o, d, False)
emit_var(key, o, d, False) and o.write('\n')

o.write('\n')
emit_var(func, o, d, False) and o.write('\n')
newdeps = bb.codeparser.ShellParser(func, logger).parse_shell(d.getVar(func, True))
newdeps |= set((d.getVarFlag(func, "vardeps", True) or "").split())
@@ -289,41 +282,6 @@ def emit_func(func, o=sys.__stdout__, d = init()):
newdeps |= set((d.getVarFlag(dep, "vardeps", True) or "").split())
newdeps -= seen

_functionfmt = """
def {function}(d):
{body}"""

def emit_func_python(func, o=sys.__stdout__, d = init()):
"""Emits all items in the data store in a format such that it can be sourced by a shell."""

def write_func(func, o, call = False):
body = d.getVar(func, True)
if not body.startswith("def"):
body = _functionfmt.format(function=func, body=body)

o.write(body.strip() + "\n\n")
if call:
o.write(func + "(d)" + "\n\n")

write_func(func, o, True)
pp = bb.codeparser.PythonParser(func, logger)
pp.parse_python(d.getVar(func, True))
newdeps = pp.execs
newdeps |= set((d.getVarFlag(func, "vardeps", True) or "").split())
seen = set()
while newdeps:
deps = newdeps
seen |= deps
newdeps = set()
for dep in deps:
if d.getVarFlag(dep, "func") and d.getVarFlag(dep, "python"):
write_func(dep, o)
pp = bb.codeparser.PythonParser(dep, logger)
pp.parse_python(d.getVar(dep, True))
newdeps |= pp.execs
newdeps |= set((d.getVarFlag(dep, "vardeps", True) or "").split())
newdeps -= seen

def update_data(d):
"""Performs final steps upon the datastore, including application of overrides"""
d.finalize(parent = True)
@@ -420,7 +378,7 @@ def generate_dependencies(d):
deps = {}
values = {}

tasklist = d.getVar('__BBTASKS', False) or []
tasklist = d.getVar('__BBTASKS') or []
for task in tasklist:
deps[task], values[task] = build_dependencies(task, keys, shelldeps, varflagsexcl, d)
newdeps = deps[task]
@@ -438,7 +396,7 @@ def generate_dependencies(d):
return tasklist, deps, values

def inherits_class(klass, d):
val = d.getVar('__inherit_cache', False) or []
val = getVar('__inherit_cache', d) or []
needle = os.path.join('classes', '%s.bbclass' % klass)
for v in val:
if v.endswith(needle):

@@ -54,36 +54,27 @@ def infer_caller_details(loginfo, parent = False, varval = True):
return
# Infer caller's likely values for variable (var) and value (value),
# to reduce clutter in the rest of the code.
above = None
def set_above():
if varval and ('variable' not in loginfo or 'detail' not in loginfo):
try:
raise Exception
except Exception:
tb = sys.exc_info()[2]
if parent:
return tb.tb_frame.f_back.f_back.f_back
above = tb.tb_frame.f_back.f_back
else:
return tb.tb_frame.f_back.f_back

if varval and ('variable' not in loginfo or 'detail' not in loginfo):
if not above:
above = set_above()
lcls = above.f_locals.items()
above = tb.tb_frame.f_back
lcls = above.f_locals.items()
for k, v in lcls:
if k == 'value' and 'detail' not in loginfo:
loginfo['detail'] = v
if k == 'var' and 'variable' not in loginfo:
loginfo['variable'] = v
# Infer file/line/function from traceback
# Don't use traceback.extract_stack() since it fills the line contents which
# we don't need and that hits stat syscalls
if 'file' not in loginfo:
if not above:
above = set_above()
f = above.f_back
line = f.f_lineno
file = f.f_code.co_filename
func = f.f_code.co_name
depth = 3
if parent:
depth = 4
file, line, func, text = traceback.extract_stack(limit = depth)[0]
loginfo['file'] = file
loginfo['line'] = line
if func not in loginfo:
@@ -240,10 +231,6 @@ class VariableHistory(object):

if var not in self.variables:
self.variables[var] = []
if not isinstance(self.variables[var], list):
return
if 'nodups' in loginfo and loginfo in self.variables[var]:
return
self.variables[var].append(loginfo.copy())

def variable(self, var):
@@ -252,20 +239,8 @@ class VariableHistory(object):
else:
return []

def emit(self, var, oval, val, o, d):
def emit(self, var, oval, val, o):
history = self.variable(var)

# Append override history
if var in d.overridedata:
for (r, override) in d.overridedata[var]:
for event in self.variable(r):
loginfo = event.copy()
if 'flag' in loginfo and not loginfo['flag'].startswith("_"):
continue
loginfo['variable'] = var
loginfo['op'] = 'override[%s]:%s' % (override, loginfo['op'])
history.append(loginfo)

commentVal = re.sub('\n', '\n#', str(oval))
if history:
if len(history) == 1:
@@ -288,7 +263,7 @@ class VariableHistory(object):
flag = ''
o.write("# %s %s:%s%s\n# %s\"%s\"\n" % (event['op'], event['file'], event['line'], display_func, flag, re.sub('\n', '\n# ', event['detail'])))
if len(history) > 1:
o.write("# pre-expansion value:\n")
o.write("# computed:\n")
o.write('# "%s"\n' % (commentVal))
else:
o.write("#\n# $%s\n# [no history recorded]\n#\n" % var)
@@ -312,31 +287,6 @@ class VariableHistory(object):
lines.append(line)
return lines

def get_variable_items_files(self, var, d):
"""
Use variable history to map items added to a list variable and
the files in which they were added.
"""
history = self.variable(var)
finalitems = (d.getVar(var, True) or '').split()
filemap = {}
isset = False
for event in history:
if 'flag' in event:
continue
if event['op'] == '_remove':
continue
if isset and event['op'] == 'set?':
continue
isset = True
items = d.expand(event['detail']).split()
for item in items:
# This is a little crude but is belt-and-braces to avoid us
# having to handle every possible operation type specifically
if item in finalitems and not item in filemap:
filemap[item] = event['file']
return filemap

def del_var_history(self, var, f=None, line=None):
"""If file f and line are not given, the entire history of var is deleted"""
if var in self.variables:
@@ -346,23 +296,18 @@ class VariableHistory(object):
self.variables[var] = []

class DataSmart(MutableMapping):
def __init__(self):
def __init__(self, special = COWDictBase.copy(), seen = COWDictBase.copy() ):
self.dict = {}

self.inchistory = IncludeHistory()
self.varhistory = VariableHistory(self)
self._tracking = False

self.expand_cache = {}

# cookie monster tribute
# Need to be careful about writes to overridedata as
# its only a shallow copy, could influence other data store
# copies!
self.overridedata = {}
self.overrides = None
self.overridevars = set(["OVERRIDES", "FILE"])
self.inoverride = False
self._special_values = special
self._seen_overrides = seen

self.expand_cache = {}

def enableTracking(self):
self._tracking = True
@@ -389,11 +334,10 @@ class DataSmart(MutableMapping):
break
except ExpansionError:
raise
except bb.parse.SkipRecipe:
except bb.parse.SkipPackage:
raise
except Exception as exc:
exc_class, exc, tb = sys.exc_info()
raise ExpansionError, ExpansionError(varname, s, exc), tb
raise ExpansionError(varname, s, exc)

varparse.value = s

@@ -405,34 +349,97 @@ class DataSmart(MutableMapping):
def expand(self, s, varname = None):
return self.expandWithRefs(s, varname).value


def finalize(self, parent = False):
return

def internal_finalize(self, parent = False):
"""Performs final steps upon the datastore, including application of overrides"""
self.overrides = None

def need_overrides(self):
if self.overrides is not None:
return
if self.inoverride:
return
for count in range(5):
self.inoverride = True
# Can end up here recursively so setup dummy values
self.overrides = []
self.overridesset = set()
self.overrides = (self.getVar("OVERRIDES", True) or "").split(":") or []
self.overridesset = set(self.overrides)
self.inoverride = False
self.expand_cache = {}
newoverrides = (self.getVar("OVERRIDES", True) or "").split(":") or []
if newoverrides == self.overrides:
break
self.overrides = newoverrides
self.overridesset = set(self.overrides)
else:
bb.fatal("Overrides could not be expanded into a stable state after 5 iterations, overrides must be being referenced by other overridden variables in some recursive fashion. Please provide your configuration to bitbake-devel so we can laugh, er, I mean try and understand how to make it work.")
overrides = (self.getVar("OVERRIDES", True) or "").split(":") or []
finalize_caller = {
'op': 'finalize',
}
infer_caller_details(finalize_caller, parent = parent, varval = False)

#
# Well let us see what breaks here. We used to iterate
# over each variable and apply the override and then
# do the line expanding.
# If we have bad luck - which we will have - the keys
# where in some order that is so important for this
# method which we don't have anymore.
# Anyway we will fix that and write test cases this
# time.

#
# First we apply all overrides
# Then we will handle _append and _prepend and store the _remove
# information for later.
#

# We only want to report finalization once per variable overridden.
finalizes_reported = {}

for o in overrides:
# calculate '_'+override
l = len(o) + 1

# see if one should even try
if o not in self._seen_overrides:
continue

vars = self._seen_overrides[o].copy()
for var in vars:
name = var[:-l]
try:
# Report only once, even if multiple changes.
if name not in finalizes_reported:
finalizes_reported[name] = True
finalize_caller['variable'] = name
finalize_caller['detail'] = 'was: ' + str(self.getVar(name, False))
self.varhistory.record(**finalize_caller)
# Copy history of the override over.
for event in self.varhistory.variable(var):
loginfo = event.copy()
loginfo['variable'] = name
loginfo['op'] = 'override[%s]:%s' % (o, loginfo['op'])
self.varhistory.record(**loginfo)
self.setVar(name, self.getVar(var, False), op = 'finalize', file = 'override[%s]' % o, line = '')
self.delVar(var)
except Exception:
logger.info("Untracked delVar")

# now on to the appends and prepends, and stashing the removes
for op in __setvar_keyword__:
if op in self._special_values:
appends = self._special_values[op] or []
for append in appends:
keep = []
for (a, o) in self.getVarFlag(append, op) or []:
match = True
if o:
for o2 in o.split("_"):
if not o2 in overrides:
match = False
if not match:
keep.append((a ,o))
continue

if op == "_append":
sval = self.getVar(append, False) or ""
sval += a
self.setVar(append, sval)
elif op == "_prepend":
sval = a + (self.getVar(append, False) or "")
self.setVar(append, sval)
elif op == "_remove":
removes = self.getVarFlag(append, "_removeactive", False) or []
removes.extend(a.split())
self.setVarFlag(append, "_removeactive", removes, ignore=True)

# We save overrides that may be applied at some later stage
if keep:
self.setVarFlag(append, op, keep, ignore=True)
else:
self.delVarFlag(append, op, ignore=True)

def initVar(self, var):
self.expand_cache = {}
@@ -463,10 +470,6 @@ class DataSmart(MutableMapping):

def setVar(self, var, value, **loginfo):
#print("var=" + str(var) + " val=" + str(value))
parsing=False
if 'parsing' in loginfo:
parsing=True

if 'op' not in loginfo:
loginfo['op'] = "set"
self.expand_cache = {}
@@ -488,93 +491,47 @@ class DataSmart(MutableMapping):
self.varhistory.record(**loginfo)
# todo make sure keyword is not __doc__ or __module__
# pay the cookie monster
try:
self._special_values[keyword].add(base)
except KeyError:
self._special_values[keyword] = set()
self._special_values[keyword].add(base)

# more cookies for the cookie monster
if '_' in var:
self._setvar_update_overrides(base, **loginfo)

if base in self.overridevars:
self._setvar_update_overridevars(var, value)
return

if not var in self.dict:
self._makeShadowCopy(var)

if not parsing:
if "_append" in self.dict[var]:
del self.dict[var]["_append"]
if "_prepend" in self.dict[var]:
del self.dict[var]["_prepend"]
if var in self.overridedata:
active = []
self.need_overrides()
for (r, o) in self.overridedata[var]:
if o in self.overridesset:
active.append(r)
elif "_" in o:
if set(o.split("_")).issubset(self.overridesset):
active.append(r)
for a in active:
self.delVar(a)
del self.overridedata[var]

# more cookies for the cookie monster
if '_' in var:
self._setvar_update_overrides(var, **loginfo)
self._setvar_update_overrides(var)

# setting var
self.dict[var]["_content"] = value
self.varhistory.record(**loginfo)

if var in self.overridevars:
self._setvar_update_overridevars(var, value)

def _setvar_update_overridevars(self, var, value):
vardata = self.expandWithRefs(value, var)
new = vardata.references
new.update(vardata.contains.keys())
while not new.issubset(self.overridevars):
nextnew = set()
self.overridevars.update(new)
for i in new:
vardata = self.expandWithRefs(self.getVar(i, True), i)
nextnew.update(vardata.references)
nextnew.update(vardata.contains.keys())
new = nextnew
self.internal_finalize(True)

def _setvar_update_overrides(self, var, **loginfo):
def _setvar_update_overrides(self, var):
# aka pay the cookie monster
override = var[var.rfind('_')+1:]
shortvar = var[:var.rfind('_')]
while override and override.islower():
if shortvar not in self.overridedata:
self.overridedata[shortvar] = []
if [var, override] not in self.overridedata[shortvar]:
# Force CoW by recreating the list first
self.overridedata[shortvar] = list(self.overridedata[shortvar])
self.overridedata[shortvar].append([var, override])
override = None
if "_" in shortvar:
override = var[shortvar.rfind('_')+1:]
shortvar = var[:shortvar.rfind('_')]
if len(shortvar) == 0:
override = None
if len(override) > 0:
if override not in self._seen_overrides:
self._seen_overrides[override] = set()
self._seen_overrides[override].add( var )

def getVar(self, var, expand=False, noweakdefault=False, parsing=False):
return self.getVarFlag(var, "_content", expand, noweakdefault, parsing)
def getVar(self, var, expand=False, noweakdefault=False):
return self.getVarFlag(var, "_content", expand, noweakdefault)

def renameVar(self, key, newkey, **loginfo):
"""
Rename the variable key to newkey
"""
val = self.getVar(key, 0, parsing=True)
val = self.getVar(key, 0)
if val is not None:
loginfo['variable'] = newkey
loginfo['op'] = 'rename from %s' % key
loginfo['detail'] = val
self.varhistory.record(**loginfo)
self.setVar(newkey, val, ignore=True, parsing=True)
self.setVar(newkey, val, ignore=True)

for i in (__setvar_keyword__):
src = self.getVarFlag(key, i)
@@ -585,14 +542,9 @@ class DataSmart(MutableMapping):
dest.extend(src)
self.setVarFlag(newkey, i, dest, ignore=True)

if key in self.overridedata:
self.overridedata[newkey] = []
for (v, o) in self.overridedata[key]:
self.overridedata[newkey].append([v.replace(key, newkey), o])
self.renameVar(v, v.replace(key, newkey))

if '_' in newkey and val is None:
self._setvar_update_overrides(newkey, **loginfo)
if i in self._special_values and key in self._special_values[i]:
self._special_values[i].remove(key)
self._special_values[i].add(newkey)

loginfo['variable'] = key
loginfo['op'] = 'rename (to)'
@@ -603,12 +555,14 @@ class DataSmart(MutableMapping):
def appendVar(self, var, value, **loginfo):
loginfo['op'] = 'append'
self.varhistory.record(**loginfo)
self.setVar(var + "_append", value, ignore=True, parsing=True)
newvalue = (self.getVar(var, False) or "") + value
self.setVar(var, newvalue, ignore=True)

def prependVar(self, var, value, **loginfo):
loginfo['op'] = 'prepend'
self.varhistory.record(**loginfo)
self.setVar(var + "_prepend", value, ignore=True, parsing=True)
newvalue = value + (self.getVar(var, False) or "")
self.setVar(var, newvalue, ignore=True)

def delVar(self, var, **loginfo):
loginfo['detail'] = ""
@@ -616,28 +570,12 @@ class DataSmart(MutableMapping):
self.varhistory.record(**loginfo)
self.expand_cache = {}
self.dict[var] = {}
if var in self.overridedata:
del self.overridedata[var]
if '_' in var:
override = var[var.rfind('_')+1:]
shortvar = var[:var.rfind('_')]
while override and override.islower():
try:
if shortvar in self.overridedata:
# Force CoW by recreating the list first
self.overridedata[shortvar] = list(self.overridedata[shortvar])
self.overridedata[shortvar].remove([var, override])
except ValueError as e:
pass
override = None
if "_" in shortvar:
override = var[shortvar.rfind('_')+1:]
shortvar = var[:shortvar.rfind('_')]
if len(shortvar) == 0:
override = None
if override and override in self._seen_overrides and var in self._seen_overrides[override]:
self._seen_overrides[override].remove(var)

def setVarFlag(self, var, flag, value, **loginfo):
self.expand_cache = {}
if 'op' not in loginfo:
loginfo['op'] = "set"
loginfo['flag'] = flag
@@ -646,10 +584,8 @@ class DataSmart(MutableMapping):
self._makeShadowCopy(var)
self.dict[var][flag] = value

if flag == "_defaultval" and '_' in var:
self._setvar_update_overrides(var, **loginfo)
if flag == "_defaultval" and var in self.overridevars:
self._setvar_update_overridevars(var, value)
if flag == "defaultval" and '_' in var:
self._setvar_update_overrides(var)

if flag == "unexport" or flag == "export":
if not "__exportlist" in self.dict:
@@ -658,71 +594,14 @@ class DataSmart(MutableMapping):
self.dict["__exportlist"]["_content"] = set()
self.dict["__exportlist"]["_content"].add(var)

def getVarFlag(self, var, flag, expand=False, noweakdefault=False, parsing=False):
def getVarFlag(self, var, flag, expand=False, noweakdefault=False):
local_var = self._findVar(var)
value = None
if flag == "_content" and var in self.overridedata and not parsing:
match = False
active = {}
self.need_overrides()
for (r, o) in self.overridedata[var]:
# What about double overrides both with "_" in the name?
if o in self.overridesset:
active[o] = r
elif "_" in o:
if set(o.split("_")).issubset(self.overridesset):
active[o] = r

mod = True
while mod:
mod = False
for o in self.overrides:
for a in active.copy():
if a.endswith("_" + o):
t = active[a]
del active[a]
active[a.replace("_" + o, "")] = t
mod = True
elif a == o:
match = active[a]
del active[a]
if match:
value = self.getVar(match)

if local_var is not None and value is None:
if local_var is not None:
if flag in local_var:
value = copy.copy(local_var[flag])
elif flag == "_content" and "_defaultval" in local_var and not noweakdefault:
value = copy.copy(local_var["_defaultval"])


if flag == "_content" and local_var is not None and "_append" in local_var and not parsing:
if not value:
value = ""
self.need_overrides()
for (r, o) in local_var["_append"]:
match = True
if o:
for o2 in o.split("_"):
if not o2 in self.overrides:
match = False
if match:
value = value + r

if flag == "_content" and local_var is not None and "_prepend" in local_var and not parsing:
if not value:
value = ""
self.need_overrides()
for (r, o) in local_var["_prepend"]:

match = True
if o:
for o2 in o.split("_"):
if not o2 in self.overrides:
match = False
if match:
value = r + value

elif flag == "_content" and "defaultval" in local_var and not noweakdefault:
value = copy.copy(local_var["defaultval"])
if expand and value:
# Only getvar (flag == _content) hits the expand cache
cachename = None
@@ -731,30 +610,17 @@ class DataSmart(MutableMapping):
else:
cachename = var + "[" + flag + "]"
value = self.expand(value, cachename)

if value and flag == "_content" and local_var is not None and "_remove" in local_var:
removes = []
self.need_overrides()
for (r, o) in local_var["_remove"]:
match = True
if o:
for o2 in o.split("_"):
if not o2 in self.overrides:
match = False
if match:
removes.extend(self.expand(r).split())

filtered = filter(lambda v: v not in removes,
value.split())
if value and flag == "_content" and local_var is not None and "_removeactive" in local_var:
filtered = filter(lambda v: v not in local_var["_removeactive"],
value.split(" "))
value = " ".join(filtered)
if expand and var in self.expand_cache:
if expand:
# We need to ensure the expand cache has the correct value
# flag == "_content" here
self.expand_cache[var].value = value
return value

def delVarFlag(self, var, flag, **loginfo):
self.expand_cache = {}
local_var = self._findVar(var)
if not local_var:
return
@@ -784,7 +650,6 @@ class DataSmart(MutableMapping):
self.setVarFlag(var, flag, newvalue, ignore=True)

def setVarFlags(self, var, flags, **loginfo):
self.expand_cache = {}
infer_caller_details(loginfo)
if not var in self.dict:
self._makeShadowCopy(var)
@@ -814,7 +679,6 @@ class DataSmart(MutableMapping):


def delVarFlags(self, var, **loginfo):
self.expand_cache = {}
if not var in self.dict:
self._makeShadowCopy(var)

@@ -832,12 +696,13 @@ class DataSmart(MutableMapping):
else:
del self.dict[var]


def createCopy(self):
"""
Create a copy of self by setting _data to self
"""
# we really want this to be a DataSmart...
data = DataSmart()
data = DataSmart(seen=self._seen_overrides.copy(), special=self._special_values.copy())
data.dict["_data"] = self.dict
data.varhistory = self.varhistory.copy()
data.varhistory.datasmart = data
@@ -845,12 +710,6 @@ class DataSmart(MutableMapping):

data._tracking = self._tracking

data.overrides = None
data.overridevars = copy.copy(self.overridevars)
# Should really be a deepcopy but has heavy overhead.
# Instead, we're careful with writes.
data.overridedata = copy.copy(self.overridedata)

return data

def expandVarref(self, variable, parents=False):
@@ -875,19 +734,12 @@ class DataSmart(MutableMapping):
yield key

def __iter__(self):
deleted = set()
overrides = set()
def keylist(d):
klist = set()
for key in d:
if key == "_data":
continue
if key in deleted:
continue
if key in overrides:
continue
if not d[key]:
deleted.add(key)
continue
klist.add(key)

@@ -896,21 +748,9 @@ class DataSmart(MutableMapping):

return klist

self.need_overrides()
for var in self.overridedata:
for (r, o) in self.overridedata[var]:
if o in self.overridesset:
overrides.add(var)
elif "_" in o:
if set(o.split("_")).issubset(self.overridesset):
overrides.add(var)

for k in keylist(self.dict):
yield k

for k in overrides:
yield k

def __len__(self):
return len(frozenset(self))


@@ -55,7 +55,6 @@ def get_class_handlers():
return _handlers

def set_class_handlers(h):
global _handlers
_handlers = h

def clean_class_handlers():
@@ -68,18 +67,12 @@ _ui_logfilters = {}
_ui_handler_seq = 0
_event_handler_map = {}
_catchall_handlers = {}
_eventfilter = None
_uiready = False

def execute_handler(name, handler, event, d):
event.data = d
addedd = False
if 'd' not in __builtins__:
__builtins__['d'] = d
addedd = True
try:
ret = handler(event)
except (bb.parse.SkipRecipe, bb.BBHandledException):
except bb.parse.SkipPackage:
raise
except Exception:
etype, value, tb = sys.exc_info()
@@ -92,8 +85,6 @@ def execute_handler(name, handler, event, d):
raise
finally:
del event.data
if addedd:
del __builtins__['d']

def fire_class_handlers(event, d):
if isinstance(event, logging.LogRecord):
@@ -103,10 +94,10 @@ def fire_class_handlers(event, d):
evt_hmap = _event_handler_map.get(eid, {})
for name, handler in _handlers.iteritems():
if name in _catchall_handlers or name in evt_hmap:
if _eventfilter:
if not _eventfilter(name, handler, event, d):
continue
execute_handler(name, handler, event, d)
try:
execute_handler(name, handler, event, d)
except Exception:
continue

ui_queue = []
@atexit.register
@@ -114,7 +105,7 @@ def print_ui_queue():
"""If we're exiting before a UI has been spawned, display any queued
LogRecords to the console."""
logger = logging.getLogger("BitBake")
if not _uiready:
if not _ui_handlers:
from bb.msg import BBLogFormatter
console = logging.StreamHandler(sys.stdout)
console.setFormatter(BBLogFormatter("%(levelname)s: %(message)s"))
@@ -136,7 +127,7 @@ def print_ui_queue():
logger.handle(event)

def fire_ui_handlers(event, d):
if not _uiready:
if not _ui_handlers:
# No UI handlers registered yet, queue up the messages
ui_queue.append(event)
return
@@ -177,7 +168,7 @@ def fire_from_worker(event, d):
fire_ui_handlers(event, d)

noop = lambda _: None
def register(name, handler, mask=None):
def register(name, handler, mask=[]):
"""Register an Event handler"""

# already registered
@@ -216,14 +207,7 @@ def remove(name, handler):
"""Remove an Event handler"""
_handlers.pop(name)

def set_eventfilter(func):
global _eventfilter
_eventfilter = func

def register_UIHhandler(handler, mainui=False):
if mainui:
global _uiready
_uiready = True
def register_UIHhandler(handler):
bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1
_ui_handlers[_ui_handler_seq] = handler
level, debug_domains = bb.msg.constructLogOptions()
@@ -374,12 +358,11 @@ class BuildStarted(BuildBase, OperationStarted):

class BuildCompleted(BuildBase, OperationCompleted):
"""bbmake build run completed"""
def __init__(self, total, n, p, failures=0, interrupted=0):
def __init__(self, total, n, p, failures = 0):
if not failures:
OperationCompleted.__init__(self, total, "Building Succeeded")
else:
OperationCompleted.__init__(self, total, "Building Failed")
self._interrupted = interrupted
BuildBase.__init__(self, n, p, failures)

class DiskFull(Event):
@@ -394,7 +377,7 @@ class DiskFull(Event):
class NoProvider(Event):
"""No Provider for an Event"""

def __init__(self, item, runtime=False, dependees=None, reasons=None, close_matches=None):
def __init__(self, item, runtime=False, dependees=None, reasons=[], close_matches=[]):
Event.__init__(self)
self._item = item
self._runtime = runtime
@@ -508,16 +491,6 @@ class TargetsTreeGenerated(Event):
Event.__init__(self)
self._model = model

class ReachableStamps(Event):
"""
An event listing all stamps reachable after parsing
which the metadata may use to clean up stale data
"""

def __init__(self, stamps):
Event.__init__(self)
self.stamps = stamps

class FilesMatchingFound(Event):
"""
Event when a list of files matching the supplied pattern has
@@ -624,11 +597,11 @@ class MetadataEvent(Event):
def __init__(self, eventtype, eventdata):
Event.__init__(self)
self.type = eventtype
self._localdata = eventdata
self.data = eventdata

class SanityCheck(Event):
"""
Event to run sanity checks, either raise errors or generate events as return status.
Event to runs sanity checks, either raise errors or generate events as return status.
"""
def __init__(self, generateevents = True):
Event.__init__(self)
@@ -636,7 +609,7 @@ class SanityCheck(Event):

class SanityCheckPassed(Event):
"""
Event to indicate sanity check has passed
Event to indicate sanity check is passed
"""

class SanityCheckFailed(Event):

@@ -45,13 +45,6 @@ _checksum_cache = bb.checksum.FileChecksumCache()
|
||||
|
||||
logger = logging.getLogger("BitBake.Fetcher")
|
||||
|
||||
try:
|
||||
import cPickle as pickle
|
||||
except ImportError:
|
||||
import pickle
|
||||
logger.info("Importing cPickle failed. "
|
||||
"Falling back to a very slow implementation.")
|
||||
|
||||
class BBFetchException(Exception):
|
||||
"""Class all fetch exceptions inherit from"""
|
||||
def __init__(self, message):
|
||||
@@ -61,24 +54,10 @@ class BBFetchException(Exception):
|
||||
def __str__(self):
|
||||
return self.msg
|
||||
|
||||
class UntrustedUrl(BBFetchException):
|
||||
"""Exception raised when encountering a host not listed in BB_ALLOWED_NETWORKS"""
|
||||
def __init__(self, url, message=''):
|
||||
if message:
|
||||
msg = message
|
||||
else:
|
||||
msg = "The URL: '%s' is not trusted and cannot be used" % url
|
||||
self.url = url
|
||||
BBFetchException.__init__(self, msg)
|
||||
self.args = (url,)
|
||||
|
||||
class MalformedUrl(BBFetchException):
|
||||
"""Exception raised when encountering an invalid url"""
|
||||
def __init__(self, url, message=''):
|
||||
if message:
|
||||
msg = message
|
||||
else:
|
||||
msg = "The URL: '%s' is invalid and cannot be interpreted" % url
|
||||
def __init__(self, url):
|
||||
msg = "The URL: '%s' is invalid and cannot be interpreted" % url
|
||||
self.url = url
|
||||
BBFetchException.__init__(self, msg)
|
||||
self.args = (url,)
|
||||
@@ -392,11 +371,8 @@ def decodeurl(url):
|
||||
p = {}
|
||||
if parm:
|
||||
for s in parm.split(';'):
|
||||
if s:
|
||||
if not '=' in s:
|
||||
raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s))
|
||||
s1, s2 = s.split('=')
|
||||
p[s1] = s2
|
||||
s1, s2 = s.split('=')
|
||||
p[s1] = s2
|
||||
|
||||
return type, host, urllib.unquote(path), user, pswd, p
|
||||
|
||||
@@ -464,7 +440,7 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d):
|
||||
for k in replacements:
|
||||
uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k])
|
||||
#bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc]))
|
||||
result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc], 1)
|
||||
result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc])
|
||||
if loc == 2:
|
||||
# Handle path manipulations
|
||||
basename = None
|
||||
@@ -543,7 +519,7 @@ def fetcher_compare_revisions(d):
|
||||
def mirror_from_string(data):
|
||||
return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ]
|
||||
|
||||
def verify_checksum(ud, d, precomputed={}):
|
||||
def verify_checksum(ud, d):
|
||||
"""
|
||||
verify the MD5 and SHA256 checksum for downloaded src
|
||||
|
||||
@@ -551,33 +527,18 @@ def verify_checksum(ud, d, precomputed={}):
|
||||
the downloaded file, or if BB_STRICT_CHECKSUM is set and there are no
|
||||
checksums specified.
|
||||
|
||||
Returns a dict of checksums that can be stored in a done stamp file and
|
||||
passed in as precomputed parameter in a later call to avoid re-computing
|
||||
the checksums from the file. This allows verifying the checksums of the
|
||||
file against those in the recipe each time, rather than only after
|
||||
downloading. See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571.
|
||||
"""
|
||||
|
||||
_MD5_KEY = "md5"
|
||||
_SHA256_KEY = "sha256"
|
||||
if not ud.method.supports_checksum(ud):
|
||||
return
|
||||
|
||||
if ud.ignore_checksums or not ud.method.supports_checksum(ud):
|
||||
return {}
|
||||
|
||||
if _MD5_KEY in precomputed:
|
||||
md5data = precomputed[_MD5_KEY]
|
||||
else:
|
||||
md5data = bb.utils.md5_file(ud.localpath)
|
||||
|
||||
if _SHA256_KEY in precomputed:
|
||||
sha256data = precomputed[_SHA256_KEY]
|
||||
else:
|
||||
sha256data = bb.utils.sha256_file(ud.localpath)
|
||||
md5data = bb.utils.md5_file(ud.localpath)
|
||||
sha256data = bb.utils.sha256_file(ud.localpath)
|
||||
|
||||
if ud.method.recommends_checksum(ud):
|
||||
# If strict checking enabled and neither sum defined, raise error
|
||||
strict = d.getVar("BB_STRICT_CHECKSUM", True) or "0"
|
||||
if (strict == "1") and not (ud.md5_expected or ud.sha256_expected):
|
||||
strict = d.getVar("BB_STRICT_CHECKSUM", True) or None
|
||||
if strict and not (ud.md5_expected or ud.sha256_expected):
|
||||
logger.error('No checksum specified for %s, please add at least one to the recipe:\n'
|
||||
'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
|
||||
(ud.localpath, ud.md5_name, md5data,
|
||||
@@ -622,73 +583,6 @@ def verify_checksum(ud, d, precomputed={}):
|
||||
if len(msg):
|
||||
raise ChecksumError('Checksum mismatch!%s' % msg, ud.url, md5data)
|
||||
|
||||
return {
|
||||
_MD5_KEY: md5data,
|
||||
_SHA256_KEY: sha256data
|
||||
}
|
||||
|
||||
|
||||
def verify_donestamp(ud, d, origud=None):
|
||||
"""
|
||||
Check whether the done stamp file has the right checksums (if the fetch
|
||||
method supports them). If it doesn't, delete the done stamp and force
|
||||
a re-download.
|
||||
|
||||
Returns True, if the donestamp exists and is valid, False otherwise. When
|
||||
returning False, any existing done stamps are removed.
|
||||
"""
|
||||
if not os.path.exists(ud.donestamp):
|
||||
return False
|
||||
|
||||
if (not ud.method.supports_checksum(ud) or
|
||||
(origud and not origud.method.supports_checksum(origud))):
|
||||
# done stamp exists, checksums not supported; assume the local file is
|
||||
# current
|
||||
return True
|
||||
|
||||
if not os.path.exists(ud.localpath):
|
||||
# done stamp exists, but the downloaded file does not; the done stamp
|
||||
# must be incorrect, re-trigger the download
|
||||
bb.utils.remove(ud.donestamp)
|
||||
return False
|
||||
|
||||
precomputed_checksums = {}
|
||||
# Only re-use the precomputed checksums if the donestamp is newer than the
|
||||
# file. Do not rely on the mtime of directories, though. If ud.localpath is
|
||||
# a directory, there will probably not be any checksums anyway.
|
||||
if (os.path.isdir(ud.localpath) or
|
||||
os.path.getmtime(ud.localpath) < os.path.getmtime(ud.donestamp)):
|
||||
try:
|
||||
with open(ud.donestamp, "rb") as cachefile:
|
||||
pickled = pickle.Unpickler(cachefile)
|
||||
precomputed_checksums.update(pickled.load())
|
||||
except Exception as e:
|
||||
# Avoid the warnings on the upgrade path from emtpy done stamp
|
||||
# files to those containing the checksums.
|
||||
if not isinstance(e, EOFError):
|
||||
# Ignore errors, they aren't fatal
|
||||
logger.warn("Couldn't load checksums from donestamp %s: %s "
|
||||
"(msg: %s)" % (ud.donestamp, type(e).__name__,
|
||||
str(e)))
|
||||
|
||||
try:
|
||||
checksums = verify_checksum(ud, d, precomputed_checksums)
|
||||
# If the cache file did not have the checksums, compute and store them
|
||||
# as an upgrade path from the previous done stamp file format.
|
||||
if checksums != precomputed_checksums:
|
||||
with open(ud.donestamp, "wb") as cachefile:
|
||||
p = pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL)
|
||||
p.dump(checksums)
|
||||
return True
|
||||
except ChecksumError as e:
|
||||
# Checksums failed to verify, trigger re-download and remove the
|
||||
# incorrect stamp file.
|
||||
logger.warn("Checksum mismatch for local file %s\n"
|
||||
"Cleaning and trying again." % ud.localpath)
|
||||
rename_bad_checksum(ud, e.checksum)
|
||||
bb.utils.remove(ud.donestamp)
|
||||
return False
|
||||
|
||||
|
||||
def update_stamp(ud, d):
|
||||
"""
|
||||
@@ -703,11 +597,8 @@ def update_stamp(ud, d):
|
||||
# Errors aren't fatal here
|
||||
pass
|
||||
else:
|
||||
checksums = verify_checksum(ud, d)
|
||||
# Store the checksums for later re-verification against the recipe
|
||||
with open(ud.donestamp, "wb") as cachefile:
|
||||
p = pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL)
|
||||
p.dump(checksums)
|
||||
verify_checksum(ud, d)
|
||||
open(ud.donestamp, 'w').close()
|
||||
|
||||
def subprocess_setup():
|
||||
# Python installs a SIGPIPE handler by default. This is usually not what
|
||||
@@ -721,18 +612,13 @@ def get_autorev(d):
|
||||
d.setVar('__BB_DONT_CACHE', '1')
|
||||
return "AUTOINC"
|
||||
|
||||
def get_srcrev(d, method_name='sortable_revision'):
|
||||
def get_srcrev(d):
|
||||
"""
|
||||
Return the revsion string, usually for use in the version string (PV) of the current package
|
||||
Return the version string for the current package
|
||||
(usually to be used as PV)
|
||||
Most packages usually only have one SCM so we just pass on the call.
|
||||
In the multi SCM case, we build a value based on SRCREV_FORMAT which must
|
||||
have been set.
|
||||
|
||||
The idea here is that we put the string "AUTOINC+" into return value if the revisions are not
|
||||
incremental, other code is then responsible for turning that into an increasing value (if needed)
|
||||
|
||||
A method_name can be supplied to retrieve an alternatively formatted revision from a fetcher, if
|
||||
that fetcher provides a method with the given name and the same signature as sortable_revision.
|
||||
"""
|
||||
|
||||
scms = []
|
||||
@@ -746,7 +632,7 @@ def get_srcrev(d, method_name='sortable_revision'):
|
||||
raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")
|
||||
|
||||
if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
|
||||
autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].names[0])
|
||||
autoinc, rev = urldata[scms[0]].method.sortable_revision(urldata[scms[0]], d, urldata[scms[0]].names[0])
|
||||
if len(rev) > 10:
|
||||
rev = rev[:10]
|
||||
if autoinc:
|
||||
@@ -764,7 +650,7 @@ def get_srcrev(d, method_name='sortable_revision'):
|
||||
for scm in scms:
|
||||
ud = urldata[scm]
|
||||
for name in ud.names:
|
||||
autoinc, rev = getattr(ud.method, method_name)(ud, d, name)
|
||||
autoinc, rev = ud.method.sortable_revision(ud, d, name)
|
||||
seenautoinc = seenautoinc or autoinc
|
||||
if len(rev) > 10:
|
||||
rev = rev[:10]
@@ -778,7 +664,7 @@ def localpath(url, d):
    fetcher = bb.fetch2.Fetch([url], d)
    return fetcher.localpath(url)

def runfetchcmd(cmd, d, quiet=False, cleanup=None):
def runfetchcmd(cmd, d, quiet = False, cleanup = []):
    """
    Run cmd returning the command output
    Raise an error if interrupted or cmd fails
@@ -798,14 +684,9 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None):
                  'NO_PROXY', 'no_proxy',
                  'ALL_PROXY', 'all_proxy',
                  'GIT_PROXY_COMMAND',
                  'GIT_SSL_CAINFO',
                  'GIT_SMART_HTTP',
                  'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
                  'SOCKS5_USER', 'SOCKS5_PASSWD']

    if not cleanup:
        cleanup = []
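
The `cleanup=None` / `cleanup = []` pair above is the classic Python mutable-default-argument fix; a short illustration of why the change matters:

def broken(item, bucket=[]):
    # The default list is created once at definition time and shared
    # across calls, so state leaks between invocations.
    bucket.append(item)
    return bucket

def fixed(item, bucket=None):
    if bucket is None:
        bucket = []  # a fresh list per call
    bucket.append(item)
    return bucket

assert broken(1) == [1]
assert broken(2) == [1, 2]   # surprise: shared default
assert fixed(1) == [1]
assert fixed(2) == [2]       # independent defaults
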

    for var in exportvars:
        val = d.getVar(var, True)
        if val:
@@ -862,7 +743,7 @@ def build_mirroruris(origud, mirrors, ld):
    replacements["BASENAME"] = origud.path.split("/")[-1]
    replacements["MIRRORNAME"] = origud.host.replace(':','.') + origud.path.replace('/', '.').replace('*', '.')

    def adduri(ud, uris, uds, mirrors):
    def adduri(ud, uris, uds):
        for line in mirrors:
            try:
                (find, replace) = line
@@ -871,17 +752,6 @@ def build_mirroruris(origud, mirrors, ld):
            newuri = uri_replace(ud, find, replace, replacements, ld)
            if not newuri or newuri in uris or newuri == origud.url:
                continue

            if not trusted_network(ld, newuri):
                logger.debug(1, "Mirror %s not in the list of trusted networks, skipping" % (newuri))
                continue

            # Create a local copy of the mirrors minus the current line
            # this will prevent us from recursively processing the same line
            # as well as indirect recursion A -> B -> C -> A
            localmirrors = list(mirrors)
            localmirrors.remove(line)

            try:
                newud = FetchData(newuri, ld)
                newud.setup_localpath(ld)
@@ -889,18 +759,16 @@ def build_mirroruris(origud, mirrors, ld):
                logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
                logger.debug(1, str(e))
                try:
                    # setup_localpath of file:// urls may fail, we should still see
                    # if mirrors of the url exist
                    adduri(newud, uris, uds, localmirrors)
                    ud.method.clean(ud, ld)
                except UnboundLocalError:
                    pass
                continue
            uris.append(newuri)
            uds.append(newud)

            adduri(newud, uris, uds, localmirrors)
            adduri(newud, uris, uds)

    adduri(origud, uris, uds, mirrors)
    adduri(origud, uris, uds)

    return uris, uds
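
The "A -> B -> C -> A" comment above is worth a concrete sketch: each recursive call sees the mirror list minus the rule it came from, so even a cyclic set of rewrite rules terminates. The rewrite() helper below is a stand-in for uri_replace(), not the real function:

def rewrite(url, rule):
    find, replace = rule
    return url.replace(find, replace) if find in url else None

def expand_mirrors(url, mirrors, collected=None):
    # Recursion guard: recurse with the remaining rules only.
    if collected is None:
        collected = []
    for rule in mirrors:
        newurl = rewrite(url, rule)
        if not newurl or newurl in collected:
            continue
        collected.append(newurl)
        remaining = list(mirrors)
        remaining.remove(rule)
        expand_mirrors(newurl, remaining, collected)
    return collected

rules = [("a.example", "b.example"),
         ("b.example", "c.example"),
         ("c.example", "a.example")]
print(expand_mirrors("http://a.example/src.tar.gz", rules))  # terminates
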

@@ -917,19 +785,19 @@ def rename_bad_checksum(ud, suffix):
    bb.utils.movefile(ud.localpath, new_localpath)


def try_mirror_url(fetch, origud, ud, ld, check = False):
def try_mirror_url(origud, ud, ld, check = False):
    # Return of None or a value means we're finished
    # False means try another url
    try:
        if check:
            found = ud.method.checkstatus(fetch, ud, ld)
            found = ud.method.checkstatus(ud, ld)
            if found:
                return found
            return False

        os.chdir(ld.getVar("DL_DIR", True))

        if not verify_donestamp(ud, ld, origud) or ud.method.need_update(ud, ld):
        if not os.path.exists(ud.donestamp) or ud.method.need_update(ud, ld):
            ud.method.download(ud, ld)
            if hasattr(ud.method,"build_mirror_data"):
                ud.method.build_mirror_data(ud, ld)
@@ -945,13 +813,12 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
            dldir = ld.getVar("DL_DIR", True)
            if origud.mirrortarball and os.path.basename(ud.localpath) == os.path.basename(origud.mirrortarball) \
                    and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
                # Create donestamp in old format to avoid triggering a re-download
                bb.utils.mkdirhier(os.path.dirname(ud.donestamp))
                open(ud.donestamp, 'w').close()
                dest = os.path.join(dldir, os.path.basename(ud.localpath))
                if not os.path.exists(dest):
                    os.symlink(ud.localpath, dest)
                if not verify_donestamp(origud, ld) or origud.method.need_update(origud, ld):
                if not os.path.exists(origud.donestamp) or origud.method.need_update(origud, ld):
                    origud.method.download(origud, ld)
                    if hasattr(origud.method,"build_mirror_data"):
                        origud.method.build_mirror_data(origud, ld)
@@ -985,7 +852,7 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
        pass
    return False

def try_mirrors(fetch, d, origud, mirrors, check = False):
def try_mirrors(d, origud, mirrors, check = False):
    """
    Try to use a mirrored version of the sources.
    This method will be automatically called before the fetchers go.
@@ -999,46 +866,11 @@ def try_mirrors(fetch, d, origud, mirrors, check = False):
    uris, uds = build_mirroruris(origud, mirrors, ld)

    for index, uri in enumerate(uris):
        ret = try_mirror_url(fetch, origud, uds[index], ld, check)
        ret = try_mirror_url(origud, uds[index], ld, check)
        if ret != False:
            return ret
    return None

def trusted_network(d, url):
    """
    Use a trusted url during download if networking is enabled and
    BB_ALLOWED_NETWORKS is set globally or for a specific recipe.
    Note: modifies SRC_URI & mirrors.
    """
    if d.getVar('BB_NO_NETWORK', True) == "1":
        return True

    pkgname = d.expand(d.getVar('PN', False))
    trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname)

    if not trusted_hosts:
        trusted_hosts = d.getVar('BB_ALLOWED_NETWORKS', True)

    # Not enabled.
    if not trusted_hosts:
        return True

    scheme, network, path, user, passwd, param = decodeurl(url)

    if not network:
        return True

    network = network.lower()

    for host in trusted_hosts.split(" "):
        host = host.lower()
        if host.startswith("*.") and ("." + network).endswith(host[1:]):
            return True
        if host == network:
            return True

    return False
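
The wildcard matching in trusted_network() pairs with a configuration such as BB_ALLOWED_NETWORKS = "*.yoctoproject.org *.example.com" in local.conf. A standalone version of just the matching loop, runnable on its own:

def host_is_trusted(network, trusted_hosts):
    # Exact match, or a "*.domain" entry matching any subdomain of domain.
    network = network.lower()
    for host in trusted_hosts.split():
        host = host.lower()
        if host.startswith("*.") and ("." + network).endswith(host[1:]):
            return True
        if host == network:
            return True
    return False

assert host_is_trusted("git.example.com", "*.example.com")
assert host_is_trusted("example.com", "example.com *.gnu.org")
assert not host_is_trusted("evil.com", "*.example.com")
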

def srcrev_internal_helper(ud, d, name):
    """
    Return:
@@ -1098,21 +930,22 @@ def get_checksum_file_list(d):
            ud = fetch.ud[u]

            if ud and isinstance(ud.method, local.Local):
                paths = ud.method.localpaths(ud, d)
                for f in paths:
                    pth = ud.decodedurl
                    if '*' in pth:
                        f = os.path.join(os.path.abspath(f), pth)
                    if f.startswith(dl_dir):
                        # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
                        if os.path.exists(f):
                            bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN', True), os.path.basename(f)))
                        else:
                            bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN', True), os.path.basename(f)))
                    filelist.append(f + ":" + str(os.path.exists(f)))
                ud.setup_localpath(d)
                f = ud.localpath
                pth = ud.decodedurl
                if '*' in pth:
                    f = os.path.join(os.path.abspath(f), pth)
                if f.startswith(dl_dir):
                    # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
                    if os.path.exists(f):
                        bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN', True), os.path.basename(f)))
                    else:
                        bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN', True), os.path.basename(f)))
                filelist.append(f)

    return " ".join(filelist)


def get_file_checksums(filelist, pn):
    """Get a list of the checksums for a list of local files

@@ -1142,10 +975,7 @@ def get_file_checksums(filelist, pn):

    checksums = []
    for pth in filelist.split():
        exist = pth.split(":")[1]
        if exist == "False":
            continue
        pth = pth.split(":")[0]
        checksum = None
        if '*' in pth:
            # Handle globs
            for f in glob.glob(pth):
@@ -1153,11 +983,16 @@ def get_file_checksums(filelist, pn):
                    checksums.extend(checksum_dir(f))
                else:
                    checksum = checksum_file(f)
                    checksums.append((f, checksum))
                    if checksum:
                        checksums.append((f, checksum))
            continue
        elif os.path.isdir(pth):
            checksums.extend(checksum_dir(pth))
            continue
        else:
            checksum = checksum_file(pth)

        if checksum:
            checksums.append((pth, checksum))

    checksums.sort(key=operator.itemgetter(1))
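
A self-contained sketch of the glob/dir/file branching above, using hashlib in place of BitBake's checksum_file()/checksum_dir() helpers (names below are stand-ins, not the bb.utils API):

import glob
import hashlib
import os

def checksum_file(path):
    # md5 of one file's contents, streamed in blocks.
    h = hashlib.md5()
    with open(path, "rb") as f:
        for block in iter(lambda: f.read(65536), b""):
            h.update(block)
    return h.hexdigest()

def collect_checksums(pattern):
    # Mirrors the branching above in miniature: expand globs, recurse
    # into directories, checksum plain files, sort by checksum.
    checksums = []
    for path in sorted(glob.glob(pattern)):
        if os.path.isdir(path):
            for root, _, files in os.walk(path):
                for name in files:
                    full = os.path.join(root, name)
                    checksums.append((full, checksum_file(full)))
        else:
            checksums.append((path, checksum_file(path)))
    return sorted(checksums, key=lambda item: item[1])
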
@@ -1204,7 +1039,6 @@ class FetchData(object):
                self.sha256_expected = None
            else:
                self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)
        self.ignore_checksums = False

        self.names = self.parm.get("name",'default').split(',')

@@ -1277,7 +1111,7 @@ class FetchData(object):
class FetchMethod(object):
    """Base class for 'fetch'ing data"""

    def __init__(self, urls=None):
    def __init__(self, urls = []):
        self.urls = []

    def supports(self, urldata, d):
@@ -1361,9 +1195,9 @@ class FetchMethod(object):
            bb.fatal("Invalid value for 'unpack' parameter for %s: %s" %
                     (file, urldata.parm.get('unpack')))

        base, ext = os.path.splitext(file)
        if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz']:
            efile = os.path.join(rootdir, os.path.basename(base))
        dots = file.split(".")
        if dots[-1] in ['gz', 'bz2', 'Z', 'xz']:
            efile = os.path.join(rootdir, os.path.basename('.'.join(dots[0:-1])))
        else:
            efile = file
        cmd = None
@@ -1383,10 +1217,6 @@ class FetchMethod(object):
                cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.xz'):
                cmd = 'xz -dc %s > %s' % (file, efile)
            elif file.endswith('.tar.lz'):
                cmd = 'lzip -dc %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.lz'):
                cmd = 'lzip -dc %s > %s' % (file, efile)
            elif file.endswith('.zip') or file.endswith('.jar'):
                try:
                    dos = bb.utils.to_boolean(urldata.parm.get('dos'), False)
@@ -1432,13 +1262,8 @@ class FetchMethod(object):
                # items. So, only do so for file:// entries.
                if urldata.type == "file" and urldata.path.find("/") != -1:
                    destdir = urldata.path.rsplit("/", 1)[0]
                    if urldata.parm.get('subdir') != None:
                        destdir = urldata.parm.get('subdir') + "/" + destdir
                else:
                    if urldata.parm.get('subdir') != None:
                        destdir = urldata.parm.get('subdir')
                    else:
                        destdir = "."
                    destdir = "."
                bb.utils.mkdirhier("%s/%s" % (rootdir, destdir))
                cmd = 'cp -f %s %s/%s/' % (file, rootdir, destdir)

@@ -1483,7 +1308,7 @@ class FetchMethod(object):
        """
        return True

    def checkstatus(self, fetch, urldata, d):
    def checkstatus(self, urldata, d):
        """
        Check the status of a URL
        Assumes localpath was called first
@@ -1515,7 +1340,7 @@ class FetchMethod(object):
        return "%s-%s" % (key, d.getVar("PN", True) or "")

class Fetch(object):
    def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None):
    def __init__(self, urls, d, cache = True, localonly = False):
        if localonly and cache:
            raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")

@@ -1524,7 +1349,6 @@ class Fetch(object):
        self.urls = urls
        self.d = d
        self.ud = {}
        self.connection_cache = connection_cache

        fn = d.getVar('FILE', True)
        if cache and fn and fn in urldata_cache:
@@ -1562,11 +1386,11 @@ class Fetch(object):

        return local

    def download(self, urls=None):
    def download(self, urls = []):
        """
        Fetch all urls
        """
        if not urls:
        if len(urls) == 0:
            urls = self.urls

        network = self.d.getVar("BB_NO_NETWORK", True)
@@ -1583,12 +1407,12 @@ class Fetch(object):
            try:
                self.d.setVar("BB_NO_NETWORK", network)

                if verify_donestamp(ud, self.d) and not m.need_update(ud, self.d):
                if os.path.exists(ud.donestamp) and not m.need_update(ud, self.d):
                    localpath = ud.localpath
                elif m.try_premirror(ud, self.d):
                    logger.debug(1, "Trying PREMIRRORS")
                    mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
                    localpath = try_mirrors(self, self.d, ud, mirrors, False)
                    localpath = try_mirrors(self.d, ud, mirrors, False)

                if premirroronly:
                    self.d.setVar("BB_NO_NETWORK", "1")
@@ -1596,11 +1420,8 @@ class Fetch(object):
                os.chdir(self.d.getVar("DL_DIR", True))

                firsterr = None
                verified_stamp = verify_donestamp(ud, self.d)
                if not localpath and (not verified_stamp or m.need_update(ud, self.d)):
                if not localpath and ((not os.path.exists(ud.donestamp)) or m.need_update(ud, self.d)):
                    try:
                        if not trusted_network(self.d, ud.url):
                            raise UntrustedUrl(ud.url)
                        logger.debug(1, "Trying Upstream")
                        m.download(ud, self.d)
                        if hasattr(m, "build_mirror_data"):
@@ -1625,11 +1446,10 @@ class Fetch(object):
                        logger.debug(1, str(e))
                        firsterr = e
                        # Remove any incomplete fetch
                        if not verified_stamp:
                            m.clean(ud, self.d)
                        m.clean(ud, self.d)
                        logger.debug(1, "Trying MIRRORS")
                        mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
                        localpath = try_mirrors(self, self.d, ud, mirrors)
                        localpath = try_mirrors(self.d, ud, mirrors)

                if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1):
                    if firsterr:
@@ -1646,12 +1466,12 @@ class Fetch(object):
            finally:
                bb.utils.unlockfile(lf)

    def checkstatus(self, urls=None):
    def checkstatus(self, urls = []):
        """
        Check all urls exist upstream
        """

        if not urls:
        if len(urls) == 0:
            urls = self.urls

        for u in urls:
@@ -1661,31 +1481,34 @@ class Fetch(object):
            logger.debug(1, "Testing URL %s", u)
            # First try checking uri, u, from PREMIRRORS
            mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
            ret = try_mirrors(self, self.d, ud, mirrors, True)
            ret = try_mirrors(self.d, ud, mirrors, True)
            if not ret:
                # Next try checking from the original uri, u
                try:
                    ret = m.checkstatus(self, ud, self.d)
                    ret = m.checkstatus(ud, self.d)
                except:
                    # Finally, try checking uri, u, from MIRRORS
                    mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
                    ret = try_mirrors(self, self.d, ud, mirrors, True)
                    ret = try_mirrors(self.d, ud, mirrors, True)

            if not ret:
                raise FetchError("URL %s doesn't work" % u, u)

    def unpack(self, root, urls=None):
    def unpack(self, root, urls = []):
        """
        Unpack all urls to root
        """

        if not urls:
        if len(urls) == 0:
            urls = self.urls

        for u in urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)

            if self.d.expand(self.localpath) is None:
                continue

            if ud.lockfile:
                lf = bb.utils.lockfile(ud.lockfile)

@@ -1694,12 +1517,12 @@ class Fetch(object):
            if ud.lockfile:
                bb.utils.unlockfile(lf)

    def clean(self, urls=None):
    def clean(self, urls = []):
        """
        Clean files that the fetcher gets or places
        """

        if not urls:
        if len(urls) == 0:
            urls = self.urls

        for url in urls:
@@ -1721,42 +1544,6 @@ class Fetch(object):
            if ud.lockfile:
                bb.utils.unlockfile(lf)

class FetchConnectionCache(object):
    """
    A class which represents a container for socket connections.
    """
    def __init__(self):
        self.cache = {}

    def get_connection_name(self, host, port):
        return host + ':' + str(port)

    def add_connection(self, host, port, connection):
        cn = self.get_connection_name(host, port)

        if cn not in self.cache:
            self.cache[cn] = connection

    def get_connection(self, host, port):
        connection = None

        cn = self.get_connection_name(host, port)
        if cn in self.cache:
            connection = self.cache[cn]

        return connection

    def remove_connection(self, host, port):
        cn = self.get_connection_name(host, port)
        if cn in self.cache:
            self.cache[cn].close()
            del self.cache[cn]

    def close_connections(self):
        for cn in self.cache.keys():
            self.cache[cn].close()
            del self.cache[cn]
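
A hypothetical caller of the FetchConnectionCache class above, sketching how the wget fetcher reuses one TCP connection per host:port instead of reconnecting for every request (the get_socket() helper is illustrative, not BitBake API):

import socket

cache = FetchConnectionCache()

def get_socket(host, port):
    # Reuse a cached connection when one exists, otherwise open and cache it.
    sock = cache.get_connection(host, port)
    if sock is None:
        sock = socket.create_connection((host, port), timeout=30)
        cache.add_connection(host, port, sock)
    return sock

# ... issue requests over get_socket(...) ...
# cache.close_connections() once the fetch run is finished.
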

from . import cvs
from . import git
from . import gitsm
@@ -1771,7 +1558,6 @@ from . import bzr
from . import hg
from . import osc
from . import repo
from . import clearcase

methods.append(local.Local())
methods.append(wget.Wget())
@@ -1787,4 +1573,3 @@ methods.append(bzr.Bzr())
methods.append(hg.Hg())
methods.append(osc.Osc())
methods.append(repo.Repo())
methods.append(clearcase.ClearCase())

@@ -1,263 +0,0 @@

# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' clearcase implementation

The clearcase fetcher is used to retrieve files from a ClearCase repository.

Usage in the recipe:

    SRC_URI = "ccrc://cc.example.org/ccrc;vob=/example_vob;module=/example_module"
    SRCREV = "EXAMPLE_CLEARCASE_TAG"
    PV = "${@d.getVar("SRCREV", False).replace("/", "+")}"

The fetcher uses the rcleartool or cleartool remote client, depending on which one is available.

Supported SRC_URI options are:

- vob
    (required) The name of the ClearCase VOB (with a leading "/")

- module
    The module in the selected VOB (with a leading "/")

    The module and vob parameters are combined to create
    the following load rule in the view config spec:
        load <vob><module>

- proto
    http or https

Related variables:

    CCASE_CUSTOM_CONFIG_SPEC
        Write a config spec to this variable in your recipe to use it instead
        of the default config spec generated by this fetcher.
        Please note that the SRCREV loses its functionality if you specify
        this variable. SRCREV is still used to label the archive after a fetch,
        but it doesn't define what's fetched.

User credentials:
    cleartool:
        The login of cleartool is handled by the system. No special steps needed.

    rcleartool:
        In order to use rcleartool with authenticated users an `rcleartool login` is
        necessary before using the fetcher.
"""
# Copyright (C) 2014 Siemens AG
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#

import os
import sys
import shutil
import bb
from bb import data
from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import runfetchcmd
from bb.fetch2 import logger
from distutils import spawn

class ClearCase(FetchMethod):
    """Class to fetch urls via 'clearcase'"""
    def init(self, d):
        pass

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with Clearcase.
        """
        return ud.type in ['ccrc']

    def debug(self, msg):
        logger.debug(1, "ClearCase: %s", msg)

    def urldata_init(self, ud, d):
        """
        init ClearCase specific variable within url data
        """
        ud.proto = "https"
        if 'protocol' in ud.parm:
            ud.proto = ud.parm['protocol']
        if not ud.proto in ('http', 'https'):
            raise fetch2.ParameterError("Invalid protocol type", ud.url)

        ud.vob = ''
        if 'vob' in ud.parm:
            ud.vob = ud.parm['vob']
        else:
            msg = ud.url+": vob must be defined so the fetcher knows what to get."
            raise MissingParameterError('vob', msg)

        if 'module' in ud.parm:
            ud.module = ud.parm['module']
        else:
            ud.module = ""

        ud.basecmd = d.getVar("FETCHCMD_ccrc", True) or spawn.find_executable("cleartool") or spawn.find_executable("rcleartool")

        if data.getVar("SRCREV", d, True) == "INVALID":
            raise FetchError("Set a valid SRCREV for the clearcase fetcher in your recipe, e.g. SRCREV = \"/main/LATEST\" or any other label of your choice.")

        ud.label = d.getVar("SRCREV", False)
        ud.customspec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC", True)

        ud.server = "%s://%s%s" % (ud.proto, ud.host, ud.path)

        ud.identifier = "clearcase-%s%s-%s" % (ud.vob.replace("/", ""),
                                               ud.module.replace("/", "."),
                                               ud.label.replace("/", "."))

        ud.viewname = "%s-view%s" % (ud.identifier, d.getVar("DATETIME", d, True))
        ud.csname = "%s-config-spec" % (ud.identifier)
        ud.ccasedir = os.path.join(data.getVar("DL_DIR", d, True), ud.type)
        ud.viewdir = os.path.join(ud.ccasedir, ud.viewname)
        ud.configspecfile = os.path.join(ud.ccasedir, ud.csname)
        ud.localfile = "%s.tar.gz" % (ud.identifier)

        self.debug("host            = %s" % ud.host)
        self.debug("path            = %s" % ud.path)
        self.debug("server          = %s" % ud.server)
        self.debug("proto           = %s" % ud.proto)
        self.debug("type            = %s" % ud.type)
        self.debug("vob             = %s" % ud.vob)
        self.debug("module          = %s" % ud.module)
        self.debug("basecmd         = %s" % ud.basecmd)
        self.debug("label           = %s" % ud.label)
        self.debug("ccasedir        = %s" % ud.ccasedir)
        self.debug("viewdir         = %s" % ud.viewdir)
        self.debug("viewname        = %s" % ud.viewname)
        self.debug("configspecfile  = %s" % ud.configspecfile)
        self.debug("localfile       = %s" % ud.localfile)

        ud.localfile = os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)

    def _build_ccase_command(self, ud, command):
        """
        Build up a commandline based on ud
        command is: mkview, setcs, rmview
        """
        options = []

        if "rcleartool" in ud.basecmd:
            options.append("-server %s" % ud.server)

        basecmd = "%s %s" % (ud.basecmd, command)

        if command == 'mkview':
            if not "rcleartool" in ud.basecmd:
                # Cleartool needs a -snapshot view
                options.append("-snapshot")
            options.append("-tag %s" % ud.viewname)
            options.append(ud.viewdir)

        elif command == 'rmview':
            options.append("-force")
            options.append("%s" % ud.viewdir)

        elif command == 'setcs':
            options.append("-overwrite")
            options.append(ud.configspecfile)

        else:
            raise FetchError("Invalid ccase command %s" % command)

        ccasecmd = "%s %s" % (basecmd, " ".join(options))
        self.debug("ccasecmd = %s" % ccasecmd)
        return ccasecmd

    def _write_configspec(self, ud, d):
        """
        Create config spec file (ud.configspecfile) for ccase view
        """
        config_spec = ""
        custom_config_spec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC", d)
        if custom_config_spec is not None:
            for line in custom_config_spec.split("\\n"):
                config_spec += line+"\n"
            bb.warn("A custom config spec has been set, SRCREV is only relevant for the tarball name.")
        else:
            config_spec += "element * CHECKEDOUT\n"
            config_spec += "element * %s\n" % ud.label
            config_spec += "load %s%s\n" % (ud.vob, ud.module)

        logger.info("Using config spec: \n%s" % config_spec)

        with open(ud.configspecfile, 'w') as f:
            f.write(config_spec)
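
For the example recipe values from the module docstring (SRCREV = "EXAMPLE_CLEARCASE_TAG", vob=/example_vob, module=/example_module), the default spec generated above would be:

# Illustrative output of _write_configspec for the docstring's example values.
config_spec = (
    "element * CHECKEDOUT\n"
    "element * EXAMPLE_CLEARCASE_TAG\n"
    "load /example_vob/example_module\n"
)
print(config_spec)
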

    def _remove_view(self, ud, d):
        if os.path.exists(ud.viewdir):
            os.chdir(ud.ccasedir)
            cmd = self._build_ccase_command(ud, 'rmview')
            logger.info("cleaning up [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
            bb.fetch2.check_network_access(d, cmd, ud.url)
            output = runfetchcmd(cmd, d)
            logger.info("rmview output: %s", output)

    def need_update(self, ud, d):
        if ("LATEST" in ud.label) or (ud.customspec and "LATEST" in ud.customspec):
            ud.identifier += "-%s" % d.getVar("DATETIME",d, True)
            return True
        if os.path.exists(ud.localpath):
            return False
        return True

    def supports_srcrev(self):
        return True

    def sortable_revision(self, ud, d, name):
        return False, ud.identifier

    def download(self, ud, d):
        """Fetch url"""

        # Make a fresh view
        bb.utils.mkdirhier(ud.ccasedir)
        self._write_configspec(ud, d)
        cmd = self._build_ccase_command(ud, 'mkview')
        logger.info("creating view [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
        bb.fetch2.check_network_access(d, cmd, ud.url)
        try:
            runfetchcmd(cmd, d)
        except FetchError as e:
            if "CRCLI2008E" in e.msg:
                raise FetchError("%s\n%s\n" % (e.msg, "Call `rcleartool login` in your console to authenticate to the clearcase server before running bitbake."))
            else:
                raise e

        # Set configspec: Setting the configspec effectively fetches the files as defined in the configspec
        os.chdir(ud.viewdir)
        cmd = self._build_ccase_command(ud, 'setcs')
        logger.info("fetching data [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
        bb.fetch2.check_network_access(d, cmd, ud.url)
        output = runfetchcmd(cmd, d)
        logger.info("%s", output)

        # Copy the configspec to the viewdir so we have it in our source tarball later
        shutil.copyfile(ud.configspecfile, os.path.join(ud.viewdir, ud.csname))

        # Clean clearcase meta-data before tar

        runfetchcmd('tar -czf "%s" .' % (ud.localpath), d, cleanup = [ud.localpath])

        # Clean up so we can create a new view next time
        self.clean(ud, d)

    def clean(self, ud, d):
        self._remove_view(ud, d)
        bb.utils.remove(ud.configspecfile)
@@ -66,9 +66,7 @@ Supported SRC_URI options are:
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import errno
import os
import re
import bb
from bb import data
from bb.fetch2 import FetchMethod
@@ -125,7 +123,7 @@ class Git(FetchMethod):
            ud.branches[name] = branch
            ud.unresolvedrev[name] = branch

        ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git -c core.fsyncobjectfiles=0"
        ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git"

        ud.write_tarballs = ((data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) or "0") != "0") or ud.rebaseable

@@ -138,10 +136,7 @@ class Git(FetchMethod):
                ud.unresolvedrev[name] = ud.revisions[name]
                ud.revisions[name] = self.latest_revision(ud, d, name)

        gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.'))
        if gitsrcname.startswith('.'):
            gitsrcname = gitsrcname[1:]

        gitsrcname = '%s%s' % (ud.host.replace(':','.'), ud.path.replace('/', '.').replace('*', '.'))
        # for a rebaseable git repo, it is necessary to keep a mirror tarball
        # per revision, so that even if the revision disappears from the
        # upstream repo in the future, the mirror will remain intact and still
@@ -182,13 +177,20 @@ class Git(FetchMethod):
    def download(self, ud, d):
        """Fetch url"""

        if ud.user:
            username = ud.user + '@'
        else:
            username = ""

        ud.repochanged = not os.path.exists(ud.fullmirror)

        # If the checkout doesn't exist and the mirror tarball does, extract it
        if not os.path.exists(ud.clonedir) and os.path.exists(ud.fullmirror):
            bb.utils.mkdirhier(ud.clonedir)
            os.chdir(ud.clonedir)
            runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)

        repourl = self._get_repo_url(ud)
        repourl = "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path)

        # If the repo still doesn't exist, fall back to cloning it
        if not os.path.exists(ud.clonedir):
@@ -219,11 +221,7 @@ class Git(FetchMethod):
            runfetchcmd(fetch_cmd, d)
            runfetchcmd("%s prune-packed" % ud.basecmd, d)
            runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
            try:
                os.unlink(ud.fullmirror)
            except OSError as exc:
                if exc.errno != errno.ENOENT:
                    raise
            ud.repochanged = True
        os.chdir(ud.clonedir)
        for name in ud.names:
            if not self._contains_ref(ud, d, name):
@@ -231,7 +229,7 @@ class Git(FetchMethod):

    def build_mirror_data(self, ud, d):
        # Generate a mirror tarball if needed
        if ud.write_tarballs and not os.path.exists(ud.fullmirror):
        if ud.write_tarballs and (ud.repochanged or not os.path.exists(ud.fullmirror)):
            # it's possible that this symlink points to a read-only filesystem with PREMIRROR
            if os.path.islink(ud.fullmirror):
                os.unlink(ud.fullmirror)
@@ -277,23 +275,14 @@ class Git(FetchMethod):
            os.symlink(ud.clonedir, indirectiondir)
            clonedir = indirectiondir

        runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, cloneflags, clonedir, destdir), d)
        os.chdir(destdir)
        repourl = self._get_repo_url(ud)
        runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, repourl), d)
        runfetchcmd("git clone %s %s/ %s" % (cloneflags, clonedir, destdir), d)
        if not ud.nocheckout:
            os.chdir(destdir)
            if subdir != "":
                runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d)
                runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d)
            elif not ud.nobranch:
                branchname = ud.branches[ud.names[0]]
                runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \
                            ud.revisions[ud.names[0]]), d)
                runfetchcmd("%s branch --set-upstream %s origin/%s" % (ud.basecmd, branchname, \
                            branchname), d)
            else:
                runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d)

        return True

    def clean(self, ud, d):
@@ -322,16 +311,6 @@ class Git(FetchMethod):
            raise bb.fetch2.FetchError("The command '%s' gave output with more than one line unexpectedly, output: '%s'" % (cmd, output))
        return output.split()[0] != "0"

    def _get_repo_url(self, ud):
        """
        Return the repository URL
        """
        if ud.user:
            username = ud.user + '@'
        else:
            username = ""
        return "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path)

    def _revision_key(self, ud, d, name):
        """
        Return a unique key for the url
@@ -342,9 +321,13 @@ class Git(FetchMethod):
        """
        Run git ls-remote with the specified search string
        """
        repourl = self._get_repo_url(ud)
        cmd = "%s ls-remote %s %s" % \
              (ud.basecmd, repourl, search)
        if ud.user:
            username = ud.user + '@'
        else:
            username = ""

        cmd = "%s ls-remote %s://%s%s%s %s" % \
              (ud.basecmd, ud.proto, username, ud.host, ud.path, search)
        if ud.proto.lower() != 'file':
            bb.fetch2.check_network_access(d, cmd)
        output = runfetchcmd(cmd, d, True)
@@ -356,95 +339,17 @@ class Git(FetchMethod):
        """
        Compute the HEAD revision for the url
        """
        output = self._lsremote(ud, d, "")
        # Tags of the form ^{} may not work, need to fall back to the other form
        if ud.unresolvedrev[name][:5] == "refs/":
            head = ud.unresolvedrev[name]
            tag = ud.unresolvedrev[name]
        else:
            head = "refs/heads/%s" % ud.unresolvedrev[name]
            tag = "refs/tags/%s" % ud.unresolvedrev[name]
        for s in [head, tag + "^{}", tag]:
            for l in output.split('\n'):
                if s in l:
                    return l.split()[0]
        raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output for %s" % \
            (ud.unresolvedrev[name], ud.host+ud.path))

    def latest_versionstring(self, ud, d):
        """
        Compute the latest release name like "x.y.z" in "x.y.z+gitHASH"
        by searching through the tags output of ls-remote, comparing
        versions and returning the highest match.
        """
        pupver = ('', '')

        tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX', True) or "(?P<pver>([0-9][\.|_]?)+)")
        try:
            output = self._lsremote(ud, d, "refs/tags/*")
        except (bb.fetch2.FetchError, bb.fetch2.NetworkAccess):
            return pupver

        verstring = ""
        revision = ""
        for line in output.split("\n"):
            if not line:
                break

            tag_head = line.split("/")[-1]
            # Ignore non-released branches
            m = re.search("(alpha|beta|rc|final)+", tag_head)
            if m:
                continue

            # search for version in the line
            tag = tagregex.search(tag_head)
            if tag == None:
                continue

            tag = tag.group('pver')
            tag = tag.replace("_", ".")

            if verstring and bb.utils.vercmp(("0", tag, ""), ("0", verstring, "")) < 0:
                continue

            verstring = tag
            revision = line.split()[0]
            pupver = (verstring, revision)

        return pupver
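
A self-contained miniature of the tag scan above, for readers who want to see the UPSTREAM_CHECK_GITTAGREGEX mechanics in isolation. It parses "sha<TAB>refs/tags/NAME" lines and keeps the highest version, using a simple numeric key in place of bb.utils.vercmp:

import re

def verkey(v):
    return [int(x) for x in v.split(".") if x.isdigit()]

def latest_tag(lsremote_output, pattern=r"(?P<pver>([0-9][\._]?)+)"):
    tagregex = re.compile(pattern)
    best = ("", "")
    for line in lsremote_output.splitlines():
        if not line:
            continue
        sha, _, ref = line.partition("\t")
        name = ref.split("/")[-1]
        if re.search("(alpha|beta|rc|final)+", name):
            continue  # skip pre-releases, as the fetcher does
        m = tagregex.search(name)
        if not m:
            continue
        ver = m.group('pver').replace("_", ".")
        if best[0] and verkey(ver) <= verkey(best[0]):
            continue
        best = (ver, sha)
    return best

print(latest_tag("abc\trefs/tags/v1.2\ndef\trefs/tags/v1.10\nfff\trefs/tags/v2.0-rc1"))
# -> ('1.10', 'def')
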
        search = "refs/heads/%s refs/tags/%s^{}" % (ud.unresolvedrev[name], ud.unresolvedrev[name])
        output = self._lsremote(ud, d, search)
        return output.split()[0]

    def _build_revision(self, ud, d, name):
        return ud.revisions[name]

    def gitpkgv_revision(self, ud, d, name):
        """
        Return a sortable revision number by counting commits in the history
        Based on gitpkgv.bbclass in meta-openembedded
        """
        rev = self._build_revision(ud, d, name)
        localpath = ud.localpath
        rev_file = os.path.join(localpath, "oe-gitpkgv_" + rev)
        if not os.path.exists(localpath):
            commits = None
        else:
            if not os.path.exists(rev_file) or not os.path.getsize(rev_file):
                from pipes import quote
                commits = bb.fetch2.runfetchcmd(
                        "git rev-list %s -- | wc -l" % (quote(rev)),
                        d, quiet=True).strip().lstrip('0')
                if commits:
                    open(rev_file, "w").write("%d\n" % int(commits))
            else:
                commits = open(rev_file, "r").readline(128).strip()
        if commits:
            return False, "%s+%s" % (commits, rev[:7])
        else:
            return True, str(rev)
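
A standalone take on gitpkgv_revision, using git rev-list --count directly: "<commit-count>+<short-sha>" sorts naturally as history grows. The helper name and paths are illustrative:

import subprocess

def gitpkgv(clone_dir, rev):
    # Count commits reachable from rev in a local clone at clone_dir.
    count = subprocess.check_output(
        ["git", "rev-list", "--count", rev, "--"],
        cwd=clone_dir).decode().strip()
    return "%s+%s" % (count.lstrip("0") or "0", rev[:7])

# e.g. gitpkgv("/path/to/clone", "0123456789abcdef...") -> "1234+0123456"
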

    def checkstatus(self, fetch, ud, d):
    def checkstatus(self, ud, d):
        fetchcmd = "%s ls-remote %s" % (ud.basecmd, ud.url)
        try:
            self._lsremote(ud, d, "")
            runfetchcmd(fetchcmd, d, quiet=True)
            return True
        except FetchError:
            return False

@@ -2,16 +2,6 @@
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' git submodules implementation

Inherits from and extends the Git fetcher to retrieve submodules of a git repository
after cloning.

SRC_URI = "gitsm://<see Git fetcher for syntax>"

See the Git fetcher, git://, for usage documentation.

NOTE: Switching a SRC_URI from "git://" to "gitsm://" requires a clean of your recipe.

"""

# Copyright (C) 2013 Richard Purdie
@@ -109,7 +99,6 @@ class GitSM(Git):
        runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*true/bare = false/'", d)
        os.chdir(tmpclonedir)
        runfetchcmd(ud.basecmd + " reset --hard", d)
        runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d)
        runfetchcmd(ud.basecmd + " submodule init", d)
        runfetchcmd(ud.basecmd + " submodule update", d)
        self._set_relative_paths(tmpclonedir)

@@ -28,7 +28,6 @@ import os
import sys
import logging
import bb
import errno
from bb import data
from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
@@ -44,13 +43,6 @@ class Hg(FetchMethod):
        """
        return ud.type in ['hg']

    def supports_checksum(self, urldata):
        """
        Don't require checksums for local archives created from
        repository checkouts.
        """
        return False

    def urldata_init(self, ud, d):
        """
        init hg specific variable within url data
@@ -60,12 +52,10 @@ class Hg(FetchMethod):

        ud.module = ud.parm["module"]

        if 'protocol' in ud.parm:
            ud.proto = ud.parm['protocol']
        elif not ud.host:
            ud.proto = 'file'
        else:
            ud.proto = "hg"
        # Create paths to mercurial checkouts
        relpath = self._strip_leading_slashes(ud.path)
        ud.pkgdir = os.path.join(data.expand('${HGDIR}', d), ud.host, relpath)
        ud.moddir = os.path.join(ud.pkgdir, ud.module)

        ud.setup_revisons(d)

@@ -74,19 +64,7 @@ class Hg(FetchMethod):
        elif not ud.revision:
            ud.revision = self.latest_revision(ud, d)

        # Create paths to mercurial checkouts
        hgsrcname = '%s_%s_%s' % (ud.module.replace('/', '.'), \
                            ud.host, ud.path.replace('/', '.'))
        ud.mirrortarball = 'hg_%s.tar.gz' % hgsrcname
        ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)

        hgdir = d.getVar("HGDIR", True) or (d.getVar("DL_DIR", True) + "/hg/")
        ud.pkgdir = os.path.join(hgdir, hgsrcname)
        ud.moddir = os.path.join(ud.pkgdir, ud.module)
        ud.localfile = ud.moddir
        ud.basecmd = data.getVar("FETCHCMD_hg", d, True) or "/usr/bin/env hg"

        ud.write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS", True)
        ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)

    def need_update(self, ud, d):
        revTag = ud.parm.get('rev', 'tip')
@@ -96,21 +74,14 @@ class Hg(FetchMethod):
            return True
        return False

    def try_premirror(self, ud, d):
        # If we don't do this, updating an existing checkout with only premirrors
        # is not possible
        if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None:
            return True
        if os.path.exists(ud.moddir):
            return False
        return True

    def _buildhgcommand(self, ud, d, command):
        """
        Build up an hg commandline based on ud
        command is "fetch", "update", "info"
        """

        basecmd = data.expand('${FETCHCMD_hg}', d)

        proto = ud.parm.get('protocol', 'http')

        host = ud.host
@@ -127,7 +98,7 @@ class Hg(FetchMethod):
            hgroot = ud.user + "@" + host + ud.path

        if command == "info":
            return "%s identify -i %s://%s/%s" % (ud.basecmd, proto, hgroot, ud.module)
            return "%s identify -i %s://%s/%s" % (basecmd, proto, hgroot, ud.module)

        options = []

@@ -140,22 +111,19 @@ class Hg(FetchMethod):

        if command == "fetch":
            if ud.user and ud.pswd:
                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" clone %s %s://%s/%s %s" % (ud.basecmd, ud.user, ud.pswd, proto, " ".join(options), proto, hgroot, ud.module, ud.module)
                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" clone %s %s://%s/%s %s" % (basecmd, ud.user, ud.pswd, proto, " ".join(options), proto, hgroot, ud.module, ud.module)
            else:
                cmd = "%s clone %s %s://%s/%s %s" % (ud.basecmd, " ".join(options), proto, hgroot, ud.module, ud.module)
                cmd = "%s clone %s %s://%s/%s %s" % (basecmd, " ".join(options), proto, hgroot, ud.module, ud.module)
        elif command == "pull":
            # do not pass options list; limiting pull to rev causes the local
            # repo not to contain it and immediately following "update" command
            # will crash
            if ud.user and ud.pswd:
                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" pull" % (ud.basecmd, ud.user, ud.pswd, proto)
                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" pull" % (basecmd, ud.user, ud.pswd, proto)
            else:
                cmd = "%s pull" % (ud.basecmd)
                cmd = "%s pull" % (basecmd)
        elif command == "update":
            if ud.user and ud.pswd:
                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" update -C %s" % (ud.basecmd, ud.user, ud.pswd, proto, " ".join(options))
            else:
                cmd = "%s update -C %s" % (ud.basecmd, " ".join(options))
            cmd = "%s update --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" -C %s" % (basecmd, ud.user, ud.pswd, proto, " ".join(options))
        else:
            raise FetchError("Invalid hg command %s" % command, ud.url)

@@ -166,36 +134,16 @@ class Hg(FetchMethod):

        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

        # If the checkout doesn't exist and the mirror tarball does, extract it
        if not os.path.exists(ud.pkgdir) and os.path.exists(ud.fullmirror):
            bb.utils.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)

        if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
            # Found the source, check whether we need to pull
            updatecmd = self._buildhgcommand(ud, d, "update")
            updatecmd = self._buildhgcommand(ud, d, "pull")
            logger.info("Update " + ud.url)
            # update sources there
            os.chdir(ud.moddir)
            logger.debug(1, "Running %s", updatecmd)
            try:
                runfetchcmd(updatecmd, d)
            except bb.fetch2.FetchError:
                # Running pull in the repo
                pullcmd = self._buildhgcommand(ud, d, "pull")
                logger.info("Pulling " + ud.url)
                # update sources there
                os.chdir(ud.moddir)
                logger.debug(1, "Running %s", pullcmd)
                bb.fetch2.check_network_access(d, pullcmd, ud.url)
                runfetchcmd(pullcmd, d)
                try:
                    os.unlink(ud.fullmirror)
                except OSError as exc:
                    if exc.errno != errno.ENOENT:
                        raise
            bb.fetch2.check_network_access(d, updatecmd, ud.url)
            runfetchcmd(updatecmd, d)

        # No source found, clone it.
        if not os.path.exists(ud.moddir):
        else:
            fetchcmd = self._buildhgcommand(ud, d, "fetch")
            logger.info("Fetch " + ud.url)
            # check out sources there
@@ -212,12 +160,14 @@ class Hg(FetchMethod):
            logger.debug(1, "Running %s", updatecmd)
            runfetchcmd(updatecmd, d)

    def clean(self, ud, d):
        """ Clean the hg dir """
        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude '.hg' --exclude '.hgtags'"

        bb.utils.remove(ud.localpath, True)
        bb.utils.remove(ud.fullmirror)
        bb.utils.remove(ud.fullmirror + ".done")
        os.chdir(ud.pkgdir)
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath])

    def supports_srcrev(self):
        return True
@@ -238,41 +188,3 @@ class Hg(FetchMethod):
        Return a unique key for the url
        """
        return "hg:" + ud.moddir

    def build_mirror_data(self, ud, d):
        # Generate a mirror tarball if needed
        if ud.write_tarballs == "1" and not os.path.exists(ud.fullmirror):
            # it's possible that this symlink points to a read-only filesystem with PREMIRROR
            if os.path.islink(ud.fullmirror):
                os.unlink(ud.fullmirror)

            os.chdir(ud.pkgdir)
            logger.info("Creating tarball of hg repository")
            runfetchcmd("tar -czf %s %s" % (ud.fullmirror, ud.module), d)
            runfetchcmd("touch %s.done" % (ud.fullmirror), d)

    def localpath(self, ud, d):
        return ud.pkgdir

    def unpack(self, ud, destdir, d):
        """
        Make a local clone or export for the url
        """

        revflag = "-r %s" % ud.revision
        subdir = ud.parm.get("destsuffix", ud.module)
        codir = "%s/%s" % (destdir, subdir)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata != "nokeep":
            if not os.access(os.path.join(codir, '.hg'), os.R_OK):
                logger.debug(2, "Unpack: creating new hg repository in '" + codir + "'")
                runfetchcmd("%s init %s" % (ud.basecmd, codir), d)
            logger.debug(2, "Unpack: updating source in '" + codir + "'")
            os.chdir(codir)
            runfetchcmd("%s pull %s" % (ud.basecmd, ud.moddir), d)
            runfetchcmd("%s up -C %s" % (ud.basecmd, revflag), d)
        else:
            logger.debug(2, "Unpack: extracting source to '" + codir + "'")
            os.chdir(ud.moddir)
            runfetchcmd("%s archive -t files %s %s" % (ud.basecmd, revflag, codir), d)

@@ -51,41 +51,29 @@ class Local(FetchMethod):
        """
        Return the local filename of a given url assuming a successful fetch.
        """
        return self.localpaths(urldata, d)[-1]

    def localpaths(self, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        """
        searched = []
        path = urldata.decodedurl
        newpath = path
        if path[0] == "/":
            return [path]
        filespath = data.getVar('FILESPATH', d, True)
        if filespath:
            logger.debug(2, "Searching for %s in paths:\n    %s" % (path, "\n    ".join(filespath.split(":"))))
            newpath, hist = bb.utils.which(filespath, path, history=True)
            searched.extend(hist)
        if not newpath:
            filesdir = data.getVar('FILESDIR', d, True)
            if filesdir:
                logger.debug(2, "Searching for %s in path: %s" % (path, filesdir))
                newpath = os.path.join(filesdir, path)
                searched.append(newpath)
        if (not newpath or not os.path.exists(newpath)) and path.find("*") != -1:
            # For expressions using '*', best we can do is take the first directory in FILESPATH that exists
            newpath, hist = bb.utils.which(filespath, ".", history=True)
            searched.extend(hist)
            logger.debug(2, "Searching for %s in path: %s" % (path, newpath))
            return searched
        if not os.path.exists(newpath):
            dldirfile = os.path.join(d.getVar("DL_DIR", True), path)
            logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
            bb.utils.mkdirhier(os.path.dirname(dldirfile))
            searched.append(dldirfile)
            return searched
        return searched
        if path[0] != "/":
            filespath = data.getVar('FILESPATH', d, True)
            if filespath:
                logger.debug(2, "Searching for %s in paths: \n%s" % (path, "\n    ".join(filespath.split(":"))))
                newpath = bb.utils.which(filespath, path)
            if not newpath:
                filesdir = data.getVar('FILESDIR', d, True)
                if filesdir:
                    logger.debug(2, "Searching for %s in path: %s" % (path, filesdir))
                    newpath = os.path.join(filesdir, path)
            if (not newpath or not os.path.exists(newpath)) and path.find("*") != -1:
                # For expressions using '*', best we can do is take the first directory in FILESPATH that exists
                newpath = bb.utils.which(filespath, ".")
                logger.debug(2, "Searching for %s in path: %s" % (path, newpath))
                return newpath
            if not os.path.exists(newpath):
                dldirfile = os.path.join(d.getVar("DL_DIR", True), path)
                logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
                bb.utils.mkdirhier(os.path.dirname(dldirfile))
                return dldirfile
        return newpath
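
The history-returning lookup the new localpaths() relies on can be sketched without BitBake. This is a simplified stand-in for bb.utils.which(..., history=True): walk the colon-separated search path, record every candidate tried, and return the first hit plus the history (which the fetcher reports on failure):

import os

def which_with_history(search_path, target):
    history = []
    for directory in search_path.split(":"):
        candidate = os.path.join(directory, target)
        history.append(candidate)
        if os.path.exists(candidate):
            return candidate, history
    return "", history

# e.g. which_with_history("files:../shared/files", "defconfig")
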
|
||||
|
||||
def need_update(self, ud, d):
|
||||
if ud.url.find("*") != -1:
|
||||
@@ -112,7 +100,7 @@ class Local(FetchMethod):
|
||||
|
||||
return True
|
||||
|
||||
def checkstatus(self, fetch, urldata, d):
|
||||
def checkstatus(self, urldata, d):
|
||||
"""
|
||||
Check the status of the url
|
||||
"""
|
||||
|
||||
@@ -48,7 +48,7 @@ class Perforce(FetchMethod):
|
||||
(user, pswd, host, port) = path.split('@')[0].split(":")
|
||||
path = path.split('@')[1]
|
||||
else:
|
||||
(host, port) = d.getVar('P4PORT', False).split(':')
|
||||
(host, port) = d.getVar('P4PORT').split(':')
|
||||
user = ""
|
||||
pswd = ""
|
||||
|
||||
@@ -103,15 +103,22 @@ class Perforce(FetchMethod):
|
||||
def urldata_init(self, ud, d):
|
||||
(host, path, user, pswd, parm) = Perforce.doparse(ud.url, d)
|
||||
|
||||
base_path = path.replace('/...', '')
|
||||
base_path = self._strip_leading_slashes(base_path)
|
||||
|
||||
if "label" in parm:
|
||||
version = parm["label"]
|
||||
else:
|
||||
version = Perforce.getcset(d, path, host, user, pswd, parm)
|
||||
# If a label is specified, we use that as our filename
|
||||
|
||||
ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base_path.replace('/', '.'), version), d)
|
||||
if "label" in parm:
|
||||
ud.localfile = "%s.tar.gz" % (parm["label"])
|
||||
return
|
||||
|
||||
base = path
|
||||
which = path.find('/...')
|
||||
if which != -1:
|
||||
base = path[:which-1]
|
||||
|
||||
base = self._strip_leading_slashes(base)
|
||||
|
||||
cset = Perforce.getcset(d, path, host, user, pswd, parm)
|
||||
|
||||
ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base.replace('/', '.'), cset), d)
|
||||
|
||||
def download(self, ud, d):
|
||||
"""
|
||||
@@ -123,7 +130,7 @@ class Perforce(FetchMethod):
|
||||
if depot.find('/...') != -1:
|
||||
path = depot[:depot.find('/...')]
|
||||
else:
|
||||
path = depot[:depot.rfind('/')]
|
||||
path = depot
|
||||
|
||||
module = parm.get('module', os.path.basename(path))
|
||||
|
||||
|
||||
@@ -99,7 +99,7 @@ class SFTP(FetchMethod):
|
||||
"""Fetch urls"""
|
||||
|
||||
urlo = URI(ud.url)
|
||||
basecmd = 'sftp -oBatchMode=yes'
|
||||
basecmd = 'sftp -oPasswordAuthentication=no'
|
||||
port = ''
|
||||
if urlo.port:
|
||||
port = '-P %d' % urlo.port
|
||||
|
||||
@@ -87,8 +87,7 @@ class SSH(FetchMethod):
|
||||
m = __pattern__.match(urldata.url)
|
||||
path = m.group('path')
|
||||
host = m.group('host')
|
||||
urldata.localpath = os.path.join(d.getVar('DL_DIR', True),
|
||||
os.path.basename(os.path.normpath(path)))
|
||||
urldata.localpath = os.path.join(d.getVar('DL_DIR', True), os.path.basename(path))
|
||||
|
||||
def download(self, urldata, d):
|
||||
dldir = d.getVar('DL_DIR', True)
|
||||
|
||||
@@ -54,11 +54,6 @@ class Svn(FetchMethod):
|
||||
|
||||
ud.module = ud.parm["module"]
|
||||
|
||||
if not "path_spec" in ud.parm:
|
||||
ud.path_spec = ud.module
|
||||
else:
|
||||
ud.path_spec = ud.parm["path_spec"]
|
||||
|
||||
# Create paths to svn checkouts
|
||||
relpath = self._strip_leading_slashes(ud.path)
|
||||
ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath)
|
||||
@@ -106,8 +101,7 @@ class Svn(FetchMethod):
|
||||
suffix = "@%s" % (ud.revision)
|
||||
|
||||
if command == "fetch":
|
||||
transportuser = ud.parm.get("transportuser", "")
|
||||
svncmd = "%s co %s %s://%s%s/%s%s %s" % (ud.basecmd, " ".join(options), proto, transportuser, svnroot, ud.module, suffix, ud.path_spec)
|
||||
svncmd = "%s co %s %s://%s/%s%s %s" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module, suffix, ud.module)
|
||||
elif command == "update":
|
||||
svncmd = "%s update %s" % (ud.basecmd, " ".join(options))
|
||||
else:
|
||||
@@ -154,7 +148,7 @@ class Svn(FetchMethod):
|
||||
|
||||
os.chdir(ud.pkgdir)
|
||||
# tar them up to a defined filename
|
||||
runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.path_spec), d, cleanup = [ud.localpath])
|
||||
runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath])
|
||||
|
||||
def clean(self, ud, d):
|
||||
""" Clean SVN specific files and dirs """
|
||||
|
||||
@@ -25,9 +25,6 @@ BitBake build tools.
|
||||
#
|
||||
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
|
||||
|
||||
import re
|
||||
import tempfile
|
||||
import subprocess
|
||||
import os
|
||||
import logging
|
||||
import bb
|
||||
@@ -37,8 +34,6 @@ from bb.fetch2 import FetchMethod
|
||||
from bb.fetch2 import FetchError
|
||||
from bb.fetch2 import logger
|
||||
from bb.fetch2 import runfetchcmd
|
||||
from bs4 import BeautifulSoup
|
||||
from bs4 import SoupStrainer
|
||||
|
||||
class Wget(FetchMethod):
|
||||
"""Class to fetch urls via 'wget'"""
|
||||
@@ -101,442 +96,11 @@ class Wget(FetchMethod):
|
||||
|
||||
return True
|
||||
|
||||
def checkstatus(self, fetch, ud, d):
|
||||
import urllib2, socket, httplib
|
||||
from urllib import addinfourl
|
||||
from bb.fetch2 import FetchConnectionCache
|
||||
|
||||
class HTTPConnectionCache(httplib.HTTPConnection):
|
||||
if fetch.connection_cache:
|
||||
def connect(self):
|
||||
"""Connect to the host and port specified in __init__."""
|
||||
|
||||
sock = fetch.connection_cache.get_connection(self.host, self.port)
|
||||
if sock:
|
||||
self.sock = sock
|
||||
else:
|
||||
self.sock = socket.create_connection((self.host, self.port),
|
||||
self.timeout, self.source_address)
|
||||
fetch.connection_cache.add_connection(self.host, self.port, self.sock)
|
||||
|
||||
if self._tunnel_host:
|
||||
self._tunnel()
|
||||
|
||||
class CacheHTTPHandler(urllib2.HTTPHandler):
|
||||
def http_open(self, req):
|
||||
return self.do_open(HTTPConnectionCache, req)
|
||||
|
||||
def do_open(self, http_class, req):
|
||||
"""Return an addinfourl object for the request, using http_class.
|
||||
|
||||
http_class must implement the HTTPConnection API from httplib.
|
||||
The addinfourl return value is a file-like object. It also
|
||||
has methods and attributes including:
|
||||
- info(): return a mimetools.Message object for the headers
|
||||
- geturl(): return the original request URL
|
||||
- code: HTTP status code
|
||||
"""
|
||||
host = req.get_host()
|
||||
if not host:
|
||||
raise urlllib2.URLError('no host given')

                h = http_class(host, timeout=req.timeout) # will parse host:port
                h.set_debuglevel(self._debuglevel)

                headers = dict(req.unredirected_hdrs)
                headers.update(dict((k, v) for k, v in req.headers.items()
                            if k not in headers))

                # We want to make an HTTP/1.1 request, but the addinfourl
                # class isn't prepared to deal with a persistent connection.
                # It will try to read all remaining data from the socket,
                # which will block while the server waits for the next request.
                # So make sure the connection gets closed after the (only)
                # request.

                # Don't close connection when connection_cache is enabled,
                if fetch.connection_cache is None:
                    headers["Connection"] = "close"
                else:
                    headers["Connection"] = "Keep-Alive" # Works for HTTP/1.0

                headers = dict(
                    (name.title(), val) for name, val in headers.items())

                if req._tunnel_host:
                    tunnel_headers = {}
                    proxy_auth_hdr = "Proxy-Authorization"
                    if proxy_auth_hdr in headers:
                        tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
                        # Proxy-Authorization should not be sent to origin
                        # server.
                        del headers[proxy_auth_hdr]
                    h.set_tunnel(req._tunnel_host, headers=tunnel_headers)

                try:
                    h.request(req.get_method(), req.get_selector(), req.data, headers)
                except socket.error, err: # XXX what error?
                    # Don't close connection when cache is enabled.
                    if fetch.connection_cache is None:
                        h.close()
                    raise urllib2.URLError(err)
                else:
                    try:
                        r = h.getresponse(buffering=True)
                    except TypeError: # buffering kw not supported
                        r = h.getresponse()

                # Pick apart the HTTPResponse object to get the addinfourl
                # object initialized properly.

                # Wrap the HTTPResponse object in socket's file object adapter
                # for Windows. That adapter calls recv(), so delegate recv()
                # to read(). This weird wrapping allows the returned object to
                # have readline() and readlines() methods.

                # XXX It might be better to extract the read buffering code
                # out of socket._fileobject() and into a base class.
                r.recv = r.read

                # no data, just have to read
                r.read()
                class fp_dummy(object):
                    def read(self):
                        return ""
                    def readline(self):
                        return ""
                    def close(self):
                        pass

                resp = addinfourl(fp_dummy(), r.msg, req.get_full_url())
                resp.code = r.status
                resp.msg = r.reason

                # Close connection when server request it.
                if fetch.connection_cache is not None:
                    if 'Connection' in r.msg and r.msg['Connection'] == 'close':
                        fetch.connection_cache.remove_connection(h.host, h.port)

                return resp
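
The keep-alive path above leans on a small (host, port) -> socket cache; the real one is bb.fetch2's FetchConnectionCache. A minimal sketch of the interface the calls above assume (illustrative only, not the actual implementation):

    import socket

    class ConnectionCache(object):
        """One reusable socket per (host, port) endpoint."""
        def __init__(self):
            self.cache = {}

        def get_connection(self, host, port):
            return self.cache.get((host, port))

        def add_connection(self, host, port, sock):
            self.cache[(host, port)] = sock

        def remove_connection(self, host, port):
            # used when the server replies "Connection: close"
            sock = self.cache.pop((host, port), None)
            if sock:
                sock.close()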

        def export_proxies(d):
            variables = ['http_proxy', 'HTTP_PROXY', 'https_proxy', 'HTTPS_PROXY',
                        'ftp_proxy', 'FTP_PROXY', 'no_proxy', 'NO_PROXY']
            exported = False

            for v in variables:
                if v in os.environ.keys():
                    exported = True
                else:
                    v_proxy = d.getVar(v, True)
                    if v_proxy is not None:
                        os.environ[v] = v_proxy
                        exported = True

            return exported
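
Outside BitBake the same export pattern reads as below; 'config' is a hypothetical dict standing in for the datastore, and values already present in the caller's environment win, exactly as in export_proxies() above:

    import os

    def export_proxies_from(config):
        exported = False
        for v in ('http_proxy', 'HTTP_PROXY', 'https_proxy', 'HTTPS_PROXY',
                  'ftp_proxy', 'FTP_PROXY', 'no_proxy', 'NO_PROXY'):
            if v in os.environ:
                exported = True               # caller's shell already set it
            elif config.get(v) is not None:
                os.environ[v] = config[v]     # propagate to child processes
                exported = True
        return exported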

        def head_method(self):
            return "HEAD"

        exported_proxies = export_proxies(d)

        # XXX: Since Python 2.7.9 ssl cert validation is enabled by default
        # see PEP-0476, this causes verification errors on some https servers
        # so disable by default.
        import ssl
        ssl_context = None
        if hasattr(ssl, '_create_unverified_context'):
            ssl_context = ssl._create_unverified_context()

        if exported_proxies == True and ssl_context is not None:
            opener = urllib2.build_opener(urllib2.ProxyHandler, CacheHTTPHandler,
                    urllib2.HTTPSHandler(context=ssl_context))
        elif exported_proxies == False and ssl_context is not None:
            opener = urllib2.build_opener(CacheHTTPHandler,
                    urllib2.HTTPSHandler(context=ssl_context))
        elif exported_proxies == True and ssl_context is None:
            opener = urllib2.build_opener(urllib2.ProxyHandler, CacheHTTPHandler)
        else:
            opener = urllib2.build_opener(CacheHTTPHandler)

        urllib2.Request.get_method = head_method
        urllib2.install_opener(opener)
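
The four-way branch above only decides which handlers participate; a hedged, equivalent formulation (same urllib2 API, Python 2) composes the handler list first:

    handlers = [CacheHTTPHandler]
    if exported_proxies:
        handlers.insert(0, urllib2.ProxyHandler)
    if ssl_context is not None:
        handlers.append(urllib2.HTTPSHandler(context=ssl_context))
    opener = urllib2.build_opener(*handlers)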
    def checkstatus(self, ud, d):

        uri = ud.url.split(";")[0]
        fetchcmd = self.basecmd + " --spider '%s'" % uri

        self._runwget(ud, d, fetchcmd, True)

        try:
            urllib2.urlopen(uri)
        except:
            return False
        return True

    def _parse_path(self, regex, s):
        """
        Find and group name, version and archive type in the given string s
        """

        m = regex.search(s)
        if m:
            pname = ''
            pver = ''
            ptype = ''

            mdict = m.groupdict()
            if 'name' in mdict.keys():
                pname = mdict['name']
            if 'pver' in mdict.keys():
                pver = mdict['pver']
            if 'type' in mdict.keys():
                ptype = mdict['type']

            bb.debug(3, "_parse_path: %s, %s, %s" % (pname, pver, ptype))

            return (pname, pver, ptype)

        return None

    def _modelate_version(self, version):
        if version[0] in ['.', '-']:
            if version[1].isdigit():
                version = version[1] + version[0] + version[2:len(version)]
            else:
                version = version[1:len(version)]

        version = re.sub('-', '.', version)
        version = re.sub('_', '.', version)
        version = re.sub('(rc)+', '.1000.', version)
        version = re.sub('(beta)+', '.100.', version)
        version = re.sub('(alpha)+', '.10.', version)
        if version[0] == 'v':
            version = version[1:len(version)]
        return version
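
To make the normalisation concrete, here is a standalone sketch applying the same substitution chain (the method above extracted as a plain function); the asserts show a few traced inputs:

    import re

    def modelate(version):
        if version[0] in ('.', '-'):
            if version[1].isdigit():
                version = version[1] + version[0] + version[2:]
            else:
                version = version[1:]
        for pat, rep in (('-', '.'), ('_', '.'), ('(rc)+', '.1000.'),
                         ('(beta)+', '.100.'), ('(alpha)+', '.10.')):
            version = re.sub(pat, rep, version)
        return version[1:] if version[0] == 'v' else version

    assert modelate('v2.0.1') == '2.0.1'       # leading 'v' stripped
    assert modelate('1_5_0') == '1.5.0'        # separators unified
    assert modelate('2.4rc1') == '2.4.1000.1'  # pre-release mapped to digits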

    def _vercmp(self, old, new):
        """
        Check whether 'new' is newer than 'old' version. We use existing vercmp() for the
        purpose. PE is cleared in comparison as it's not for build, and PR is cleared too
        for simplicity as it's somehow difficult to get from various upstream format
        """

        (oldpn, oldpv, oldsuffix) = old
        (newpn, newpv, newsuffix) = new

        """
        Check for a new suffix type that we have never heard of before
        """
        if (newsuffix):
            m = self.suffix_regex_comp.search(newsuffix)
            if not m:
                bb.warn("%s has a possible unknown suffix: %s" % (newpn, newsuffix))
                return False

        """
        Not our package so ignore it
        """
        if oldpn != newpn:
            return False

        oldpv = self._modelate_version(oldpv)
        newpv = self._modelate_version(newpv)

        return bb.utils.vercmp(("0", oldpv, ""), ("0", newpv, ""))

    def _fetch_index(self, uri, ud, d):
        """
        Run fetch checkstatus to get directory information
        """
        f = tempfile.NamedTemporaryFile()

        agent = "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12"
        fetchcmd = self.basecmd
        fetchcmd += " -O " + f.name + " --user-agent='" + agent + "' '" + uri + "'"
        try:
            self._runwget(ud, d, fetchcmd, True)
            fetchresult = f.read()
        except bb.fetch2.BBFetchException:
            fetchresult = ""

        f.close()
        return fetchresult

    def _check_latest_version(self, url, package, package_regex, current_version, ud, d):
        """
        Return the latest version of a package inside a given directory path
        If error or no version, return ""
        """
        valid = 0
        version = ['', '', '']

        bb.debug(3, "VersionURL: %s" % (url))
        soup = BeautifulSoup(self._fetch_index(url, ud, d), "html.parser", parse_only=SoupStrainer("a"))
        if not soup:
            bb.debug(3, "*** %s NO SOUP" % (url))
            return ""

        for line in soup.find_all('a', href=True):
            bb.debug(3, "line['href'] = '%s'" % (line['href']))
            bb.debug(3, "line = '%s'" % (str(line)))

            newver = self._parse_path(package_regex, line['href'])
            if not newver:
                newver = self._parse_path(package_regex, str(line))

            if newver:
                bb.debug(3, "Upstream version found: %s" % newver[1])
                if valid == 0:
                    version = newver
                    valid = 1
                elif self._vercmp(version, newver) < 0:
                    version = newver

        pupver = re.sub('_', '.', version[1])

        bb.debug(3, "*** %s -> UpstreamVersion = %s (CurrentVersion = %s)" %
                (package, pupver or "N/A", current_version[1]))

        if valid:
            return pupver

        return ""

    def _check_latest_version_by_dir(self, dirver, package, package_regex,
            current_version, ud, d):
        """
        Scan every directory in order to get upstream version.
        """
        version_dir = ['', '', '']
        version = ['', '', '']

        dirver_regex = re.compile("(\D*)((\d+[\.\-_])+(\d+))")
        s = dirver_regex.search(dirver)
        if s:
            version_dir[1] = s.group(2)
        else:
            version_dir[1] = dirver

        dirs_uri = bb.fetch.encodeurl([ud.type, ud.host,
                ud.path.split(dirver)[0], ud.user, ud.pswd, {}])
        bb.debug(3, "DirURL: %s, %s" % (dirs_uri, package))

        soup = BeautifulSoup(self._fetch_index(dirs_uri, ud, d), "html.parser", parse_only=SoupStrainer("a"))
        if not soup:
            return version[1]

        for line in soup.find_all('a', href=True):
            s = dirver_regex.search(line['href'].strip("/"))
            if s:
                version_dir_new = ['', s.group(2), '']
                if self._vercmp(version_dir, version_dir_new) <= 0:
                    dirver_new = s.group(1) + s.group(2)
                    path = ud.path.replace(dirver, dirver_new, True) \
                        .split(package)[0]
                    uri = bb.fetch.encodeurl([ud.type, ud.host, path,
                            ud.user, ud.pswd, {}])

                    pupver = self._check_latest_version(uri,
                            package, package_regex, current_version, ud, d)
                    if pupver:
                        version[1] = pupver

                    version_dir = version_dir_new

        return version[1]

    def _init_regexes(self, package, ud, d):
        """
        Match as many patterns as possible such as:
                gnome-common-2.20.0.tar.gz (most common format)
                gtk+-2.90.1.tar.gz
                xf86-input-synaptics-12.6.9.tar.gz
                dri2proto-2.3.tar.gz
                blktool_4.orig.tar.gz
                libid3tag-0.15.1b.tar.gz
                unzip552.tar.gz
                icu4c-3_6-src.tgz
                genext2fs_1.3.orig.tar.gz
                gst-fluendo-mp3
        """
        # match most patterns which uses "-" as separator to version digits
        pn_prefix1 = "[a-zA-Z][a-zA-Z0-9]*([-_][a-zA-Z]\w+)*\+?[-_]"
        # a loose pattern such as for unzip552.tar.gz
        pn_prefix2 = "[a-zA-Z]+"
        # a loose pattern such as for 80325-quicky-0.4.tar.gz
        pn_prefix3 = "[0-9]+[-]?[a-zA-Z]+"
        # Save the Package Name (pn) Regex for use later
        pn_regex = "(%s|%s|%s)" % (pn_prefix1, pn_prefix2, pn_prefix3)

        # match version
        pver_regex = "(([A-Z]*\d+[a-zA-Z]*[\.\-_]*)+)"

        # match arch
        parch_regex = "-source|_all_"

        # src.rpm extension was added only for rpm package. Can be removed if the rpm
        # packaged will always be considered as having to be manually upgraded
        psuffix_regex = "(tar\.gz|tgz|tar\.bz2|zip|xz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)"

        # match name, version and archive type of a package
        package_regex_comp = re.compile("(?P<name>%s?\.?v?)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s$)"
                % (pn_regex, pver_regex, parch_regex, psuffix_regex))
        self.suffix_regex_comp = re.compile(psuffix_regex)

        # compile regex, can be specific by package or generic regex
        pn_regex = d.getVar('UPSTREAM_CHECK_REGEX', True)
        if pn_regex:
            package_custom_regex_comp = re.compile(pn_regex)
        else:
            version = self._parse_path(package_regex_comp, package)
            if version:
                package_custom_regex_comp = re.compile(
                    "(?P<name>%s)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s)" %
                    (re.escape(version[0]), pver_regex, parch_regex, psuffix_regex))
            else:
                package_custom_regex_comp = None

        return package_custom_regex_comp
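
To see what the name/version/type split is aiming at, here is a deliberately simplified, hypothetical regex in the same shape as package_regex_comp above (the real pn/pver/psuffix patterns are much richer):

    import re

    pkg = re.compile(r"(?P<name>[a-zA-Z][\w+\-]*?[-_])"
                     r"(?P<pver>\d[\d.\-_]*)"
                     r"\.(?P<type>tar\.gz|tgz|tar\.bz2|tar\.xz|zip)$")
    m = pkg.match("gnome-common-2.20.0.tar.gz")
    assert m.group("pver") == "2.20.0" and m.group("type") == "tar.gz"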

    def latest_versionstring(self, ud, d):
        """
        Manipulate the URL and try to obtain the latest package version

        sanity check to ensure same name and type.
        """
        package = ud.path.split("/")[-1]
        current_version = ['', d.getVar('PV', True), '']

        """possible to have no version in pkg name, such as spectrum-fw"""
        if not re.search("\d+", package):
            current_version[1] = re.sub('_', '.', current_version[1])
            current_version[1] = re.sub('-', '.', current_version[1])
            return (current_version[1], '')

        package_regex = self._init_regexes(package, ud, d)
        if package_regex is None:
            bb.warn("latest_versionstring: package %s don't match pattern" % (package))
            return ('', '')
        bb.debug(3, "latest_versionstring, regex: %s" % (package_regex.pattern))

        uri = ""
        regex_uri = d.getVar("UPSTREAM_CHECK_URI", True)
        if not regex_uri:
            path = ud.path.split(package)[0]

            # search for version matches on folders inside the path, like:
            # "5.7" in http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
            dirver_regex = re.compile("(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
            m = dirver_regex.search(path)
            if m:
                pn = d.getVar('PN', True)
                dirver = m.group('dirver')

                dirver_pn_regex = re.compile("%s\d?" % (re.escape(pn)))
                if not dirver_pn_regex.search(dirver):
                    return (self._check_latest_version_by_dir(dirver,
                            package, package_regex, current_version, ud, d), '')

            uri = bb.fetch.encodeurl([ud.type, ud.host, path, ud.user, ud.pswd, {}])
        else:
            uri = regex_uri

        return (self._check_latest_version(uri, package, package_regex,
                current_version, ud, d), '')

@@ -1,434 +0,0 @@
#!/usr/bin/env python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
# Copyright (C) 2006 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os
import sys
import logging
import optparse
import warnings

import bb
from bb import event
import bb.msg
from bb import cooker
from bb import ui
from bb import server
from bb import cookerdata

logger = logging.getLogger("BitBake")

class BBMainException(Exception):
    pass

def present_options(optionlist):
    if len(optionlist) > 1:
        return ' or '.join([', '.join(optionlist[:-1]), optionlist[-1]])
    else:
        return optionlist[0]

class BitbakeHelpFormatter(optparse.IndentedHelpFormatter):
    def format_option(self, option):
        # We need to do this here rather than in the text we supply to
        # add_option() because we don't want to call list_extension_modules()
        # on every execution (since it imports all of the modules)
        # Note also that we modify option.help rather than the returned text
        # - this is so that we don't have to re-format the text ourselves
        if option.dest == 'ui':
            valid_uis = list_extension_modules(bb.ui, 'main')
            option.help = option.help.replace('@CHOICES@', present_options(valid_uis))
        elif option.dest == 'servertype':
            valid_server_types = list_extension_modules(bb.server, 'BitBakeServer')
            option.help = option.help.replace('@CHOICES@', present_options(valid_server_types))

        return optparse.IndentedHelpFormatter.format_option(self, option)

def list_extension_modules(pkg, checkattr):
    """
    Lists extension modules in a specific Python package
    (e.g. UIs, servers). NOTE: Calling this function will import all of the
    submodules of the specified module in order to check for the specified
    attribute; this can have unusual side-effects. As a result, this should
    only be called when displaying help text or error messages.
    Parameters:
        pkg: previously imported Python package to list
        checkattr: attribute to look for in module to determine if it's valid
            as the type of extension you are looking for
    """
    import pkgutil
    pkgdir = os.path.dirname(pkg.__file__)

    modules = []
    for _, modulename, _ in pkgutil.iter_modules([pkgdir]):
        if os.path.isdir(os.path.join(pkgdir, modulename)):
            # ignore directories
            continue
        try:
            module = __import__(pkg.__name__, fromlist=[modulename])
        except:
            # If we can't import it, it's not valid
            continue
        module_if = getattr(module, modulename)
        if getattr(module_if, 'hidden_extension', False):
            continue
        if not checkattr or hasattr(module_if, checkattr):
            modules.append(modulename)
    return modules

def import_extension_module(pkg, modulename, checkattr):
    try:
        # Dynamically load the UI based on the ui name. Although we
        # suggest a fixed set this allows you to have flexibility in which
        # ones are available.
        module = __import__(pkg.__name__, fromlist = [modulename])
        return getattr(module, modulename)
    except AttributeError:
        raise BBMainException('FATAL: Unable to import extension module "%s" from %s. Valid extension modules: %s' % (modulename, pkg.__name__, present_options(list_extension_modules(pkg, checkattr))))


# Display bitbake/OE warnings via the BitBake.Warnings logger, ignoring others"""
warnlog = logging.getLogger("BitBake.Warnings")
_warnings_showwarning = warnings.showwarning
def _showwarning(message, category, filename, lineno, file=None, line=None):
    if file is not None:
        if _warnings_showwarning is not None:
            _warnings_showwarning(message, category, filename, lineno, file, line)
    else:
        s = warnings.formatwarning(message, category, filename, lineno)
        warnlog.warn(s)

warnings.showwarning = _showwarning
warnings.filterwarnings("ignore")
warnings.filterwarnings("default", module="(<string>$|(oe|bb)\.)")
warnings.filterwarnings("ignore", category=PendingDeprecationWarning)
warnings.filterwarnings("ignore", category=ImportWarning)
warnings.filterwarnings("ignore", category=DeprecationWarning, module="<string>$")
warnings.filterwarnings("ignore", message="With-statements now directly support multiple context managers")

class BitBakeConfigParameters(cookerdata.ConfigParameters):

    def parseCommandLine(self, argv=sys.argv):
        parser = optparse.OptionParser(
            formatter = BitbakeHelpFormatter(),
            version = "BitBake Build Tool Core version %s" % bb.__version__,
            usage = """%prog [options] [recipename/target recipe:do_task ...]

Executes the specified task (default is 'build') for a given set of target recipes (.bb files).
It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which
will provide the layer, BBFILES and other configuration information.""")

        parser.add_option("-b", "--buildfile", help = "Execute tasks from a specific .bb recipe directly. WARNING: Does not handle any dependencies from other recipes.",
                   action = "store", dest = "buildfile", default = None)

        parser.add_option("-k", "--continue", help = "Continue as much as possible after an error. While the target that failed and anything depending on it cannot be built, as much as possible will be built before stopping.",
                   action = "store_false", dest = "abort", default = True)

        parser.add_option("-a", "--tryaltconfigs", help = "Continue with builds by trying to use alternative providers where possible.",
                   action = "store_true", dest = "tryaltconfigs", default = False)

        parser.add_option("-f", "--force", help = "Force the specified targets/task to run (invalidating any existing stamp file).",
                   action = "store_true", dest = "force", default = False)

        parser.add_option("-c", "--cmd", help = "Specify the task to execute. The exact options available depend on the metadata. Some examples might be 'compile' or 'populate_sysroot' or 'listtasks' may give a list of the tasks available.",
                   action = "store", dest = "cmd")

        parser.add_option("-C", "--clear-stamp", help = "Invalidate the stamp for the specified task such as 'compile' and then run the default task for the specified target(s).",
                   action = "store", dest = "invalidate_stamp")

        parser.add_option("-r", "--read", help = "Read the specified file before bitbake.conf.",
                   action = "append", dest = "prefile", default = [])

        parser.add_option("-R", "--postread", help = "Read the specified file after bitbake.conf.",
                   action = "append", dest = "postfile", default = [])

        parser.add_option("-v", "--verbose", help = "Output more log message data to the terminal.",
                   action = "store_true", dest = "verbose", default = False)

        parser.add_option("-D", "--debug", help = "Increase the debug level. You can specify this more than once.",
                   action = "count", dest="debug", default = 0)

        parser.add_option("-n", "--dry-run", help = "Don't execute, just go through the motions.",
                   action = "store_true", dest = "dry_run", default = False)

        parser.add_option("-S", "--dump-signatures", help = "Dump out the signature construction information, with no task execution. The SIGNATURE_HANDLER parameter is passed to the handler. Two common values are none and printdiff but the handler may define more/less. none means only dump the signature, printdiff means compare the dumped signature with the cached one.",
                   action = "append", dest = "dump_signatures", default = [], metavar="SIGNATURE_HANDLER")

        parser.add_option("-p", "--parse-only", help = "Quit after parsing the BB recipes.",
                   action = "store_true", dest = "parse_only", default = False)

        parser.add_option("-s", "--show-versions", help = "Show current and preferred versions of all recipes.",
                   action = "store_true", dest = "show_versions", default = False)

        parser.add_option("-e", "--environment", help = "Show the global or per-recipe environment complete with information about where variables were set/changed.",
                   action = "store_true", dest = "show_environment", default = False)

        parser.add_option("-g", "--graphviz", help = "Save dependency tree information for the specified targets in the dot syntax.",
                   action = "store_true", dest = "dot_graph", default = False)

        parser.add_option("-I", "--ignore-deps", help = """Assume these dependencies don't exist and are already provided (equivalent to ASSUME_PROVIDED). Useful to make dependency graphs more appealing""",
                   action = "append", dest = "extra_assume_provided", default = [])

        parser.add_option("-l", "--log-domains", help = """Show debug logging for the specified logging domains""",
                   action = "append", dest = "debug_domains", default = [])

        parser.add_option("-P", "--profile", help = "Profile the command and save reports.",
                   action = "store_true", dest = "profile", default = False)

        env_ui = os.environ.get('BITBAKE_UI', None)
        default_ui = env_ui or 'knotty'
        # @CHOICES@ is substituted out by BitbakeHelpFormatter above
        parser.add_option("-u", "--ui", help = "The user interface to use (@CHOICES@ - default %default).",
                   action="store", dest="ui", default=default_ui)

        # @CHOICES@ is substituted out by BitbakeHelpFormatter above
        parser.add_option("-t", "--servertype", help = "Choose which server type to use (@CHOICES@ - default %default).",
                   action = "store", dest = "servertype", default = "process")

        parser.add_option("", "--token", help = "Specify the connection token to be used when connecting to a remote server.",
                   action = "store", dest = "xmlrpctoken")

        parser.add_option("", "--revisions-changed", help = "Set the exit code depending on whether upstream floating revisions have changed or not.",
                   action = "store_true", dest = "revisions_changed", default = False)

        parser.add_option("", "--server-only", help = "Run bitbake without a UI, only starting a server (cooker) process.",
                   action = "store_true", dest = "server_only", default = False)

        parser.add_option("-B", "--bind", help = "The name/address for the bitbake server to bind to.",
                   action = "store", dest = "bind", default = False)

        parser.add_option("", "--no-setscene", help = "Do not run any setscene tasks. sstate will be ignored and everything needed, built.",
                   action = "store_true", dest = "nosetscene", default = False)

        parser.add_option("", "--remote-server", help = "Connect to the specified server.",
                   action = "store", dest = "remote_server", default = False)

        parser.add_option("-m", "--kill-server", help = "Terminate the remote server.",
                   action = "store_true", dest = "kill_server", default = False)

        parser.add_option("", "--observe-only", help = "Connect to a server as an observing-only client.",
                   action = "store_true", dest = "observe_only", default = False)

        parser.add_option("", "--status-only", help = "Check the status of the remote bitbake server.",
                   action = "store_true", dest = "status_only", default = False)

        parser.add_option("-w", "--write-log", help = "Writes the event log of the build to a bitbake event json file. Use '' (empty string) to assign the name automatically.",
                   action = "store", dest = "writeeventlog")

        options, targets = parser.parse_args(argv)

        # some environmental variables set also configuration options
        if "BBSERVER" in os.environ:
            options.servertype = "xmlrpc"
            options.remote_server = os.environ["BBSERVER"]

        if "BBTOKEN" in os.environ:
            options.xmlrpctoken = os.environ["BBTOKEN"]

if "BBEVENTLOG" is os.environ:
            options.writeeventlog = os.environ["BBEVENTLOG"]

        # fill in proper log name if not supplied
        if options.writeeventlog is not None and len(options.writeeventlog) == 0:
            import datetime
            options.writeeventlog = "bitbake_eventlog_%s.json" % datetime.datetime.now().strftime("%Y%m%d%H%M%S")

        # if BBSERVER says to autodetect, let's do that
        if options.remote_server:
            [host, port] = options.remote_server.split(":", 2)
            port = int(port)
            # use automatic port if port set to -1, means read it from
            # the bitbake.lock file; this is a bit tricky, but we always expect
            # to be in the base of the build directory if we need to have a
            # chance to start the server later, anyway
            if port == -1:
                lock_location = "./bitbake.lock"
                # we try to read the address at all times; if the server is not started,
                # we'll try to start it after the first connect fails, below
                try:
                    lf = open(lock_location, 'r')
                    remotedef = lf.readline()
                    [host, port] = remotedef.split(":")
                    port = int(port)
                    lf.close()
                    options.remote_server = remotedef
                except Exception as e:
                    raise BBMainException("Failed to read bitbake.lock (%s), invalid port" % str(e))

        return options, targets[1:]
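
The BBSERVER-style address handling above packs one convention worth spelling out: a port of -1 means "read the real host:port from ./bitbake.lock in the build directory". A hedged standalone sketch of just that logic:

    def resolve_server(remote, lockfile="./bitbake.lock"):
        host, port = remote.split(":", 1)
        if int(port) == -1:
            # autodetect: the running server wrote its address to the lock file
            with open(lockfile) as lf:
                host, port = lf.readline().strip().split(":")
        return host, int(port)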


def start_server(servermodule, configParams, configuration, features):
    server = servermodule.BitBakeServer()
    if configParams.bind:
        (host, port) = configParams.bind.split(':')
        server.initServer((host, int(port)))
        configuration.interface = [ server.serverImpl.host, server.serverImpl.port ]
    else:
        server.initServer()
        configuration.interface = []

    try:
        configuration.setServerRegIdleCallback(server.getServerIdleCB())

        cooker = bb.cooker.BBCooker(configuration, features)

        server.addcooker(cooker)
        server.saveConnectionDetails()
    except Exception as e:
        exc_info = sys.exc_info()
        while hasattr(server, "event_queue"):
            try:
                import queue
            except ImportError:
                import Queue as queue
            try:
                event = server.event_queue.get(block=False)
            except (queue.Empty, IOError):
                break
            if isinstance(event, logging.LogRecord):
                logger.handle(event)
        raise exc_info[1], None, exc_info[2]
    server.detach()
    cooker.lock.close()
    return server


def bitbake_main(configParams, configuration):

    # Python multiprocessing requires /dev/shm on Linux
    if sys.platform.startswith('linux') and not os.access('/dev/shm', os.W_OK | os.X_OK):
        raise BBMainException("FATAL: /dev/shm does not exist or is not writable")

    # Unbuffer stdout to avoid log truncation in the event
    # of an unorderly exit as well as to provide timely
    # updates to log files for use with tail
    try:
        if sys.stdout.name == '<stdout>':
            sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
    except:
        pass


    configuration.setConfigParameters(configParams)

    ui_module = import_extension_module(bb.ui, configParams.ui, 'main')
    servermodule = import_extension_module(bb.server, configParams.servertype, 'BitBakeServer')

    if configParams.server_only:
        if configParams.servertype != "xmlrpc":
            raise BBMainException("FATAL: If '--server-only' is defined, we must set the "
                                  "servertype as 'xmlrpc'.\n")
        if not configParams.bind:
            raise BBMainException("FATAL: The '--server-only' option requires a name/address "
                                  "to bind to with the -B option.\n")
        if configParams.remote_server:
            raise BBMainException("FATAL: The '--server-only' option conflicts with %s.\n" %
                                  ("the BBSERVER environment variable" if "BBSERVER" in os.environ \
                                   else "the '--remote-server' option" ))

    if configParams.bind and configParams.servertype != "xmlrpc":
        raise BBMainException("FATAL: If '-B' or '--bind' is defined, we must "
                              "set the servertype as 'xmlrpc'.\n")

    if configParams.remote_server and configParams.servertype != "xmlrpc":
        raise BBMainException("FATAL: If '--remote-server' is defined, we must "
                              "set the servertype as 'xmlrpc'.\n")

    if configParams.observe_only and (not configParams.remote_server or configParams.bind):
        raise BBMainException("FATAL: '--observe-only' can only be used by UI clients "
                              "connecting to a server.\n")

    if configParams.kill_server and not configParams.remote_server:
        raise BBMainException("FATAL: '--kill-server' can only be used to terminate a remote server")

    if "BBDEBUG" in os.environ:
        level = int(os.environ["BBDEBUG"])
        if level > configuration.debug:
            configuration.debug = level

    bb.msg.init_msgconfig(configParams.verbose, configuration.debug,
                          configuration.debug_domains)

    # Ensure logging messages get sent to the UI as events
    handler = bb.event.LogHandler()
    if not configParams.status_only:
        # In status only mode there are no logs and no UI
        logger.addHandler(handler)

    # Clear away any spurious environment variables while we stoke up the cooker
    cleanedvars = bb.utils.clean_environment()

    featureset = []
    if not configParams.server_only:
        # Collect the feature set for the UI
        featureset = getattr(ui_module, "featureSet", [])

    if configParams.server_only:
        for param in ('prefile', 'postfile'):
            value = getattr(configParams, param)
            if value:
                setattr(configuration, "%s_server" % param, value)
                param = "%s_server" % param

    if not configParams.remote_server:
        # we start a server with a given configuration
        server = start_server(servermodule, configParams, configuration, featureset)
        bb.event.ui_queue = []
    else:
        # we start a stub server that is actually a XMLRPClient that connects to a real server
        server = servermodule.BitBakeXMLRPCClient(configParams.observe_only, configParams.xmlrpctoken)
        server.saveConnectionDetails(configParams.remote_server)


    if not configParams.server_only:
        try:
            server_connection = server.establishConnection(featureset)
        except Exception as e:
            bb.fatal("Could not connect to server %s: %s" % (configParams.remote_server, str(e)))

        # Restore the environment in case the UI needs it
        for k in cleanedvars:
            os.environ[k] = cleanedvars[k]

        logger.removeHandler(handler)


        if configParams.status_only:
            server_connection.terminate()
            return 0

        if configParams.kill_server:
            server_connection.connection.terminateServer()
            bb.event.ui_queue = []
            return 0

        try:
            return ui_module.main(server_connection.connection, server_connection.events, configParams)
        finally:
            bb.event.ui_queue = []
            server_connection.terminate()
    else:
        print("Bitbake server address: %s, server port: %s" % (server.serverImpl.host, server.serverImpl.port))
        return 0

    return 1

@@ -52,10 +52,10 @@ def getMountedDev(path):
    parentDev = os.stat(path).st_dev
    currentDev = parentDev
    # When the current directory's device is different from the
    # parent's, then the current directory is a mount point
    # parrent's, then the current directory is a mount point
    while parentDev == currentDev:
        mountPoint = path
        # Use dirname to get the parent's directory
        # Use dirname to get the parrent's directory
        path = os.path.dirname(path)
        # Reach the "/"
        if path == mountPoint:
@@ -77,7 +77,7 @@ def getDiskData(BBDirs, configuration):
    """Prepare disk data for disk space monitor"""

    # Save the device IDs, need the ID to be unique (the dictionary's key is
    # unique), so that when more than one directory is located on the same
    # unique), so that when more than one directories are located in the same
    # device, we just monitor it once
    devDict = {}
    for pathSpaceInode in BBDirs.split():
@@ -187,11 +187,11 @@ class diskMonitor:
        if self.spaceInterval and self.inodeInterval:
            self.enableMonitor = True
            # These are for saving the previous disk free space and inode, we
            # use them to avoid printing too many warning messages
            # use them to avoid print too many warning messages
            self.preFreeS = {}
            self.preFreeI = {}
            # This is for STOPTASKS and ABORT, to avoid printing the message
            # repeatedly while waiting for the tasks to finish
            # This is for STOPTASKS and ABORT, to avoid print the message repeatly
            # during waiting the tasks to finish
            self.checked = {}
            for k in self.devDict:
                self.preFreeS[k] = 0
@@ -239,9 +239,11 @@ class diskMonitor:
                freeInode = st.f_favail

                if minInode and freeInode < minInode:
                    # Some filesystems use dynamic inodes so can't run out
                    # (e.g. btrfs). This is reported by the inode count being 0.
                    # Some fs formats' (e.g., btrfs) statvfs.f_files (inodes) is
                    # zero, this is a feature of the fs, we disable the inode
                    # checking for such a fs.
                    if st.f_files == 0:
logger.info("Inode check for %s is unavaliable, will remove it from disk monitor" % path)
                        self.devDict[k][2] = None
                        continue
                    # Always show warning, the self.checked would always be False if the action is WARN
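
The f_files test above comes straight from statvfs semantics; a small sketch of the same check outside the monitor class (os.statvfs is the only API assumed):

    import os

    def free_inodes(path):
        st = os.statvfs(path)
        if st.f_files == 0:
            return None          # dynamic inodes (e.g. btrfs): nothing to check
        return st.f_favail       # inodes available to unprivileged users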
@@ -150,7 +150,7 @@ loggerDefaultVerbose = False
loggerVerboseLogs = False
loggerDefaultDomains = []

def init_msgconfig(verbose, debug, debug_domains=None):
def init_msgconfig(verbose, debug, debug_domains = []):
    """
    Set default verbosity and debug levels config the logger
    """
@@ -158,10 +158,7 @@ def init_msgconfig(verbose, debug, debug_domains=None):
    bb.msg.loggerDefaultVerbose = verbose
    if verbose:
        bb.msg.loggerVerboseLogs = True
    if debug_domains:
        bb.msg.loggerDefaultDomains = debug_domains
    else:
        bb.msg.loggerDefaultDomains = []
    bb.msg.loggerDefaultDomains = debug_domains

def constructLogOptions():
    debug = loggerDefaultDebugLevel

@@ -202,8 +202,8 @@ if __name__ == '__main__':
    print(rec5._replace(k=222)._my_custom_method()) # MyMixIn's
    print(rec5._replace(k=222).count(2)) # MyMixIn's

    # Note that behavior: the standard namedtuple methods cannot be
    # overridden by a foreign mix-in -- even if the mix-in is declared
    # None that behavior: the standard namedtuple methods cannot be
    # overriden by a foreign mix-in -- even if the mix-in is declared
    # as the leftmost base class (but, obviously, you can override them
    # in the defined class or its subclasses):

@@ -26,10 +26,9 @@ File parsers for the BitBake build tools.

handlers = []

import errno
import logging
import os
import stat
import logging
import bb
import bb.utils
import bb.siggen
@@ -50,11 +49,8 @@ class ParseError(Exception):
        else:
            return "ParseError in %s: %s" % (self.filename, self.msg)

class SkipRecipe(Exception):
    """Exception raised to skip this recipe"""

class SkipPackage(SkipRecipe):
    """Exception raised to skip this recipe (use SkipRecipe in new code)"""
class SkipPackage(Exception):
    """Exception raised to skip this package"""

__mtime_cache = {}
def cached_mtime(f):
@@ -71,23 +67,13 @@ def cached_mtime_noerror(f):
    return __mtime_cache[f]

def update_mtime(f):
    try:
        __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
    except OSError:
        if f in __mtime_cache:
            del __mtime_cache[f]
        return 0
    __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
    return __mtime_cache[f]

def update_cache(f):
    if f in __mtime_cache:
        logger.debug(1, "Updating mtime cache for %s" % f)
        update_mtime(f)

def mark_dependency(d, f):
    if f.startswith('./'):
        f = "%s/%s" % (os.getcwd(), f[2:])
    deps = (d.getVar('__depends', False) or [])
    deps = (d.getVar('__depends') or [])
    s = (f, cached_mtime_noerror(f))
    if s not in deps:
        deps.append(s)
@@ -95,7 +81,7 @@ def mark_dependency(d, f):

def check_dependency(d, f):
    s = (f, cached_mtime_noerror(f))
    deps = (d.getVar('__depends', False) or [])
    deps = (d.getVar('__depends') or [])
    return s in deps

def supports(fn, data):
@@ -128,13 +114,14 @@ def resolve_file(fn, d):
        for af in attempts:
            mark_dependency(d, af)
        if not newfn:
            raise IOError(errno.ENOENT, "file %s not found in %s" % (fn, bbpath))
            raise IOError("file %s not found in %s" % (fn, bbpath))
        fn = newfn

    mark_dependency(d, fn)
    if not os.path.isfile(fn):
        raise IOError(errno.ENOENT, "file %s not found" % fn)
        raise IOError("file %s not found" % fn)

    logger.debug(2, "LOAD %s", fn)
    return fn

# Used by OpenEmbedded metadata
@@ -161,8 +148,8 @@ def vars_from_file(mypkg, d):
def get_file_depends(d):
    '''Return the dependent files'''
    dep_files = []
    depends = d.getVar('__base_depends', False) or []
    depends = depends + (d.getVar('__depends', False) or [])
    depends = d.getVar('__base_depends', True) or []
    depends = depends + (d.getVar('__depends', True) or [])
    for (fn, _) in depends:
        dep_files.append(os.path.abspath(fn))
    return " ".join(dep_files)

@@ -85,7 +85,7 @@ class DataNode(AstNode):
        if 'flag' in self.groupd and self.groupd['flag'] != None:
            return data.getVarFlag(key, self.groupd['flag'], noweakdefault=True)
        else:
            return data.getVar(key, False, noweakdefault=True, parsing=True)
            return data.getVar(key, noweakdefault=True)

    def eval(self, data):
        groupd = self.groupd
@@ -128,7 +128,7 @@ class DataNode(AstNode):
        if 'flag' in groupd and groupd['flag'] != None:
            flag = groupd['flag']
        elif groupd["lazyques"]:
            flag = "_defaultval"
            flag = "defaultval"

        loginfo['op'] = op
        loginfo['detail'] = groupd["value"]
@@ -136,10 +136,10 @@ class DataNode(AstNode):
        if flag:
            data.setVarFlag(key, flag, val, **loginfo)
        else:
            data.setVar(key, val, parsing=True, **loginfo)
            data.setVar(key, val, **loginfo)

class MethodNode(AstNode):
    tr_tbl = string.maketrans('/.+-@%&', '_______')
    tr_tbl = string.maketrans('/.+-@%', '______')

    def __init__(self, filename, lineno, func_name, body):
        AstNode.__init__(self, filename, lineno)
@@ -152,13 +152,13 @@ class MethodNode(AstNode):
            funcname = ("__anon_%s_%s" % (self.lineno, self.filename.translate(MethodNode.tr_tbl)))
            text = "def %s(d):\n" % (funcname) + text
            bb.methodpool.insert_method(funcname, text, self.filename)
            anonfuncs = data.getVar('__BBANONFUNCS', False) or []
            anonfuncs = data.getVar('__BBANONFUNCS') or []
            anonfuncs.append(funcname)
            data.setVar('__BBANONFUNCS', anonfuncs)
            data.setVar(funcname, text, parsing=True)
            data.setVar(funcname, text)
        else:
            data.setVarFlag(self.func_name, "func", 1)
            data.setVar(self.func_name, text, parsing=True)
            data.setVar(self.func_name, text)
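
What tr_tbl is for: anonymous functions are named after the file they were defined in, so path characters that are illegal in a Python identifier get mapped to '_'. With Python 2's string.maketrans, as used above:

    import string

    tr = string.maketrans('/.+-@%&', '_______')
    assert 'meta/classes/gtk+.bbclass'.translate(tr) == 'meta_classes_gtk__bbclass'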

class PythonMethodNode(AstNode):
    def __init__(self, filename, lineno, function, modulename, body):
@@ -175,7 +175,7 @@ class PythonMethodNode(AstNode):
        bb.methodpool.insert_method(self.modulename, text, self.filename)
        data.setVarFlag(self.function, "func", 1)
        data.setVarFlag(self.function, "python", 1)
        data.setVar(self.function, text, parsing=True)
        data.setVar(self.function, text)

class MethodFlagsNode(AstNode):
    def __init__(self, filename, lineno, key, m):
@@ -184,7 +184,7 @@ class MethodFlagsNode(AstNode):
        self.m = m

    def eval(self, data):
        if data.getVar(self.key, False):
        if data.getVar(self.key):
            # clean up old version of this piece of metadata, as its
            # flags could cause problems
            data.setVarFlag(self.key, 'python', None)
@@ -209,10 +209,10 @@ class ExportFuncsNode(AstNode):
        for func in self.n:
            calledfunc = self.classname + "_" + func

            if data.getVar(func, False) and not data.getVarFlag(func, 'export_func'):
            if data.getVar(func) and not data.getVarFlag(func, 'export_func'):
                continue

            if data.getVar(func, False):
            if data.getVar(func):
                data.setVarFlag(func, 'python', None)
                data.setVarFlag(func, 'func', None)

@@ -224,11 +224,9 @@ class ExportFuncsNode(AstNode):
                    data.setVarFlag(calledfunc, flag, data.getVarFlag(func, flag))

            if data.getVarFlag(calledfunc, "python"):
                data.setVar(func, "    bb.build.exec_func('" + calledfunc + "', d)\n", parsing=True)
                data.setVar(func, "    bb.build.exec_func('" + calledfunc + "', d)\n")
            else:
                if "-" in self.classname:
                    bb.fatal("The classname %s contains a dash character and is calling an sh function %s using EXPORT_FUNCTIONS. Since a dash is illegal in sh function names, this cannot work, please rename the class or don't use EXPORT_FUNCTIONS." % (self.classname, calledfunc))
                data.setVar(func, "    " + calledfunc + "\n", parsing=True)
                data.setVar(func, "    " + calledfunc + "\n")
            data.setVarFlag(func, 'export_func', '1')

class AddTaskNode(AstNode):
@@ -255,7 +253,7 @@ class BBHandlerNode(AstNode):
        self.hs = fns.split()

    def eval(self, data):
        bbhands = data.getVar('__BBHANDLERS', False) or []
        bbhands = data.getVar('__BBHANDLERS') or []
        for h in self.hs:
            bbhands.append(h)
            data.setVarFlag(h, "handler", 1)
@@ -315,22 +313,23 @@ def handleInherit(statements, filename, lineno, m):

def finalize(fn, d, variant = None):
    all_handlers = {}
    for var in d.getVar('__BBHANDLERS', False) or []:
    for var in d.getVar('__BBHANDLERS') or []:
        # try to add the handler
        bb.event.register(var, d.getVar(var, False), (d.getVarFlag(var, "eventmask", True) or "").split())
        bb.event.register(var, d.getVar(var), (d.getVarFlag(var, "eventmask", True) or "").split())

    bb.event.fire(bb.event.RecipePreFinalise(fn), d)

    bb.data.expandKeys(d)
    bb.data.update_data(d)
    code = []
    for funcname in d.getVar("__BBANONFUNCS", False) or []:
    for funcname in d.getVar("__BBANONFUNCS") or []:
        code.append("%s(d)" % funcname)
    bb.utils.better_exec("\n".join(code), {"d": d})
    bb.data.update_data(d)

    tasklist = d.getVar('__BBTASKS', False) or []
    bb.build.add_tasks(tasklist, d)
    tasklist = d.getVar('__BBTASKS') or []
    deltasklist = d.getVar('__BBDELTASKS') or []
    bb.build.add_tasks(tasklist, deltasklist, d)

    bb.parse.siggen.finalise(fn, d, variant)

@@ -338,10 +337,8 @@ def finalize(fn, d, variant = None):

    bb.event.fire(bb.event.RecipeParsed(fn), d)

def _create_variants(datastores, names, function, onlyfinalise):
def _create_variants(datastores, names, function):
    def create_variant(name, orig_d, arg = None):
        if onlyfinalise and name not in onlyfinalise:
            return
        new_d = bb.data.createCopy(orig_d)
        function(arg or name, new_d)
        datastores[name] = new_d
@@ -378,7 +375,7 @@ def _expand_versions(versions):
def multi_finalize(fn, d):
    appends = (d.getVar("__BBAPPEND", True) or "").split()
    for append in appends:
        logger.debug(1, "Appending .bbappend file %s to %s", append, fn)
        logger.debug(2, "Appending .bbappend file %s to %s", append, fn)
        bb.parse.BBHandler.handle(append, d, True)

    onlyfinalise = d.getVar("__ONLYFINALISE", False)
@@ -387,7 +384,7 @@ def multi_finalize(fn, d):
    d = bb.data.createCopy(safe_d)
    try:
        finalize(fn, d)
    except bb.parse.SkipRecipe as e:
    except bb.parse.SkipPackage as e:
        d.setVar("__SKIPPED", e.args[0])
    datastores = {"": safe_d}

@@ -430,10 +427,10 @@ def multi_finalize(fn, d):
            verfunc(pv, d, safe_d)
            try:
                finalize(fn, d)
            except bb.parse.SkipRecipe as e:
            except bb.parse.SkipPackage as e:
                d.setVar("__SKIPPED", e.args[0])

        _create_variants(datastores, versions, verfunc, onlyfinalise)
        _create_variants(datastores, versions, verfunc)

    extended = d.getVar("BBCLASSEXTEND", True) or ""
    if extended:
@@ -463,14 +460,14 @@ def multi_finalize(fn, d):
            bb.parse.BBHandler.inherit(extendedmap[name], fn, 0, d)

        safe_d.setVar("BBCLASSEXTEND", extended)
        _create_variants(datastores, extendedmap.keys(), extendfunc, onlyfinalise)
        _create_variants(datastores, extendedmap.keys(), extendfunc)

    for variant, variant_d in datastores.iteritems():
        if variant:
            try:
                if not onlyfinalise or variant in onlyfinalise:
                    finalize(fn, variant_d, variant)
            except bb.parse.SkipRecipe as e:
            except bb.parse.SkipPackage as e:
                variant_d.setVar("__SKIPPED", e.args[0])

    if len(datastores) > 1:

@@ -32,7 +32,7 @@ import bb.build, bb.utils
from bb import data

from . import ConfHandler
from .. import resolve_file, ast, logger, ParseError
from .. import resolve_file, ast, logger
from .ConfHandler import include, init

# For compatibility
@@ -48,7 +48,7 @@ __def_regexp__ = re.compile( r"def\s+(\w+).*:" )
__python_func_regexp__ = re.compile( r"(\s+.*)|(^$)" )


__infunc__ = []
__infunc__ = ""
__inpython__ = False
__body__ = []
__classname__ = ""
@@ -69,14 +69,15 @@ def supports(fn, d):
    return os.path.splitext(fn)[-1] in [".bb", ".bbclass", ".inc"]

def inherit(files, fn, lineno, d):
    __inherit_cache = d.getVar('__inherit_cache', False) or []
    __inherit_cache = d.getVar('__inherit_cache') or []
    files = d.expand(files).split()
    for file in files:
        if not os.path.isabs(file) and not file.endswith(".bbclass"):
            file = os.path.join('classes', '%s.bbclass' % file)

        if not os.path.isabs(file):
            bbpath = d.getVar("BBPATH", True)
            dname = os.path.dirname(fn)
            bbpath = "%s:%s" % (dname, d.getVar("BBPATH", True))
            abs_fn, attempts = bb.utils.which(bbpath, file, history=True)
            for af in attempts:
                if af != abs_fn:
@@ -85,11 +86,11 @@ def inherit(files, fn, lineno, d):
            file = abs_fn

        if not file in __inherit_cache:
            logger.debug(1, "Inheriting %s (from %s:%d)" % (file, fn, lineno))
            logger.log(logging.DEBUG -1, "BB %s:%d: inheriting %s", fn, lineno, file)
            __inherit_cache.append( file )
            d.setVar('__inherit_cache', __inherit_cache)
            include(fn, file, lineno, d, "inherit")
            __inherit_cache = d.getVar('__inherit_cache', False) or []
            __inherit_cache = d.getVar('__inherit_cache') or []

def get_statements(filename, absolute_filename, base_name):
    global cached_statements
@@ -119,23 +120,29 @@ def get_statements(filename, absolute_filename, base_name):
def handle(fn, d, include):
    global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __residue__, __classname__
    __body__ = []
    __infunc__ = []
    __infunc__ = ""
    __classname__ = ""
    __residue__ = []


    if include == 0:
        logger.debug(2, "BB %s: handle(data)", fn)
    else:
        logger.debug(2, "BB %s: handle(data, include)", fn)

    base_name = os.path.basename(fn)
    (root, ext) = os.path.splitext(base_name)
    init(d)

    if ext == ".bbclass":
        __classname__ = root
        __inherit_cache = d.getVar('__inherit_cache', False) or []
        __inherit_cache = d.getVar('__inherit_cache') or []
        if not fn in __inherit_cache:
            __inherit_cache.append(fn)
            d.setVar('__inherit_cache', __inherit_cache)

    if include != 0:
        oldfile = d.getVar('FILE', False)
        oldfile = d.getVar('FILE')
    else:
        oldfile = None

@@ -148,25 +155,20 @@ def handle(fn, d, include):
    statements = get_statements(fn, abs_fn, base_name)

    # DONE WITH PARSING... time to evaluate
    if ext != ".bbclass" and abs_fn != oldfile:
    if ext != ".bbclass":
        d.setVar('FILE', abs_fn)

    try:
        statements.eval(d)
    except bb.parse.SkipRecipe:
    except bb.parse.SkipPackage:
        bb.data.setVar("__SKIPPED", True, d)
        if include == 0:
            return { "" : d }

    if __infunc__:
        raise ParseError("Shell function %s is never closed" % __infunc__[0], __infunc__[1], __infunc__[2])
    if __residue__:
        raise ParseError("Leftover unparsed (incomplete?) data %s from %s" % __residue__, fn)

    if ext != ".bbclass" and include == 0:
        return ast.multi_finalize(fn, d)

    if ext != ".bbclass" and oldfile and abs_fn != oldfile:
    if oldfile:
        d.setVar("FILE", oldfile)

    return d
@@ -176,8 +178,8 @@ def feeder(lineno, s, fn, root, statements):
    if __infunc__:
        if s == '}':
            __body__.append('')
            ast.handleMethod(statements, fn, lineno, __infunc__[0], __body__)
            __infunc__ = []
            ast.handleMethod(statements, fn, lineno, __infunc__, __body__)
            __infunc__ = ""
            __body__ = []
        else:
            __body__.append(s)
@@ -221,8 +223,8 @@ def feeder(lineno, s, fn, root, statements):

    m = __func_start_regexp__.match(s)
    if m:
        __infunc__ = [m.group("func") or "__anonymous", fn, lineno]
        ast.handleMethodFlags(statements, fn, lineno, __infunc__[0], m)
        __infunc__ = m.group("func") or "__anonymous"
        ast.handleMethodFlags(statements, fn, lineno, __infunc__, m)
        return

    m = __def_regexp__.match(s)

@@ -24,11 +24,10 @@
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import errno
|
||||
import re
|
||||
import os
|
||||
import re, os
|
||||
import logging
|
||||
import bb.utils
|
||||
from bb.parse import ParseError, resolve_file, ast, logger, handle
|
||||
from bb.parse import ParseError, resolve_file, ast, logger
|
||||
|
||||
__config_regexp__ = re.compile( r"""
|
||||
^
|
||||
@@ -59,7 +58,7 @@ __require_regexp__ = re.compile( r"require\s+(.+)" )
|
||||
__export_regexp__ = re.compile( r"export\s+([a-zA-Z0-9\-_+.${}/]+)$" )
|
||||
|
||||
def init(data):
|
||||
topdir = data.getVar('TOPDIR', False)
|
||||
topdir = data.getVar('TOPDIR')
|
||||
if not topdir:
|
||||
data.setVar('TOPDIR', os.getcwd())
|
||||
|
||||
@@ -67,43 +66,40 @@ def init(data):
|
||||
def supports(fn, d):
|
||||
return fn[-5:] == ".conf"
|
||||
|
||||
def include(parentfn, fn, lineno, data, error_out):
|
||||
def include(oldfn, fn, lineno, data, error_out):
|
||||
"""
|
||||
error_out: A string indicating the verb (e.g. "include", "inherit") to be
|
||||
used in a ParseError that will be raised if the file to be included could
|
||||
not be included. Specify False to avoid raising an error in this case.
|
||||
"""
|
||||
if parentfn == fn: # prevent infinite recursion
|
||||
if oldfn == fn: # prevent infinite recursion
|
||||
return None
|
||||
|
||||
import bb
|
||||
fn = data.expand(fn)
|
||||
parentfn = data.expand(parentfn)
|
||||
oldfn = data.expand(oldfn)
|
||||
|
||||
if not os.path.isabs(fn):
|
||||
dname = os.path.dirname(parentfn)
|
||||
dname = os.path.dirname(oldfn)
|
||||
bbpath = "%s:%s" % (dname, data.getVar("BBPATH", True))
|
||||
abs_fn, attempts = bb.utils.which(bbpath, fn, history=True)
|
||||
if abs_fn and bb.parse.check_dependency(data, abs_fn):
|
||||
logger.warn("Duplicate inclusion for %s in %s" % (abs_fn, data.getVar('FILE', True)))
|
||||
bb.warn("Duplicate inclusion for %s in %s" % (abs_fn, data.getVar('FILE', True)))
|
||||
for af in attempts:
|
||||
bb.parse.mark_dependency(data, af)
|
||||
if abs_fn:
|
||||
fn = abs_fn
|
||||
elif bb.parse.check_dependency(data, fn):
|
||||
logger.warn("Duplicate inclusion for %s in %s" % (fn, data.getVar('FILE', True)))
|
||||
bb.warn("Duplicate inclusion for %s in %s" % (fn, data.getVar('FILE', True)))
|
||||
|
||||
from bb.parse import handle
|
||||
try:
|
||||
bb.parse.handle(fn, data, True)
|
||||
except (IOError, OSError) as exc:
|
||||
if exc.errno == errno.ENOENT:
|
||||
if error_out:
|
||||
raise ParseError("Could not %s file %s" % (error_out, fn), parentfn, lineno)
|
||||
logger.debug(2, "CONF file '%s' not found", fn)
|
||||
else:
|
||||
if error_out:
|
||||
raise ParseError("Could not %s file %s: %s" % (error_out, fn, exc.strerror), parentfn, lineno)
|
||||
else:
|
||||
raise ParseError("Error parsing %s: %s" % (fn, exc.strerror), parentfn, lineno)
|
||||
ret = handle(fn, data, True)
|
||||
except (IOError, OSError):
|
||||
if error_out:
|
||||
raise ParseError("Could not %(error_out)s file %(fn)s" % vars(), oldfn, lineno)
|
||||
logger.debug(2, "CONF file '%s' not found", fn)
|
||||
bb.parse.mark_dependency(data, fn)
|
||||
|
||||
# We have an issue where a UI might want to enforce particular settings such as
|
||||
# an empty DISTRO variable. If configuration files do something like assigning
|
||||
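
The include() logic above resolves a relative filename against the including file's directory followed by the rest of BBPATH, and flags duplicate inclusions. A minimal standalone sketch of that lookup, with a local stand-in for bb.utils.which() and purely hypothetical paths:

    import os

    def which(search_path, candidate, history=False):
        # Walk a colon-separated search path, as bb.utils.which() does,
        # recording every location tried so callers can report duplicates.
        attempts = []
        for entry in search_path.split(":"):
            trial = os.path.join(entry, candidate)
            attempts.append(trial)
            if os.path.exists(trial):
                return (trial, attempts) if history else trial
        return ("", attempts) if history else ""

    parentfn = "/srv/build/conf/local.conf"   # hypothetical including file
    bbpath = "%s:%s" % (os.path.dirname(parentfn), "/srv/layers/meta/conf")
    abs_fn, attempts = which(bbpath, "site.conf", history=True)
    print(abs_fn or "site.conf not found", attempts)
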
@@ -119,7 +115,7 @@ def handle(fn, data, include):
if include == 0:
oldfile = None
else:
oldfile = data.getVar('FILE', False)
oldfile = data.getVar('FILE')

abs_fn = resolve_file(fn, data)
f = open(abs_fn, 'r')

@@ -199,9 +199,7 @@ class PersistData(object):
del self.data[domain][key]

def connect(database):
connection = sqlite3.connect(database, timeout=5, isolation_level=None)
connection.execute("pragma synchronous = off;")
return connection
return sqlite3.connect(database, timeout=5, isolation_level=None)

def persist(domain, d):
"""Convenience factory for SQLTable objects based upon metadata"""
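
The two connect() variants differ only in whether the connection is tuned. For reference, a small self-contained sketch of what the tuned variant sets up: isolation_level=None selects sqlite3's autocommit mode, and the synchronous pragma trades durability for speed, which is acceptable for regenerable cache data (the table and values here are illustrative only):

    import os, sqlite3, tempfile

    db = os.path.join(tempfile.mkdtemp(), "persist.sqlite3")
    # Autocommit mode; every statement takes effect immediately.
    connection = sqlite3.connect(db, timeout=5, isolation_level=None)
    connection.execute("pragma synchronous = off;")
    connection.execute("CREATE TABLE IF NOT EXISTS cache(key TEXT, value TEXT)")
    connection.execute("INSERT INTO cache VALUES (?, ?)", ("domain", "value"))
    print(connection.execute("SELECT * FROM cache").fetchall())
    connection.close()
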
@@ -64,7 +64,7 @@ class Popen(subprocess.Popen):
options.update(kwargs)
subprocess.Popen.__init__(self, *args, **options)

def _logged_communicate(pipe, log, input, extrafiles):
def _logged_communicate(pipe, log, input):
if pipe.stdin:
if input is not None:
pipe.stdin.write(input)
@@ -79,20 +79,6 @@ def _logged_communicate(pipe, log, input, extrafiles):
if pipe.stderr is not None:
bb.utils.nonblockingfd(pipe.stderr.fileno())
rin.append(pipe.stderr)
for fobj, _ in extrafiles:
bb.utils.nonblockingfd(fobj.fileno())
rin.append(fobj)

def readextras(selected):
for fobj, func in extrafiles:
if fobj in selected:
try:
data = fobj.read()
except IOError as err:
if err.errno == errno.EAGAIN or err.errno == errno.EWOULDBLOCK:
data = None
if data is not None:
func(data)

try:
while pipe.poll() is None:
@@ -114,27 +100,18 @@ def _logged_communicate(pipe, log, input, extrafiles):
if data is not None:
errdata.append(data)
log.write(data)

readextras(r)

finally:
log.flush()

readextras([fobj for fobj, _ in extrafiles])

if pipe.stdout is not None:
pipe.stdout.close()
if pipe.stderr is not None:
pipe.stderr.close()
return ''.join(outdata), ''.join(errdata)

def run(cmd, input=None, log=None, extrafiles=None, **options):
def run(cmd, input=None, log=None, **options):
"""Convenience function to run a command and return its output, raising an
exception when the command fails"""

if not extrafiles:
extrafiles = []

if isinstance(cmd, basestring) and not "shell" in options:
options["shell"] = True

@@ -147,7 +124,7 @@ def run(cmd, input=None, log=None, extrafiles=None, **options):
raise CmdError(cmd, exc)

if log:
stdout, stderr = _logged_communicate(pipe, log, input, extrafiles)
stdout, stderr = _logged_communicate(pipe, log, input)
else:
stdout, stderr = pipe.communicate(input)

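The extrafiles plumbing above hinges on non-blocking reads: descriptors are marked O_NONBLOCK, select() picks the ready ones, and EAGAIN/EWOULDBLOCK is treated as "nothing to deliver yet". A runnable POSIX sketch of that pattern in isolation:

    import errno, fcntl, os, select

    def nonblockingfd(fd):
        # Same idea as bb.utils.nonblockingfd(): set O_NONBLOCK on the fd.
        fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)

    r_fd, w_fd = os.pipe()
    nonblockingfd(r_fd)
    os.write(w_fd, b"task output\n")

    collected = []
    ready, _, _ = select.select([r_fd], [], [], 0.1)
    for fd in ready:
        try:
            data = os.read(fd, 4096)
        except OSError as err:
            # A non-blocking read may legitimately have nothing for us yet.
            if err.errno in (errno.EAGAIN, errno.EWOULDBLOCK):
                data = None
            else:
                raise
        if data:
            collected.append(data)
    print(b"".join(collected))
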
@@ -38,4 +38,4 @@ class ExitSignal(ShellError):

class ReturnSignal(ShellError):
"""Exit signal."""
pass
pass
@@ -97,7 +97,7 @@ class RunQueueScheduler(object):
def __init__(self, runqueue, rqdata):
"""
The default scheduler just returns the first buildable task (the
priority map is sorted by task number)
priority map is sorted by task numer)
"""
self.rq = runqueue
self.rqdata = rqdata
@@ -186,7 +186,7 @@ class RunQueueSchedulerCompletion(RunQueueSchedulerSpeed):
"""
A scheduler optimised to complete .bb files are quickly as possible. The
priority map is sorted by task weight, but then reordered so once a given
.bb file starts to build, it's completed as quickly as possible. This works
.bb file starts to build, its completed as quickly as possible. This works
well where disk space is at a premium and classes like OE's rm_work are in
force.
"""
@@ -430,7 +430,7 @@ class RunQueueData:
# Nothing to do
return 0

logger.info("Preparing RunQueue")
logger.info("Preparing runqueue")

# Step A - Work out a list of tasks to run
#
@@ -793,20 +793,9 @@ class RunQueueData:
if self.cooker.configuration.invalidate_stamp:
for (fn, target) in self.target_pairs:
for st in self.cooker.configuration.invalidate_stamp.split(','):
if not st.startswith("do_"):
st = "do_%s" % st
invalidate_task(fn, st, True)
invalidate_task(fn, "do_%s" % st, True)

# Create and print to the logs a virtual/xxxx -> PN (fn) table
virtmap = taskData.get_providermap()
virtpnmap = {}
for v in virtmap:
virtpnmap[v] = self.dataCache.pkg_fn[virtmap[v]]
bb.debug(2, "%s resolved to: %s (%s)" % (v, virtpnmap[v], virtmap[v]))
if hasattr(bb.parse.siggen, "tasks_resolved"):
bb.parse.siggen.tasks_resolved(virtmap, virtpnmap, self.dataCache)

# Iterate over the task list and call into the siggen code
# Interate over the task list and call into the siggen code
dealtwith = set()
todeal = set(range(len(self.runq_fnid)))
while len(todeal) > 0:
@@ -870,18 +859,15 @@ class RunQueue:

def _start_worker(self, fakeroot = False, rqexec = None):
logger.debug(1, "Starting bitbake-worker")
magic = "decafbad"
if self.cooker.configuration.profile:
magic = "decafbadbad"
if fakeroot:
fakerootcmd = self.cfgData.getVar("FAKEROOTCMD", True)
fakerootenv = (self.cfgData.getVar("FAKEROOTBASEENV", True) or "").split()
env = os.environ.copy()
for key, value in (var.split('=') for var in fakerootenv):
env[key] = value
worker = subprocess.Popen([fakerootcmd, "bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE, env=env)
worker = subprocess.Popen([fakerootcmd, "bitbake-worker", "decafbad"], stdout=subprocess.PIPE, stdin=subprocess.PIPE, env=env)
else:
worker = subprocess.Popen(["bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
worker = subprocess.Popen(["bitbake-worker", "decafbad"], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
bb.utils.nonblockingfd(worker.stdout)
workerpipe = runQueuePipe(worker.stdout, None, self.cfgData, self, rqexec)

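_start_worker() passes the selected magic word on the worker's argv, presumably so the child can decide whether to enable profiling before it starts servicing requests. A toy sketch of just the argv handshake, with a python -c one-liner standing in for bitbake-worker (everything here is hypothetical scaffolding):

    import subprocess, sys

    profile = False  # would come from the cooker configuration
    magic = "decafbadbad" if profile else "decafbad"

    # The child inspects its argv to pick a mode, as bitbake-worker would.
    child = ("import sys; "
             "print('profiling' if sys.argv[1] == 'decafbadbad' else 'normal')")
    worker = subprocess.Popen([sys.executable, "-c", child, magic],
                              stdout=subprocess.PIPE)
    print(worker.communicate()[0].strip())
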
@@ -890,7 +876,9 @@ class RunQueue:
"fakerootenv" : self.rqdata.dataCache.fakerootenv,
"fakerootdirs" : self.rqdata.dataCache.fakerootdirs,
"fakerootnoenv" : self.rqdata.dataCache.fakerootnoenv,
"sigdata" : bb.parse.siggen.get_taskdata(),
"hashes" : bb.parse.siggen.taskhash,
"hash_deps" : bb.parse.siggen.runtaskdeps,
"sigchecksums" : bb.parse.siggen.file_checksum_values,
"runq_hash" : self.rqdata.runq_hash,
"logdefaultdebug" : bb.msg.loggerDefaultDebugLevel,
"logdefaultverbose" : bb.msg.loggerDefaultVerbose,
@@ -979,11 +967,11 @@ class RunQueue:

stampfile = bb.build.stampfile(taskname, self.rqdata.dataCache, fn)

# If the stamp is missing, it's not current
# If the stamp is missing its not current
if not os.access(stampfile, os.F_OK):
logger.debug(2, "Stampfile %s not available", stampfile)
return False
# If it's a 'nostamp' task, it's not current
# If its a 'nostamp' task, it's not current
taskdep = self.rqdata.dataCache.task_deps[fn]
if 'nostamp' in taskdep and taskname in taskdep['nostamp']:
logger.debug(2, "%s.%s is nostamp\n", fn, taskname)
@@ -1075,9 +1063,9 @@ class RunQueue:
retval = self.rqexe.execute()

if self.state is runQueueCleanUp:
retval = self.rqexe.finish()
self.rqexe.finish()

if (self.state is runQueueComplete or self.state is runQueueFailed) and self.rqexe:
if self.state is runQueueComplete or self.state is runQueueFailed:
self.teardown_workers()
if self.rqexe.stats.failed:
logger.info("Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and %d failed.", self.rqexe.stats.completed + self.rqexe.stats.failed, self.rqexe.stats.skipped, self.rqexe.stats.failed)
@@ -1107,13 +1095,6 @@ class RunQueue:
raise
except SystemExit:
raise
except bb.BBHandledException:
try:
self.teardown_workers()
except:
pass
self.state = runQueueComplete
raise
except:
logger.error("An uncaught exception occured in runqueue, please see the failure below:")
try:
@@ -1125,7 +1106,6 @@ class RunQueue:

def finish_runqueue(self, now = False):
if not self.rqexe:
self.state = runQueueComplete
return

if now:
@@ -1172,14 +1152,9 @@ class RunQueue:
sq_hash.append(self.rqdata.runq_hash[task])
sq_taskname.append(taskname)
sq_task.append(task)
locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.expanded_data }
try:
call = self.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=True)"
valid = bb.utils.better_eval(call, locs)
# Handle version with no siginfo parameter
except TypeError:
call = self.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d)"
valid = bb.utils.better_eval(call, locs)
call = self.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d)"
locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.data }
valid = bb.utils.better_eval(call, locs)
for v in valid:
valid_new.add(sq_task[v])

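The try/except TypeError dance above is the usual way to stay compatible with user-supplied hooks that predate a new keyword argument: attempt the richer call first, then retry without it. A minimal sketch of the pattern (the validator names here are hypothetical):

    def old_validator(sq_fn, sq_task, sq_hash, sq_hashfn, d):
        # An older hook that does not know about siginfo.
        return list(range(len(sq_task)))

    def call_validator(func, sq_fn, sq_task, sq_hash, sq_hashfn, d):
        try:
            return func(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=True)
        except TypeError:
            # Fall back for hooks without the newer parameter.
            return func(sq_fn, sq_task, sq_hash, sq_hashfn, d)

    print(call_validator(old_validator, [], ["do_populate_sysroot"], [], [], None))
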
@@ -1291,9 +1266,6 @@ class RunQueueExecute:
if rq.fakeworkerpipe:
rq.fakeworkerpipe.setrunqueueexec(self)

if self.number_tasks <= 0:
bb.fatal("Invalid BB_NUMBER_THREADS %s" % self.number_tasks)

def runqueue_process_waitpid(self, task, status):

# self.build_stamps[pid] may not exist when use shared work directory.
@@ -1332,14 +1304,15 @@ class RunQueueExecute:
if self.stats.active > 0:
bb.event.fire(runQueueExitWait(self.stats.active), self.cfgData)
self.rq.read_workers()
return self.rq.active_fds()

return

if len(self.failed_fnids) != 0:
self.rq.state = runQueueFailed
return True
return

self.rq.state = runQueueComplete
return True
return

def check_dependencies(self, task, taskdeps, setscene = False):
if not self.rq.depvalidate:
@@ -1357,7 +1330,7 @@ class RunQueueExecute:
taskname = self.rqdata.runq_task[depid]
taskdata[dep] = [pn, taskname, fn]
call = self.rq.depvalidate + "(task, taskdata, notneeded, d)"
locs = { "task" : task, "taskdata" : taskdata, "notneeded" : self.scenequeue_notneeded, "d" : self.cooker.expanded_data }
locs = { "task" : task, "taskdata" : taskdata, "notneeded" : self.scenequeue_notneeded, "d" : self.cooker.data }
valid = bb.utils.better_eval(call, locs)
return valid

@@ -1426,7 +1399,7 @@ class RunQueueExecuteTasks(RunQueueExecute):

call = self.rq.setsceneverify + "(covered, tasknames, fnids, fns, d, invalidtasks=invalidtasks)"
call2 = self.rq.setsceneverify + "(covered, tasknames, fnids, fns, d)"
locs = { "covered" : self.rq.scenequeue_covered, "tasknames" : self.rqdata.runq_task, "fnids" : self.rqdata.runq_fnid, "fns" : self.rqdata.taskData.fn_index, "d" : self.cooker.expanded_data, "invalidtasks" : invalidtasks }
locs = { "covered" : self.rq.scenequeue_covered, "tasknames" : self.rqdata.runq_task, "fnids" : self.rqdata.runq_fnid, "fns" : self.rqdata.taskData.fn_index, "d" : self.cooker.data, "invalidtasks" : invalidtasks }
# Backwards compatibility with older versions without invalidtasks
try:
covered_remove = bb.utils.better_eval(call, locs)
@@ -1578,8 +1551,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
bb.event.fire(startevent, self.cfgData)
self.runq_running[task] = 1
self.stats.taskActive()
if not self.cooker.configuration.dry_run:
bb.build.make_stamp(taskname, self.rqdata.dataCache, fn)
bb.build.make_stamp(taskname, self.rqdata.dataCache, fn)
self.task_complete(task)
return True
else:
@@ -1591,12 +1563,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
taskdep = self.rqdata.dataCache.task_deps[fn]
if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not self.cooker.configuration.dry_run:
if not self.rq.fakeworker:
try:
self.rq.start_fakeworker(self)
except OSError as exc:
logger.critical("Failed to spawn fakeroot worker to run %s:%s: %s" % (fn, taskname, str(exc)))
self.rq.state = runQueueFailed
return True
self.rq.start_fakeworker(self)
self.rq.fakeworker.stdin.write("<runtask>" + pickle.dumps((fn, task, taskname, False, self.cooker.collection.get_file_appends(fn), taskdepdata)) + "</runtask>")
self.rq.fakeworker.stdin.flush()
else:
@@ -1641,8 +1608,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
pn = self.rqdata.dataCache.pkg_fn[fn]
taskname = self.rqdata.runq_task[revdep]
deps = self.rqdata.runq_depends[revdep]
provides = self.rqdata.dataCache.fn_provides[fn]
taskdepdata[revdep] = [pn, taskname, fn, deps, provides]
taskdepdata[revdep] = [pn, taskname, fn, deps]
for revdep2 in deps:
if revdep2 not in taskdepdata:
additional.append(revdep2)
@@ -1720,7 +1686,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute):

process_endpoints(endpoints)

# Build a list of setscene tasks which are "unskippable"
# Build a list of setscene tasks which as "unskippable"
# These are direct endpoints referenced by the build
endpoints2 = {}
sq_revdeps2 = []
@@ -1852,7 +1818,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
sq_taskname.append(taskname)
sq_task.append(task)
call = self.rq.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d)"
locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.expanded_data }
locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.data }
valid = bb.utils.better_eval(call, locs)

valid_new = stamppresent
@@ -1876,10 +1842,6 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
realtask = self.rqdata.runq_setscene[task]
realdep = self.rqdata.runq_setscene[dep]
logger.debug(2, "%s was unavailable and is a hard dependency of %s so skipping" % (self.rqdata.get_user_idstring(realtask), self.rqdata.get_user_idstring(realdep)))
self.scenequeue_updatecounters(dep, fail)
continue
if task not in self.sq_revdeps2[dep]:
# May already have been removed by the fail case above
continue
self.sq_revdeps2[dep].remove(task)
if len(self.sq_revdeps2[dep]) == 0:
@@ -2021,10 +1983,6 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
logger.debug(1, 'We can skip tasks %s', sorted(self.rq.scenequeue_covered))

self.rq.state = runQueueRunInit

completeevent = sceneQueueComplete(self.stats, self.rq)
bb.event.fire(completeevent, self.cfgData)

return True

def runqueue_process_waitpid(self, task, status):
@@ -2077,7 +2035,7 @@ class sceneQueueEvent(runQueueEvent):

class runQueueTaskStarted(runQueueEvent):
"""
Event notifying a task was started
Event notifing a task was started
"""
def __init__(self, task, stats, rq, noexec=False):
runQueueEvent.__init__(self, task, stats, rq)
@@ -2085,7 +2043,7 @@ class runQueueTaskStarted(runQueueEvent):

class sceneQueueTaskStarted(sceneQueueEvent):
"""
Event notifying a setscene task was started
Event notifing a setscene task was started
"""
def __init__(self, task, stats, rq, noexec=False):
sceneQueueEvent.__init__(self, task, stats, rq)
@@ -2093,7 +2051,7 @@ class sceneQueueTaskStarted(sceneQueueEvent):

class runQueueTaskFailed(runQueueEvent):
"""
Event notifying a task failed
Event notifing a task failed
"""
def __init__(self, task, stats, exitcode, rq):
runQueueEvent.__init__(self, task, stats, rq)
@@ -2101,33 +2059,25 @@ class runQueueTaskFailed(runQueueEvent):

class sceneQueueTaskFailed(sceneQueueEvent):
"""
Event notifying a setscene task failed
Event notifing a setscene task failed
"""
def __init__(self, task, stats, exitcode, rq):
sceneQueueEvent.__init__(self, task, stats, rq)
self.exitcode = exitcode

class sceneQueueComplete(sceneQueueEvent):
"""
Event when all the sceneQueue tasks are complete
"""
def __init__(self, stats, rq):
self.stats = stats.copy()
bb.event.Event.__init__(self)

class runQueueTaskCompleted(runQueueEvent):
"""
Event notifying a task completed
Event notifing a task completed
"""

class sceneQueueTaskCompleted(sceneQueueEvent):
"""
Event notifying a setscene task completed
Event notifing a setscene task completed
"""

class runQueueTaskSkipped(runQueueEvent):
"""
Event notifying a task was skipped
Event notifing a task was skipped
"""
def __init__(self, task, stats, rq, reason):
runQueueEvent.__init__(self, task, stats, rq)

@@ -38,18 +38,14 @@ from . import BitBakeBaseServer, BitBakeBaseServerConnection, BaseImplServer
logger = logging.getLogger('BitBake')

class ServerCommunicator():
def __init__(self, connection, event_handle, server):
def __init__(self, connection, event_handle):
self.connection = connection
self.event_handle = event_handle
self.server = server

def runCommand(self, command):
# @todo try/except
self.connection.send(command)

if not self.server.is_alive():
raise SystemExit

while True:
# don't let the user ctrl-c while we're waiting for a response
try:
@@ -97,7 +93,7 @@ class ProcessServer(Process, BaseImplServer):
def run(self):
for event in bb.event.ui_queue:
self.event_queue.put(event)
self.event_handle.value = bb.event.register_UIHhandler(self, True)
self.event_handle.value = bb.event.register_UIHhandler(self)

bb.cooker.server_main(self.cooker, self.main)

@@ -114,12 +110,8 @@ class ProcessServer(Process, BaseImplServer):
if self.quitout.poll():
self.quitout.recv()
self.quit = True
try:
self.runCommand(["stateForceShutdown"])
except:
pass

self.idle_commands(.1, [self.command_channel, self.quitout])
self.idle_commands(.1, [self.event_queue._reader, self.command_channel, self.quitout])
except Exception:
logger.exception('Running command %s', command)

@@ -127,12 +119,9 @@ class ProcessServer(Process, BaseImplServer):
bb.event.unregister_UIHhandler(self.event_handle.value)
self.command_channel.close()
self.cooker.shutdown(True)
self.quitout.close()

def idle_commands(self, delay, fds=None):
def idle_commands(self, delay, fds = []):
nextsleep = delay
if not fds:
fds = []

for function, data in self._idlefuns.items():
try:
@@ -142,20 +131,14 @@ class ProcessServer(Process, BaseImplServer):
nextsleep = None
elif retval is True:
nextsleep = None
elif isinstance(retval, float):
if (retval < nextsleep):
nextsleep = retval
elif nextsleep is None:
continue
else:
fds = fds + retval
except SystemExit:
raise
except Exception as exc:
if not isinstance(exc, bb.BBHandledException):
logger.exception('Running idle function')
del self._idlefuns[function]
self.quit = True
except Exception:
logger.exception('Running idle function')

if nextsleep is not None:
select.select(fds,[],[],nextsleep)
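
One side of the idle_commands hunk uses fds = [] as a default argument; the other replaces it with fds=None plus an in-body default. The same change recurs in taskdata.py further down. It is the standard fix for Python's shared mutable default arguments, demonstrated here in two functions:

    def broken(item, acc=[]):
        # The default list is created once and shared across calls.
        acc.append(item)
        return acc

    def fixed(item, acc=None):
        if acc is None:
            acc = []
        acc.append(item)
        return acc

    print(broken(1), broken(2))  # [1, 2] [1, 2] -- state leaks between calls
    print(fixed(1), fixed(2))    # [1] [2]
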
@@ -175,18 +158,14 @@ class BitBakeProcessServerConnection(BitBakeBaseServerConnection):
self.procserver = serverImpl
self.ui_channel = ui_channel
self.event_queue = event_queue
self.connection = ServerCommunicator(self.ui_channel, self.procserver.event_handle, self.procserver)
self.connection = ServerCommunicator(self.ui_channel, self.procserver.event_handle)
self.events = self.event_queue
self.terminated = False

def sigterm_terminate(self):
bb.error("UI received SIGTERM")
self.terminate()

def terminate(self):
if self.terminated:
return
self.terminated = True
def flushevents():
while True:
try:
@@ -218,20 +197,14 @@ class ProcessEventQueue(multiprocessing.queues.Queue):

def waitEvent(self, timeout):
if self.exit:
sys.exit(1)
raise KeyboardInterrupt()
try:
if not self.server.is_alive():
self.setexit()
return None
return self.get(True, timeout)
except Empty:
return None

def getEvent(self):
try:
if not self.server.is_alive():
self.setexit()
return None
return self.get(False)
except Empty:
return None
@@ -246,7 +219,6 @@ class BitBakeServer(BitBakeBaseServer):
self.ui_channel, self.server_channel = Pipe()
self.event_queue = ProcessEventQueue(0)
self.serverImpl = ProcessServer(self.server_channel, self.event_queue, None)
self.event_queue.server = self.serverImpl

def detach(self):
self.serverImpl.start()

@@ -80,7 +80,7 @@ class BBTransport(xmlrpclib.Transport):

def _create_server(host, port, timeout = 60):
t = BBTransport(timeout)
s = xmlrpclib.ServerProxy("http://%s:%d/" % (host, port), transport=t, allow_none=True)
s = xmlrpclib.Server("http://%s:%d/" % (host, port), transport=t, allow_none=True)
return s, t

class BitBakeServerCommands():
@@ -99,7 +99,7 @@ class BitBakeServerCommands():
if (self.cooker.state in [bb.cooker.state.parsing, bb.cooker.state.running]):
return None

self.event_handle = bb.event.register_UIHhandler(s, True)
self.event_handle = bb.event.register_UIHhandler(s)
return self.event_handle

def unregisterEventHandler(self, handlerNum):
@@ -235,16 +235,12 @@ class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer):
fds = [self]
nextsleep = 0.1
for function, data in self._idlefuns.items():
retval = None
try:
retval = function(self, data, False)
if retval is False:
del self._idlefuns[function]
elif retval is True:
nextsleep = 0
elif isinstance(retval, float):
if (retval < nextsleep):
nextsleep = retval
else:
fds = fds + retval
except SystemExit:
@@ -252,21 +248,14 @@ class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer):
except:
import traceback
traceback.print_exc()
if retval == None:
# the function execute failed; delete it
del self._idlefuns[function]
pass

socktimeout = self.socket.gettimeout() or nextsleep
socktimeout = min(socktimeout, nextsleep)
# Mirror what BaseServer handle_request would do
try:
fd_sets = select.select(fds, [], [], socktimeout)
if fd_sets[0] and self in fd_sets[0]:
self._handle_request_noblock()
except IOError:
# we ignore interrupted calls
pass
fd_sets = select.select(fds, [], [], socktimeout)
if fd_sets[0] and self in fd_sets[0]:
self._handle_request_noblock()

# Tell idle functions we're exiting
for function, data in self._idlefuns.items():
@@ -281,26 +270,20 @@ class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer):
self.connection_token = token

class BitBakeXMLRPCServerConnection(BitBakeBaseServerConnection):
def __init__(self, serverImpl, clientinfo=("localhost", 0), observer_only = False, featureset = None):
def __init__(self, serverImpl, clientinfo=("localhost", 0), observer_only = False, featureset = []):
self.connection, self.transport = _create_server(serverImpl.host, serverImpl.port)
self.clientinfo = clientinfo
self.serverImpl = serverImpl
self.observer_only = observer_only
if featureset:
self.featureset = featureset
self.featureset = featureset

def connect(self):
if not self.observer_only:
token = self.connection.addClient()
else:
self.featureset = []

def connect(self, token = None):
if token is None:
if self.observer_only:
token = "observer"
else:
token = self.connection.addClient()

token = "observer"
if token is None:
return None

self.transport.set_connection_token(token)

self.events = uievent.BBUIEventQueue(self.connection, self.clientinfo)
@@ -309,8 +292,6 @@ class BitBakeXMLRPCServerConnection(BitBakeBaseServerConnection):

_, error = self.connection.runCommand(["setFeatures", self.featureset])
if error:
# disconnect the client, we can't make the setFeature work
self.connection.removeClient()
# no need to log it here, the error shall be sent to the client
raise BaseException(error)

@@ -351,9 +332,7 @@ class BitBakeServer(BitBakeBaseServer):

class BitBakeXMLRPCClient(BitBakeBaseServer):

def __init__(self, observer_only = False, token = None):
self.token = token

def __init__(self, observer_only = False):
self.observer_only = observer_only
# if we need extra caches, just tell the server to load them all
pass
@@ -361,14 +340,37 @@ class BitBakeXMLRPCClient(BitBakeBaseServer):
def saveConnectionDetails(self, remote):
self.remote = remote

def saveConnectionConfigParams(self, configParams):
self.configParams = configParams

def establishConnection(self, featureset):
# The format of "remote" must be "server:port"
try:
[host, port] = self.remote.split(":")
port = int(port)
except Exception as e:
bb.warn("Failed to read remote definition (%s)" % str(e))
raise e
bb.fatal("Failed to read remote definition (%s)" % str(e))

# use automatic port if port set to -1, meaning read it from
# the bitbake.lock file
if port == -1:
lock_location = "%s/bitbake.lock" % self.configParams.environment.get('BUILDDIR')
lock = bb.utils.lockfile(lock_location, False, False)
if lock:
# This means there is no server running which we can
# connect to on the local system.
bb.utils.unlockfile(lock)
return None

try:
lf = open(lock_location, 'r')
remotedef = lf.readline()
[host, port] = remotedef.split(":")
port = int(port)
lf.close()
self.remote = remotedef
except Exception as e:
bb.fatal("Failed to read bitbake.lock (%s)" % str(e))

# We need our IP for the server connection. We get the IP
# by trying to connect with the server
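
The port == -1 branch above reads the live server's endpoint back out of bitbake.lock, where a running server records host:port on its first line. A self-contained sketch of that round trip, with a hypothetical endpoint and a temporary directory standing in for BUILDDIR:

    import os, tempfile

    lock_location = os.path.join(tempfile.mkdtemp(), "bitbake.lock")
    with open(lock_location, "w") as lf:
        lf.write("192.168.0.10:8800\n")  # hypothetical host:port

    with open(lock_location) as lf:
        remotedef = lf.readline().strip()
    host, port = remotedef.split(":")
    print(host, int(port))
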
@@ -378,15 +380,13 @@ class BitBakeXMLRPCClient(BitBakeBaseServer):
ip = s.getsockname()[0]
s.close()
except Exception as e:
bb.warn("Could not create socket for %s:%s (%s)" % (host, port, str(e)))
raise e
bb.fatal("Could not create socket for %s:%s (%s)" % (host, port, str(e)))
try:
self.serverImpl = XMLRPCProxyServer(host, port)
self.connection = BitBakeXMLRPCServerConnection(self.serverImpl, (ip, 0), self.observer_only, featureset)
return self.connection.connect(self.token)
return self.connection.connect()
except Exception as e:
bb.warn("Could not connect to server at %s:%s (%s)" % (host, port, str(e)))
raise e
bb.fatal("Could not connect to server at %s:%s (%s)" % (host, port, str(e)))

def endSession(self):
self.connection.removeClient()

@@ -62,13 +62,6 @@ class SignatureGenerator(object):
def dump_sigs(self, dataCache, options):
return

def get_taskdata(self):
return (self.runtaskdeps, self.taskhash, self.file_checksum_values)

def set_taskdata(self, data):
self.runtaskdeps, self.taskhash, self.file_checksum_values = data


class SignatureGeneratorBasic(SignatureGenerator):
"""
"""
@@ -80,7 +73,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
self.taskdeps = {}
self.runtaskdeps = {}
self.file_checksum_values = {}
self.taints = {}
self.gendeps = {}
self.lookupcache = {}
self.pkgnameextract = re.compile("(?P<fn>.*)\..*")
@@ -193,28 +185,22 @@ class SignatureGeneratorBasic(SignatureGenerator):
checksums = bb.fetch2.get_file_checksums(dataCache.file_checksums[fn][task], recipename)
for (f,cs) in checksums:
self.file_checksum_values[k][f] = cs
if cs:
data = data + cs

taskdep = dataCache.task_deps[fn]
if 'nostamp' in taskdep and task in taskdep['nostamp']:
# Nostamp tasks need an implicit taint so that they force any dependent tasks to run
import uuid
taint = str(uuid.uuid4())
data = data + taint
self.taints[k] = "nostamp:" + taint
data = data + cs

taint = self.read_taint(fn, task, dataCache.stamp[fn])
if taint:
data = data + taint
self.taints[k] = taint
logger.warn("%s is tainted from a forced run" % k)

h = hashlib.md5(data).hexdigest()
self.taskhash[k] = h
#d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task])
return h

def set_taskdata(self, hashes, deps, checksums):
self.runtaskdeps = deps
self.taskhash = hashes
self.file_checksum_values = checksums

def dump_sigtask(self, fn, task, stampbase, runtime):
k = fn + "." + task
if runtime == "customfile":
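
The nostamp branch in the hunk above mixes a fresh UUID into the task hash, so a nostamp task can never match a previously stored signature and anything depending on it is forced to rerun. The core of that idea in isolation (the base data here is only a placeholder for the accumulated dependency hashes):

    import hashlib, uuid

    data = b"...accumulated dependency hashes..."  # placeholder input
    taint = str(uuid.uuid4())
    # Every run produces a different taint, hence a different hash.
    h = hashlib.md5(data + taint.encode()).hexdigest()
    print("nostamp:" + taint, h)
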
@@ -251,10 +237,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
if taint:
data['taint'] = taint

if runtime and k in self.taints:
if 'nostamp:' in self.taints[k]:
data['taint'] = self.taints[k]

fd, tmpfile = tempfile.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.")
try:
with os.fdopen(fd, "wb") as stream:
@@ -311,9 +293,10 @@ def dump_this_task(outfile, d):
bb.parse.siggen.dump_sigtask(fn, task, outfile, "customfile")

def clean_basepath(a):
b = a.rsplit("/", 2)[1] + a.rsplit("/", 2)[2]
if a.startswith("virtual:"):
b = b + ":" + a.rsplit(":", 1)[0]
b = a.rsplit(":", 1)[0] + ":" + a.rsplit("/", 1)[1]
else:
b = a.rsplit("/", 1)[1]
return b

def clean_basepaths(a):
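
clean_basepath() shortens a recipe identifier for display. One side of the hunk keeps the virtual:<class>: prefix plus the recipe file name while dropping the absolute path between them; a sketch of that variant's behaviour with hypothetical paths:

    def clean_basepath(a):
        # Mirrors one branch of the hunk above.
        if a.startswith("virtual:"):
            return a.rsplit(":", 1)[0] + ":" + a.rsplit("/", 1)[1]
        return a.rsplit("/", 1)[1]

    print(clean_basepath("virtual:native:/srv/meta/recipes/gcc/gcc_4.9.bb"))
    # -> virtual:native:gcc_4.9.bb
    print(clean_basepath("/srv/meta/recipes/gcc/gcc_4.9.bb"))
    # -> gcc_4.9.bb
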
@@ -322,12 +305,6 @@ def clean_basepaths(a):
b[clean_basepath(x)] = a[x]
return b

def clean_basepaths_list(a):
b = []
for x in a:
b.append(clean_basepath(x))
return b

def compare_sigfiles(a, b, recursecb = None):
output = []

@@ -428,21 +405,6 @@ def compare_sigfiles(a, b, recursecb = None):
output.append("Dependency on checksum of file %s was removed" % (f))


if len(a_data['runtaskdeps']) != len(b_data['runtaskdeps']):
changed = ["Number of task dependencies changed"]
else:
changed = []
for idx, task in enumerate(a_data['runtaskdeps']):
a = a_data['runtaskdeps'][idx]
b = b_data['runtaskdeps'][idx]
if a_data['runtaskhashes'][a] != b_data['runtaskhashes'][b]:
changed.append("%s with hash %s\n changed to\n%s with hash %s" % (a, a_data['runtaskhashes'][a], b, b_data['runtaskhashes'][b]))

if changed:
output.append("runtaskdeps changed from %s to %s" % (clean_basepaths_list(a_data['runtaskdeps']), clean_basepaths_list(b_data['runtaskdeps'])))
output.append("\n".join(changed))


if 'runtaskhashes' in a_data and 'runtaskhashes' in b_data:
a = a_data['runtaskhashes']
b = b_data['runtaskhashes']
@@ -518,17 +480,4 @@ def dump_sigfile(a):
if 'taint' in a_data:
output.append("Tainted (by forced/invalidated task): %s" % a_data['taint'])

data = a_data['basehash']
for dep in a_data['runtaskdeps']:
data = data + a_data['runtaskhashes'][dep]

for c in a_data['file_checksum_values']:
data = data + c[1]

if 'taint' in a_data:
data = data + a_data['taint']

h = hashlib.md5(data).hexdigest()
output.append("Computed Hash is %s" % h)

return output

@@ -41,7 +41,7 @@ class TaskData:
"""
BitBake Task Data implementation
"""
def __init__(self, abort = True, tryaltconfigs = False, skiplist = None, allowincomplete = False):
def __init__(self, abort = True, tryaltconfigs = False, skiplist = None):
self.build_names_index = []
self.run_names_index = []
self.fn_index = []
@@ -70,7 +70,6 @@

self.abort = abort
self.tryaltconfigs = tryaltconfigs
self.allowincomplete = allowincomplete

self.skiplist = skiplist

@@ -514,7 +513,7 @@ class TaskData:
self.add_runtime_target(fn, item)
self.add_tasks(fn, dataCache)

def fail_fnid(self, fnid, missing_list=None):
def fail_fnid(self, fnid, missing_list = []):
"""
Mark a file as failed (unbuildable)
Remove any references from build and runtime provider lists
@@ -523,8 +522,6 @@
"""
if fnid in self.failed_fnids:
return
if not missing_list:
missing_list = []
logger.debug(1, "File '%s' is unbuildable, removing...", self.fn_index[fnid])
self.failed_fnids.append(fnid)
for target in self.build_targets:
@@ -538,7 +535,7 @@ class TaskData:
if len(self.run_targets[target]) == 0:
self.remove_runtarget(target, missing_list)

def remove_buildtarget(self, targetid, missing_list=None):
def remove_buildtarget(self, targetid, missing_list = []):
"""
Mark a build target as failed (unbuildable)
Trigger removal of any files that have this as a dependency
@@ -563,7 +560,7 @@ class TaskData:
logger.error("Required build target '%s' has no buildable providers.\nMissing or unbuildable dependency chain was: %s", target, missing_list)
raise bb.providers.NoProvider(target)

def remove_runtarget(self, targetid, missing_list=None):
def remove_runtarget(self, targetid, missing_list = []):
"""
Mark a run target as failed (unbuildable)
Trigger removal of any files that have this as a dependency
@@ -597,10 +594,9 @@ class TaskData:
added = added + 1
except bb.providers.NoProvider:
targetid = self.getbuild_id(target)
if self.abort and targetid in self.external_targets and not self.allowincomplete:
if self.abort and targetid in self.external_targets:
raise
if not self.allowincomplete:
self.remove_buildtarget(targetid)
self.remove_buildtarget(targetid)
for target in self.get_unresolved_run_targets(dataCache):
try:
self.add_rprovider(cfgData, dataCache, target)
@@ -612,18 +608,6 @@ class TaskData:
break
# self.dump_data()

def get_providermap(self):
virts = []
virtmap = {}

for name in self.build_names_index:
if name.startswith("virtual/"):
virts.append(name)
for v in virts:
if self.have_build_target(v):
virtmap[v] = self.fn_index[self.get_provider(v)[0]]
return virtmap

def dump_data(self):
"""
Dump some debug information on the internal data structures

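get_providermap() walks the build name index for virtual/* targets and maps each one that has a provider to the providing recipe file, feeding the virtual/xxxx -> PN (fn) table logged in runqueue.py earlier. A stripped-down sketch with hypothetical data structures in place of TaskData's internals:

    fn_index = ["/srv/meta/recipes/busybox/busybox_1.22.bb",
                "/srv/meta/recipes/systemd/systemd_216.bb"]
    chosen_provider = {"virtual/sh": 0, "virtual/init": 1}  # hypothetical

    def get_providermap(build_names):
        virtmap = {}
        for name in build_names:
            # Only virtual/* targets are interesting for the table.
            if name.startswith("virtual/") and name in chosen_provider:
                virtmap[name] = fn_index[chosen_provider[name]]
        return virtmap

    print(get_providermap(["busybox", "virtual/sh", "virtual/init"]))
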
@@ -24,30 +24,6 @@ import unittest
import bb
import bb.data
import bb.parse
import logging

class LogRecord():
def __enter__(self):
logs = []
class LogHandler(logging.Handler):
def emit(self, record):
logs.append(record)
logger = logging.getLogger("BitBake")
handler = LogHandler()
self.handler = handler
logger.addHandler(handler)
return logs
def __exit__(self, type, value, traceback):
logger = logging.getLogger("BitBake")
logger.removeHandler(self.handler)
return

def logContains(item, logs):
for l in logs:
m = l.getMessage()
if item in m:
return True
return False

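The LogRecord helper in this hunk captures BitBake log records so tests can assert on them via logContains(). The same capture can be done with a plain logging.Handler, which is roughly what the helper wraps; a self-contained equivalent:

    import logging

    class CaptureLogs(logging.Handler):
        # Compact stand-in for the LogRecord context manager above.
        def __init__(self):
            logging.Handler.__init__(self)
            self.records = []
        def emit(self, record):
            self.records.append(record)

    logger = logging.getLogger("BitBake")
    handler = CaptureLogs()
    logger.addHandler(handler)
    logger.warning("Variable key VAL_${FOO} (A) replaces original key VAL_foo (B)")
    logger.removeHandler(handler)
    print(any("replaces original key" in r.getMessage() for r in handler.records))
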
class DataExpansions(unittest.TestCase):
def setUp(self):
@@ -134,23 +110,17 @@ class DataExpansions(unittest.TestCase):

def test_rename(self):
self.d.renameVar("foo", "newfoo")
self.assertEqual(self.d.getVar("newfoo", False), "value_of_foo")
self.assertEqual(self.d.getVar("foo", False), None)
self.assertEqual(self.d.getVar("newfoo"), "value_of_foo")
self.assertEqual(self.d.getVar("foo"), None)

def test_deletion(self):
self.d.delVar("foo")
self.assertEqual(self.d.getVar("foo", False), None)
self.assertEqual(self.d.getVar("foo"), None)

def test_keys(self):
keys = self.d.keys()
self.assertEqual(keys, ['value_of_foo', 'foo', 'bar'])

def test_keys_deletion(self):
newd = bb.data.createCopy(self.d)
newd.delVar("bar")
keys = newd.keys()
self.assertEqual(keys, ['value_of_foo', 'foo'])

class TestNestedExpansions(unittest.TestCase):
def setUp(self):
self.d = bb.data.init()
@@ -196,28 +166,28 @@ class TestMemoize(unittest.TestCase):
def test_memoized(self):
d = bb.data.init()
d.setVar("FOO", "bar")
self.assertTrue(d.getVar("FOO", False) is d.getVar("FOO", False))
self.assertTrue(d.getVar("FOO") is d.getVar("FOO"))

def test_not_memoized(self):
d1 = bb.data.init()
d2 = bb.data.init()
d1.setVar("FOO", "bar")
d2.setVar("FOO", "bar2")
self.assertTrue(d1.getVar("FOO", False) is not d2.getVar("FOO", False))
self.assertTrue(d1.getVar("FOO") is not d2.getVar("FOO"))

def test_changed_after_memoized(self):
d = bb.data.init()
d.setVar("foo", "value of foo")
self.assertEqual(str(d.getVar("foo", False)), "value of foo")
self.assertEqual(str(d.getVar("foo")), "value of foo")
d.setVar("foo", "second value of foo")
self.assertEqual(str(d.getVar("foo", False)), "second value of foo")
self.assertEqual(str(d.getVar("foo")), "second value of foo")

def test_same_value(self):
d = bb.data.init()
d.setVar("foo", "value of")
d.setVar("bar", "value of")
self.assertEqual(d.getVar("foo", False),
d.getVar("bar", False))
self.assertEqual(d.getVar("foo"),
d.getVar("bar"))

class TestConcat(unittest.TestCase):
def setUp(self):
@@ -270,13 +240,6 @@ class TestConcatOverride(unittest.TestCase):
bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "foo:val:val2:bar")

def test_append_unset(self):
self.d.setVar("TEST_prepend", "${FOO}:")
self.d.setVar("TEST_append", ":val2")
self.d.setVar("TEST_append", ":${BAR}")
bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "foo::val2:bar")

def test_remove(self):
self.d.setVar("TEST", "${VAL} ${BAR}")
self.d.setVar("TEST_remove", "val")
@@ -296,20 +259,6 @@ class TestConcatOverride(unittest.TestCase):
bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "")

def test_remove_expansion(self):
self.d.setVar("BAR", "Z")
self.d.setVar("TEST", "${BAR}/X Y")
self.d.setVar("TEST_remove", "${BAR}/X")
bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "Y")

def test_remove_expansion_items(self):
self.d.setVar("TEST", "A B C D")
self.d.setVar("BAR", "B D")
self.d.setVar("TEST_remove", "${BAR}")
bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "A C")

class TestOverrides(unittest.TestCase):
def setUp(self):
self.d = bb.data.init()
@@ -325,66 +274,13 @@ class TestOverrides(unittest.TestCase):
bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "testvalue2")

def test_one_override_unset(self):
self.d.setVar("TEST2_bar", "testvalue2")
bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST2", True), "testvalue2")
self.assertItemsEqual(self.d.keys(), ['TEST', 'TEST2', 'OVERRIDES', 'TEST2_bar'])

def test_multiple_override(self):
self.d.setVar("TEST_bar", "testvalue2")
self.d.setVar("TEST_local", "testvalue3")
self.d.setVar("TEST_foo", "testvalue4")
bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "testvalue3")
self.assertItemsEqual(self.d.keys(), ['TEST', 'TEST_foo', 'OVERRIDES', 'TEST_bar', 'TEST_local'])

def test_multiple_combined_overrides(self):
self.d.setVar("TEST_local_foo_bar", "testvalue3")
bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "testvalue3")

def test_multiple_overrides_unset(self):
self.d.setVar("TEST2_local_foo_bar", "testvalue3")
bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST2", True), "testvalue3")

def test_keyexpansion_override(self):
self.d.setVar("LOCAL", "local")
self.d.setVar("TEST_bar", "testvalue2")
self.d.setVar("TEST_${LOCAL}", "testvalue3")
self.d.setVar("TEST_foo", "testvalue4")
bb.data.update_data(self.d)
bb.data.expandKeys(self.d)
self.assertEqual(self.d.getVar("TEST", True), "testvalue3")

def test_rename_override(self):
self.d.setVar("ALTERNATIVE_ncurses-tools_class-target", "a")
self.d.setVar("OVERRIDES", "class-target")
bb.data.update_data(self.d)
self.d.renameVar("ALTERNATIVE_ncurses-tools", "ALTERNATIVE_lib32-ncurses-tools")
self.assertEqual(self.d.getVar("ALTERNATIVE_lib32-ncurses-tools", True), "a")

def test_underscore_override(self):
self.d.setVar("TEST_bar", "testvalue2")
self.d.setVar("TEST_some_val", "testvalue3")
self.d.setVar("TEST_foo", "testvalue4")
self.d.setVar("OVERRIDES", "foo:bar:some_val")
self.assertEqual(self.d.getVar("TEST", True), "testvalue3")

class TestKeyExpansion(unittest.TestCase):
def setUp(self):
self.d = bb.data.init()
self.d.setVar("FOO", "foo")
self.d.setVar("BAR", "foo")

def test_keyexpand(self):
self.d.setVar("VAL_${FOO}", "A")
self.d.setVar("VAL_${BAR}", "B")
with LogRecord() as logs:
bb.data.expandKeys(self.d)
self.assertTrue(logContains("Variable key VAL_${FOO} (A) replaces original key VAL_foo (B)", logs))
self.assertEqual(self.d.getVar("VAL_foo", True), "A")

class TestFlags(unittest.TestCase):
def setUp(self):
@@ -403,39 +299,3 @@ class TestFlags(unittest.TestCase):
self.assertEqual(self.d.getVarFlag("foo", "flag2"), None)


class Contains(unittest.TestCase):
def setUp(self):
self.d = bb.data.init()
self.d.setVar("SOMEFLAG", "a b c")

def test_contains(self):
self.assertTrue(bb.utils.contains("SOMEFLAG", "a", True, False, self.d))
self.assertTrue(bb.utils.contains("SOMEFLAG", "b", True, False, self.d))
self.assertTrue(bb.utils.contains("SOMEFLAG", "c", True, False, self.d))

self.assertTrue(bb.utils.contains("SOMEFLAG", "a b", True, False, self.d))
self.assertTrue(bb.utils.contains("SOMEFLAG", "b c", True, False, self.d))
self.assertTrue(bb.utils.contains("SOMEFLAG", "c a", True, False, self.d))

self.assertTrue(bb.utils.contains("SOMEFLAG", "a b c", True, False, self.d))
self.assertTrue(bb.utils.contains("SOMEFLAG", "c b a", True, False, self.d))

self.assertFalse(bb.utils.contains("SOMEFLAG", "x", True, False, self.d))
self.assertFalse(bb.utils.contains("SOMEFLAG", "a x", True, False, self.d))
self.assertFalse(bb.utils.contains("SOMEFLAG", "x c b", True, False, self.d))
self.assertFalse(bb.utils.contains("SOMEFLAG", "x c b a", True, False, self.d))

def test_contains_any(self):
self.assertTrue(bb.utils.contains_any("SOMEFLAG", "a", True, False, self.d))
self.assertTrue(bb.utils.contains_any("SOMEFLAG", "b", True, False, self.d))
self.assertTrue(bb.utils.contains_any("SOMEFLAG", "c", True, False, self.d))

self.assertTrue(bb.utils.contains_any("SOMEFLAG", "a b", True, False, self.d))
self.assertTrue(bb.utils.contains_any("SOMEFLAG", "b c", True, False, self.d))
self.assertTrue(bb.utils.contains_any("SOMEFLAG", "c a", True, False, self.d))

self.assertTrue(bb.utils.contains_any("SOMEFLAG", "a x", True, False, self.d))
self.assertTrue(bb.utils.contains_any("SOMEFLAG", "x c", True, False, self.d))

self.assertFalse(bb.utils.contains_any("SOMEFLAG", "x", True, False, self.d))
self.assertFalse(bb.utils.contains_any("SOMEFLAG", "x y z", True, False, self.d))

@@ -24,7 +24,6 @@ import tempfile
import subprocess
import os
from bb.fetch2 import URI
from bb.fetch2 import FetchMethod
import bb

class URITest(unittest.TestCase):
@@ -315,7 +314,6 @@ class URITest(unittest.TestCase):
class FetcherTest(unittest.TestCase):

def setUp(self):
self.origdir = os.getcwd()
self.d = bb.data.init()
self.tempdir = tempfile.mkdtemp()
self.dldir = os.path.join(self.tempdir, "download")
@@ -327,7 +325,6 @@ class FetcherTest(unittest.TestCase):
self.d.setVar("PERSISTENT_DIR", persistdir)

def tearDown(self):
os.chdir(self.origdir)
bb.utils.prunedir(self.tempdir)

class MirrorUriTest(FetcherTest):
@@ -393,28 +390,6 @@ class MirrorUriTest(FetcherTest):
uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
self.assertEqual(uris, ['file:///someotherpath/downloads/bitbake-1.0.tar.gz'])

def test_mirror_of_mirror(self):
# Test if mirror of a mirror works
mirrorvar = self.mirrorvar + " http://.*/.* http://otherdownloads.yoctoproject.org/downloads/ \n"
mirrorvar = mirrorvar + " http://otherdownloads.yoctoproject.org/.* http://downloads2.yoctoproject.org/downloads/ \n"
fetcher = bb.fetch.FetchData("http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d)
mirrors = bb.fetch2.mirror_from_string(mirrorvar)
uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
self.assertEqual(uris, ['file:///somepath/downloads/bitbake-1.0.tar.gz',
'file:///someotherpath/downloads/bitbake-1.0.tar.gz',
'http://otherdownloads.yoctoproject.org/downloads/bitbake-1.0.tar.gz',
'http://downloads2.yoctoproject.org/downloads/bitbake-1.0.tar.gz'])

recmirrorvar = "https://.*/[^/]* http://AAAA/A/A/A/ \n" \
"https://.*/[^/]* https://BBBB/B/B/B/ \n"

def test_recursive(self):
fetcher = bb.fetch.FetchData("https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d)
mirrors = bb.fetch2.mirror_from_string(self.recmirrorvar)
uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
self.assertEqual(uris, ['http://AAAA/A/A/A/bitbake/bitbake-1.0.tar.gz',
'https://BBBB/B/B/B/bitbake/bitbake-1.0.tar.gz',
'http://AAAA/A/A/A/B/B/bitbake/bitbake-1.0.tar.gz'])

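The mirror tests feed MIRRORS-style strings into mirror_from_string(): whitespace-separated (source regex, replacement) pairs, applied hop by hop, which is how a mirror can itself be redirected to another mirror. A simplified parser showing just the pairing (the real bb.fetch2 helper handles more of the formatting):

    mirrorvar = ("http://.*/.* http://otherdownloads.yoctoproject.org/downloads/ \n"
                 " http://otherdownloads.yoctoproject.org/.* http://downloads2.yoctoproject.org/downloads/ \n")

    def mirror_from_string(data):
        entries = data.split()
        # Pair up regex/replacement tokens in order.
        return list(zip(entries[::2], entries[1::2]))

    for pattern, replacement in mirror_from_string(mirrorvar):
        print(pattern, "->", replacement)
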
class FetcherLocalTest(FetcherTest):
def setUp(self):
@@ -469,13 +444,6 @@ class FetcherLocalTest(FetcherTest):
tree = self.fetchUnpack(['file://dir/subdir/e'])
self.assertEqual(tree, ['dir/subdir/e'])

def test_local_subdirparam(self):
tree = self.fetchUnpack(['file://a;subdir=bar'])
self.assertEqual(tree, ['bar/a'])

def test_local_deepsubdirparam(self):
tree = self.fetchUnpack(['file://dir/subdir/e;subdir=bar'])
self.assertEqual(tree, ['bar/dir/subdir/e'])

class FetcherNetworkTest(FetcherTest):

@@ -500,19 +468,6 @@ class FetcherNetworkTest(FetcherTest):
fetcher.download()
self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)

def test_fetch_mirror_of_mirror(self):
self.d.setVar("MIRRORS", "http://.*/.* http://invalid2.yoctoproject.org/ \n http://invalid2.yoctoproject.org/.* http://downloads.yoctoproject.org/releases/bitbake")
fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
fetcher.download()
self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)

def test_fetch_file_mirror_of_mirror(self):
self.d.setVar("MIRRORS", "http://.*/.* file:///some1where/ \n file:///some1where/.* file://some2where/ \n file://some2where/.* http://downloads.yoctoproject.org/releases/bitbake")
fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
os.mkdir(self.dldir + "/some2where")
fetcher.download()
self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)

def test_fetch_premirror(self):
self.d.setVar("PREMIRRORS", "http://.*/.* http://downloads.yoctoproject.org/releases/bitbake")
fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
@@ -584,43 +539,6 @@ class FetcherNetworkTest(FetcherTest):
os.chdir(os.path.dirname(self.unpackdir))
fetcher.unpack(self.unpackdir)

def test_trusted_network(self):
# Ensure trusted_network returns False when the host IS in the list.
url = "git://Someserver.org/foo;rev=1"
self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org someserver.org server2.org server3.org")
self.assertTrue(bb.fetch.trusted_network(self.d, url))

def test_wild_trusted_network(self):
# Ensure trusted_network returns true when the *.host IS in the list.
url = "git://Someserver.org/foo;rev=1"
self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org")
self.assertTrue(bb.fetch.trusted_network(self.d, url))

def test_prefix_wild_trusted_network(self):
# Ensure trusted_network returns true when the prefix matches *.host.
url = "git://git.Someserver.org/foo;rev=1"
self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org")
self.assertTrue(bb.fetch.trusted_network(self.d, url))

def test_two_prefix_wild_trusted_network(self):
# Ensure trusted_network returns true when the prefix matches *.host.
url = "git://something.git.Someserver.org/foo;rev=1"
self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org")
self.assertTrue(bb.fetch.trusted_network(self.d, url))

def test_untrusted_network(self):
# Ensure trusted_network returns False when the host is NOT in the list.
url = "git://someserver.org/foo;rev=1"
self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org server2.org server3.org")
self.assertFalse(bb.fetch.trusted_network(self.d, url))

def test_wild_untrusted_network(self):
# Ensure trusted_network returns False when the host is NOT in the list.
url = "git://*.someserver.org/foo;rev=1"
self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org server2.org server3.org")
self.assertFalse(bb.fetch.trusted_network(self.d, url))

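Taken together, the trusted_network tests pin down the matching rules: hosts compare case-insensitively, a plain entry must match exactly, and a *. entry trusts the bare domain as well as any subdomain. A small model consistent with all six cases (the real bb.fetch.trusted_network lives in bb/fetch2 and may cover more than this sketch):

    def trusted_network(allowed, url):
        # Extract and lowercase the host portion of the URL.
        host = url.split("://", 1)[1].split("/", 1)[0].split(";")[0].lower()
        for net in allowed.split():
            net = net.lower()
            if net.startswith("*."):
                # Trust the bare domain and any of its subdomains.
                if host == net[2:] or host.endswith("." + net[2:]):
                    return True
            elif host == net:
                return True
        return False

    allowed = "server1.org *.someserver.org server2.org server3.org"
    print(trusted_network(allowed, "git://git.Someserver.org/foo;rev=1"))  # True
    print(trusted_network("server1.org server2.org server3.org",
                          "git://someserver.org/foo;rev=1"))               # False
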
class URLHandle(unittest.TestCase):
|
||||
|
||||
datatable = {
|
||||
@@ -640,130 +558,5 @@ class URLHandle(unittest.TestCase):
|
||||
result = bb.fetch.encodeurl(v)
|
||||
self.assertEqual(result, k)
|
||||
|
||||
class FetchLatestVersionTest(FetcherTest):

    test_git_uris = {
        # version pattern "X.Y.Z"
        ("mx-1.0", "git://github.com/clutter-project/mx.git;branch=mx-1.4", "9b1db6b8060bd00b121a692f942404a24ae2960f", "")
            : "1.99.4",
        # version pattern "vX.Y"
        ("mtd-utils", "git://git.infradead.org/mtd-utils.git", "ca39eb1d98e736109c64ff9c1aa2a6ecca222d8f", "")
            : "1.5.0",
        # version pattern "pkg_name-X.Y"
        ("presentproto", "git://anongit.freedesktop.org/git/xorg/proto/presentproto", "24f3a56e541b0a9e6c6ee76081f441221a120ef9", "")
            : "1.0",
        # version pattern "pkg_name-vX.Y.Z"
        ("dtc", "git://git.qemu.org/dtc.git", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "")
            : "1.4.0",
        # combination version pattern
        ("sysprof", "git://git.gnome.org/sysprof", "cd44ee6644c3641507fb53b8a2a69137f2971219", "")
            : "1.2.0",
        ("u-boot-mkimage", "git://git.denx.de/u-boot.git;branch=master;protocol=git", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "")
            : "2014.01",
        # version pattern "yyyymmdd"
        ("mobile-broadband-provider-info", "git://git.gnome.org/mobile-broadband-provider-info", "4ed19e11c2975105b71b956440acdb25d46a347d", "")
            : "20120614",
        # packages with a valid UPSTREAM_CHECK_GITTAGREGEX
        ("xf86-video-omap", "git://anongit.freedesktop.org/xorg/driver/xf86-video-omap", "ae0394e687f1a77e966cf72f895da91840dffb8f", "(?P<pver>(\d+\.(\d\.?)*))")
            : "0.4.3",
        ("build-appliance-image", "git://git.yoctoproject.org/poky", "b37dd451a52622d5b570183a81583cc34c2ff555", "(?P<pver>(([0-9][\.|_]?)+[0-9]))")
            : "11.0.0",
        ("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot", "cd437ecbd8986c894442f8fce1e0061e20f04dee", "chkconfig\-(?P<pver>((\d+[\.\-_]*)+))")
            : "1.3.59",
        ("remake", "git://github.com/rocky/remake.git", "f05508e521987c8494c92d9c2871aec46307d51d", "(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))")
            : "3.82+dbg0.9",
    }

    test_wget_uris = {
        # packages with versions inside directory name
        ("util-linux", "http://kernel.org/pub/linux/utils/util-linux/v2.23/util-linux-2.24.2.tar.bz2", "", "")
            : "2.24.2",
        ("enchant", "http://www.abisource.com/downloads/enchant/1.6.0/enchant-1.6.0.tar.gz", "", "")
            : "1.6.0",
        ("cmake", "http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz", "", "")
            : "2.8.12.1",
        # packages with versions only in current directory
        ("eglic", "http://downloads.yoctoproject.org/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2", "", "")
            : "2.19",
        ("gnu-config", "http://downloads.yoctoproject.org/releases/gnu-config/gnu-config-20120814.tar.bz2", "", "")
            : "20120814",
        # packages with "99" in the name of possible version
        ("pulseaudio", "http://freedesktop.org/software/pulseaudio/releases/pulseaudio-4.0.tar.xz", "", "")
            : "5.0",
        ("xserver-xorg", "http://xorg.freedesktop.org/releases/individual/xserver/xorg-server-1.15.1.tar.bz2", "", "")
            : "1.15.1",
        # packages with valid UPSTREAM_CHECK_URI and UPSTREAM_CHECK_REGEX
        ("cups", "http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2", "http://www.cups.org/software.php", "(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz")
            : "2.0.0",
        ("db", "http://download.oracle.com/berkeley-db/db-5.3.21.tar.gz", "http://www.oracle.com/technetwork/products/berkeleydb/downloads/index-082944.html", "http://download.oracle.com/otn/berkeley-db/(?P<name>db-)(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz")
            : "6.1.19",
    }

    if os.environ.get("BB_SKIP_NETTESTS") == "yes":
        print("Unset BB_SKIP_NETTESTS to run network tests")
    else:
        def test_git_latest_versionstring(self):
            for k, v in self.test_git_uris.items():
                self.d.setVar("PN", k[0])
                self.d.setVar("SRCREV", k[2])
                self.d.setVar("UPSTREAM_CHECK_GITTAGREGEX", k[3])
                ud = bb.fetch2.FetchData(k[1], self.d)
                pupver= ud.method.latest_versionstring(ud, self.d)
                verstring = pupver[0]
                r = bb.utils.vercmp_string(v, verstring)
                self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))

        def test_wget_latest_versionstring(self):
            for k, v in self.test_wget_uris.items():
                self.d.setVar("PN", k[0])
                self.d.setVar("UPSTREAM_CHECK_URI", k[2])
                self.d.setVar("UPSTREAM_CHECK_REGEX", k[3])
                ud = bb.fetch2.FetchData(k[1], self.d)
                pupver = ud.method.latest_versionstring(ud, self.d)
                verstring = pupver[0]
                r = bb.utils.vercmp_string(v, verstring)
                self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))

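An illustrative sketch (not part of the diff): how a named-group tag regex from the table above extracts a version — the regex and tag here are taken straight from the chkconfig-alternatives-native entry:

import re

tagregex = re.compile(r"chkconfig\-(?P<pver>((\d+[\.\-_]*)+))")
m = tagregex.match("chkconfig-1.3.59")
assert m is not None and m.group("pver") == "1.3.59"
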
class FetchCheckStatusTest(FetcherTest):
    test_wget_uris = ["http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2",
                      "http://www.cups.org/software/ipptool/ipptool-20130731-linux-ubuntu-i686.tar.gz",
                      "http://www.cups.org/",
                      "http://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz",
                      "http://downloads.yoctoproject.org/releases/sato/sato-engine-0.2.tar.gz",
                      "http://downloads.yoctoproject.org/releases/sato/sato-engine-0.3.tar.gz",
                      "https://yoctoproject.org/",
                      "https://yoctoproject.org/documentation",
                      "http://downloads.yoctoproject.org/releases/opkg/opkg-0.1.7.tar.gz",
                      "http://downloads.yoctoproject.org/releases/opkg/opkg-0.3.0.tar.gz",
                      "ftp://ftp.gnu.org/gnu/autoconf/autoconf-2.60.tar.gz",
                      "ftp://ftp.gnu.org/gnu/chess/gnuchess-5.08.tar.gz",
                      "ftp://ftp.gnu.org/gnu/gmp/gmp-4.0.tar.gz",
                      ]

    if os.environ.get("BB_SKIP_NETTESTS") == "yes":
        print("Unset BB_SKIP_NETTESTS to run network tests")
    else:

        def test_wget_checkstatus(self):
            fetch = bb.fetch2.Fetch(self.test_wget_uris, self.d)
            for u in self.test_wget_uris:
                ud = fetch.ud[u]
                m = ud.method
                ret = m.checkstatus(fetch, ud, self.d)
                self.assertTrue(ret, msg="URI %s, can't check status" % (u))


        def test_wget_checkstatus_connection_cache(self):
            from bb.fetch2 import FetchConnectionCache

            connection_cache = FetchConnectionCache()
            fetch = bb.fetch2.Fetch(self.test_wget_uris, self.d,
                        connection_cache = connection_cache)

            for u in self.test_wget_uris:
                ud = fetch.ud[u]
                m = ud.method
                ret = m.checkstatus(fetch, ud, self.d)
                self.assertTrue(ret, msg="URI %s, can't check status" % (u))

            connection_cache.close_connections()

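An illustrative sketch (not part of the diff): the BB_SKIP_NETTESTS gating pattern used by both network-dependent test classes above, shown standalone — the test methods only exist when the variable is not "yes":

import os
import unittest

class NetworkGatedTests(unittest.TestCase):
    if os.environ.get("BB_SKIP_NETTESTS") == "yes":
        print("Unset BB_SKIP_NETTESTS to run network tests")
    else:
        def test_something_networked(self):
            # a real test would hit the network here, as checkstatus() does
            self.assertTrue(True)
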
@@ -1,147 +0,0 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# BitBake Test for lib/bb/parse/
#
# Copyright (C) 2015 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#

import unittest
import tempfile
import logging
import bb
import os

logger = logging.getLogger('BitBake.TestParse')

import bb.parse
import bb.data
import bb.siggen

class ParseTest(unittest.TestCase):

    testfile = """
A = "1"
B = "2"
do_install() {
    echo "hello"
}

C = "3"
"""

    def setUp(self):
        self.d = bb.data.init()
        bb.parse.siggen = bb.siggen.init(self.d)

    def parsehelper(self, content, suffix = ".bb"):

        f = tempfile.NamedTemporaryFile(suffix = suffix)
        f.write(content)
        f.flush()
        os.chdir(os.path.dirname(f.name))
        return f

    def test_parse_simple(self):
        f = self.parsehelper(self.testfile)
        d = bb.parse.handle(f.name, self.d)['']
        self.assertEqual(d.getVar("A", True), "1")
        self.assertEqual(d.getVar("B", True), "2")
        self.assertEqual(d.getVar("C", True), "3")

    def test_parse_incomplete_function(self):
        testfileB = self.testfile.replace("}", "")
        f = self.parsehelper(testfileB)
        with self.assertRaises(bb.parse.ParseError):
            d = bb.parse.handle(f.name, self.d)['']

    overridetest = """
RRECOMMENDS_${PN} = "a"
RRECOMMENDS_${PN}_libc = "b"
OVERRIDES = "libc:${PN}"
PN = "gtk+"
"""

    def test_parse_overrides(self):
        f = self.parsehelper(self.overridetest)
        d = bb.parse.handle(f.name, self.d)['']
        self.assertEqual(d.getVar("RRECOMMENDS", True), "b")
        bb.data.expandKeys(d)
        self.assertEqual(d.getVar("RRECOMMENDS", True), "b")
        d.setVar("RRECOMMENDS_gtk+", "c")
        self.assertEqual(d.getVar("RRECOMMENDS", True), "c")

    overridetest2 = """
EXTRA_OECONF = ""
EXTRA_OECONF_class-target = "b"
EXTRA_OECONF_append = " c"
"""

    def test_parse_overrides(self):
        f = self.parsehelper(self.overridetest2)
        d = bb.parse.handle(f.name, self.d)['']
        d.appendVar("EXTRA_OECONF", " d")
        d.setVar("OVERRIDES", "class-target")
        self.assertEqual(d.getVar("EXTRA_OECONF", True), "b c d")

    overridetest3 = """
DESCRIPTION = "A"
DESCRIPTION_${PN}-dev = "${DESCRIPTION} B"
PN = "bc"
"""

    def test_parse_combinations(self):
        f = self.parsehelper(self.overridetest3)
        d = bb.parse.handle(f.name, self.d)['']
        bb.data.expandKeys(d)
        self.assertEqual(d.getVar("DESCRIPTION_bc-dev", True), "A B")
        d.setVar("DESCRIPTION", "E")
        d.setVar("DESCRIPTION_bc-dev", "C D")
        d.setVar("OVERRIDES", "bc-dev")
        self.assertEqual(d.getVar("DESCRIPTION", True), "C D")


    classextend = """
VAR_var_override1 = "B"
EXTRA = ":override1"
OVERRIDES = "nothing${EXTRA}"

BBCLASSEXTEND = "###CLASS###"
"""
    classextend_bbclass = """
EXTRA = ""
python () {
    d.renameVar("VAR_var", "VAR_var2")
}
"""

    #
    # Test based upon a real world data corruption issue. One
    # data store changing a variable poked through into a different data
    # store. This test case replicates that issue where the value 'B' would
    # become unset/disappear.
    #
    def test_parse_classextend_contamination(self):
        cls = self.parsehelper(self.classextend_bbclass, suffix=".bbclass")
        #clsname = os.path.basename(cls.name).replace(".bbclass", "")
        self.classextend = self.classextend.replace("###CLASS###", cls.name)
        f = self.parsehelper(self.classextend)
        alldata = bb.parse.handle(f.name, self.d)
        d1 = alldata['']
        d2 = alldata[cls.name]
        self.assertEqual(d1.getVar("VAR_var", True), "B")
        self.assertEqual(d2.getVar("VAR_var", True), None)

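An illustrative sketch (not part of the diff): the parsehelper() pattern from the removed test module, generalized. bb.parse.handle() opens the recipe by path, so the text is flushed to a named temporary file first; the helper name here is hypothetical:

import tempfile

def write_recipe(content, suffix=".bb"):
    f = tempfile.NamedTemporaryFile(mode="w", suffix=suffix)
    f.write(content)
    f.flush()
    return f  # keep the handle alive; the file disappears when it is closed
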
@@ -22,7 +22,6 @@
import unittest
import bb
import os
import tempfile

class VerCmpString(unittest.TestCase):

@@ -37,10 +36,6 @@ class VerCmpString(unittest.TestCase):
        self.assertTrue(result < 0)
        result = bb.utils.vercmp_string('1.1', '1_p2')
        self.assertTrue(result < 0)
        result = bb.utils.vercmp_string('1.0', '1.0+1.1-beta1')
        self.assertTrue(result < 0)
        result = bb.utils.vercmp_string('1.1', '1.0+1.1-beta1')
        self.assertTrue(result > 0)

    def test_explode_dep_versions(self):
        correctresult = {"foo" : ["= 1.10"]}
@@ -106,476 +101,3 @@ class Path(unittest.TestCase):
        for arg1, correctresult in checkitems:
            result = bb.utils._check_unsafe_delete_path(arg1)
            self.assertEqual(result, correctresult, '_check_unsafe_delete_path("%s") != %s' % (arg1, correctresult))

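An illustrative sketch (not part of the diff): the sign convention these assertions rely on, assuming bitbake's lib/ directory is importable — vercmp_string(a, b) is negative when a sorts before b, zero when equal, positive when a sorts after b:

import bb.utils

assert bb.utils.vercmp_string('1.0', '1.1') < 0   # '1.0' is older
assert bb.utils.vercmp_string('1.1', '1.1') == 0  # equal
assert bb.utils.vercmp_string('1.2', '1.1') > 0   # '1.2' is newer
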
class EditMetadataFile(unittest.TestCase):
    _origfile = """
# A comment
HELLO = "oldvalue"

THIS = "that"

# Another comment
NOCHANGE = "samevalue"
OTHER = 'anothervalue'

MULTILINE = "a1 \\
             a2 \\
             a3"

MULTILINE2 := " \\
               b1 \\
               b2 \\
               b3 \\
               "


MULTILINE3 = " \\
              c1 \\
              c2 \\
              c3 \\
              "

do_functionname() {
    command1 ${VAL1} ${VAL2}
    command2 ${VAL3} ${VAL4}
}
"""
    def _testeditfile(self, varvalues, compareto, dummyvars=None):
        if dummyvars is None:
            dummyvars = []
        with tempfile.NamedTemporaryFile('w', delete=False) as tf:
            tf.write(self._origfile)
            tf.close()
        try:
            varcalls = []
            def handle_file(varname, origvalue, op, newlines):
                self.assertIn(varname, varvalues, 'Callback called for variable %s not in the list!' % varname)
                self.assertNotIn(varname, dummyvars, 'Callback called for variable %s in dummy list!' % varname)
                varcalls.append(varname)
                return varvalues[varname]

            bb.utils.edit_metadata_file(tf.name, varvalues.keys(), handle_file)
            with open(tf.name) as f:
                modfile = f.readlines()
            # Ensure the output matches the expected output
            self.assertEqual(compareto.splitlines(True), modfile)
            # Ensure the callback function was called for every variable we asked for
            # (plus allow testing behaviour when a requested variable is not present)
            self.assertEqual(sorted(varvalues.keys()), sorted(varcalls + dummyvars))
        finally:
            os.remove(tf.name)


    def test_edit_metadata_file_nochange(self):
        # Test file doesn't get modified with nothing to do
        self._testeditfile({}, self._origfile)
        # Test file doesn't get modified with only dummy variables
        self._testeditfile({'DUMMY1': ('should_not_set', None, 0, True),
                            'DUMMY2': ('should_not_set_again', None, 0, True)}, self._origfile, dummyvars=['DUMMY1', 'DUMMY2'])
        # Test file doesn't get modified with some the same values
        self._testeditfile({'THIS': ('that', None, 0, True),
                            'OTHER': ('anothervalue', None, 0, True),
                            'MULTILINE3': (' c1 c2 c3', None, 4, False)}, self._origfile)

    def test_edit_metadata_file_1(self):

        newfile1 = """
# A comment
HELLO = "newvalue"

THIS = "that"

# Another comment
NOCHANGE = "samevalue"
OTHER = 'anothervalue'

MULTILINE = "a1 \\
             a2 \\
             a3"

MULTILINE2 := " \\
               b1 \\
               b2 \\
               b3 \\
               "


MULTILINE3 = " \\
              c1 \\
              c2 \\
              c3 \\
              "

do_functionname() {
    command1 ${VAL1} ${VAL2}
    command2 ${VAL3} ${VAL4}
}
"""
        self._testeditfile({'HELLO': ('newvalue', None, 4, True)}, newfile1)


    def test_edit_metadata_file_2(self):

        newfile2 = """
# A comment
HELLO = "oldvalue"

THIS = "that"

# Another comment
NOCHANGE = "samevalue"
OTHER = 'anothervalue'

MULTILINE = " \\
    d1 \\
    d2 \\
    d3 \\
    "

MULTILINE2 := " \\
               b1 \\
               b2 \\
               b3 \\
               "


MULTILINE3 = "nowsingle"

do_functionname() {
    command1 ${VAL1} ${VAL2}
    command2 ${VAL3} ${VAL4}
}
"""
        self._testeditfile({'MULTILINE': (['d1','d2','d3'], None, 4, False),
                            'MULTILINE3': ('nowsingle', None, 4, True),
                            'NOTPRESENT': (['a', 'b'], None, 4, False)}, newfile2, dummyvars=['NOTPRESENT'])


    def test_edit_metadata_file_3(self):

        newfile3 = """
# A comment
HELLO = "oldvalue"

# Another comment
NOCHANGE = "samevalue"
OTHER = "yetanothervalue"

MULTILINE = "e1 \\
             e2 \\
             e3 \\
             "

MULTILINE2 := "f1 \\
\tf2 \\
\t"


MULTILINE3 = " \\
              c1 \\
              c2 \\
              c3 \\
              "

do_functionname() {
    othercommand_one a b c
    othercommand_two d e f
}
"""

        self._testeditfile({'do_functionname()': (['othercommand_one a b c', 'othercommand_two d e f'], None, 4, False),
                            'MULTILINE2': (['f1', 'f2'], None, '\t', True),
                            'MULTILINE': (['e1', 'e2', 'e3'], None, -1, True),
                            'THIS': (None, None, 0, False),
                            'OTHER': ('yetanothervalue', None, 0, True)}, newfile3)


    def test_edit_metadata_file_4(self):

        newfile4 = """
# A comment
HELLO = "oldvalue"

THIS = "that"

# Another comment
OTHER = 'anothervalue'

MULTILINE = "a1 \\
             a2 \\
             a3"

MULTILINE2 := " \\
               b1 \\
               b2 \\
               b3 \\
               "


"""

        self._testeditfile({'NOCHANGE': (None, None, 0, False),
                            'MULTILINE3': (None, None, 0, False),
                            'THIS': ('that', None, 0, False),
                            'do_functionname()': (None, None, 0, False)}, newfile4)


    def test_edit_metadata(self):
        newfile5 = """
# A comment
HELLO = "hithere"

# A new comment
THIS += "that"

# Another comment
NOCHANGE = "samevalue"
OTHER = 'anothervalue'

MULTILINE = "a1 \\
             a2 \\
             a3"

MULTILINE2 := " \\
               b1 \\
               b2 \\
               b3 \\
               "


MULTILINE3 = " \\
              c1 \\
              c2 \\
              c3 \\
              "

NEWVAR = "value"

do_functionname() {
    command1 ${VAL1} ${VAL2}
    command2 ${VAL3} ${VAL4}
}
"""


        def handle_var(varname, origvalue, op, newlines):
            if varname == 'THIS':
                newlines.append('# A new comment\n')
            elif varname == 'do_functionname()':
                newlines.append('NEWVAR = "value"\n')
                newlines.append('\n')
            valueitem = varvalues.get(varname, None)
            if valueitem:
                return valueitem
            else:
                return (origvalue, op, 0, True)

        varvalues = {'HELLO': ('hithere', None, 0, True), 'THIS': ('that', '+=', 0, True)}
        varlist = ['HELLO', 'THIS', 'do_functionname()']
        (updated, newlines) = bb.utils.edit_metadata(self._origfile.splitlines(True), varlist, handle_var)
        self.assertTrue(updated, 'List should be updated but isn\'t')
        self.assertEqual(newlines, newfile5.splitlines(True))

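An illustrative sketch (not part of the diff): the callback contract _testeditfile() drives, assuming bitbake's lib/ is importable. The callback returns a (value, op, indent, minbreak) tuple per variable; returning the original value leaves the line untouched:

import bb.utils

def set_hello(varname, origvalue, op, newlines):
    if varname == 'HELLO':
        return ('newvalue', None, 4, True)
    return (origvalue, op, 0, True)

lines = ['HELLO = "oldvalue"\n', 'OTHER = "untouched"\n']
updated, newlines = bb.utils.edit_metadata(lines, ['HELLO'], set_hello)
assert updated and 'HELLO = "newvalue"\n' in newlines
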
class EditBbLayersConf(unittest.TestCase):

    def _test_bblayers_edit(self, before, after, add, remove, notadded, notremoved):
        with tempfile.NamedTemporaryFile('w', delete=False) as tf:
            tf.write(before)
            tf.close()
        try:
            actual_notadded, actual_notremoved = bb.utils.edit_bblayers_conf(tf.name, add, remove)
            with open(tf.name) as f:
                actual_after = f.readlines()
            self.assertEqual(after.splitlines(True), actual_after)
            self.assertEqual(notadded, actual_notadded)
            self.assertEqual(notremoved, actual_notremoved)
        finally:
            os.remove(tf.name)


    def test_bblayers_remove(self):
        before = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS = " \
  /home/user/path/layer1 \
  /home/user/path/layer2 \
  /home/user/path/subpath/layer3 \
  /home/user/path/layer4 \
  "
"""
        after = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS = " \
  /home/user/path/layer1 \
  /home/user/path/subpath/layer3 \
  /home/user/path/layer4 \
  "
"""
        self._test_bblayers_edit(before, after,
                                 None,
                                 '/home/user/path/layer2',
                                 [],
                                 [])


    def test_bblayers_add(self):
        before = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS = " \
  /home/user/path/layer1 \
  /home/user/path/layer2 \
  /home/user/path/subpath/layer3 \
  /home/user/path/layer4 \
  "
"""
        after = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS = " \
  /home/user/path/layer1 \
  /home/user/path/layer2 \
  /home/user/path/subpath/layer3 \
  /home/user/path/layer4 \
  /other/path/to/layer5 \
  "
"""
        self._test_bblayers_edit(before, after,
                                 '/other/path/to/layer5/',
                                 None,
                                 [],
                                 [])


    def test_bblayers_add_remove(self):
        before = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS = " \
  /home/user/path/layer1 \
  /home/user/path/layer2 \
  /home/user/path/subpath/layer3 \
  /home/user/path/layer4 \
  "
"""
        after = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS = " \
  /home/user/path/layer1 \
  /home/user/path/layer2 \
  /home/user/path/layer4 \
  /other/path/to/layer5 \
  "
"""
        self._test_bblayers_edit(before, after,
                                 ['/other/path/to/layer5', '/home/user/path/layer2/'], '/home/user/path/subpath/layer3/',
                                 ['/home/user/path/layer2'],
                                 [])


    def test_bblayers_add_remove_home(self):
        before = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS = " \
  ~/path/layer1 \
  ~/path/layer2 \
  ~/otherpath/layer3 \
  ~/path/layer4 \
  "
"""
        after = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS = " \
  ~/path/layer2 \
  ~/path/layer4 \
  ~/path2/layer5 \
  "
"""
        self._test_bblayers_edit(before, after,
                                 [os.environ['HOME'] + '/path/layer4', '~/path2/layer5'],
                                 [os.environ['HOME'] + '/otherpath/layer3', '~/path/layer1', '~/path/notinlist'],
                                 [os.environ['HOME'] + '/path/layer4'],
                                 ['~/path/notinlist'])


    def test_bblayers_add_remove_plusequals(self):
        before = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS += " \
  /home/user/path/layer1 \
  /home/user/path/layer2 \
  "
"""
        after = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS += " \
  /home/user/path/layer2 \
  /home/user/path/layer3 \
  "
"""
        self._test_bblayers_edit(before, after,
                                 '/home/user/path/layer3',
                                 '/home/user/path/layer1',
                                 [],
                                 [])


    def test_bblayers_add_remove_plusequals2(self):
        before = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS += " \
  /home/user/path/layer1 \
  /home/user/path/layer2 \
  /home/user/path/layer3 \
  "
BBLAYERS += "/home/user/path/layer4"
BBLAYERS += "/home/user/path/layer5"
"""
        after = r"""
# A comment

BBPATH = "${TOPDIR}"
BBFILES ?= ""
BBLAYERS += " \
  /home/user/path/layer2 \
  /home/user/path/layer3 \
  "
BBLAYERS += "/home/user/path/layer5"
BBLAYERS += "/home/user/otherpath/layer6"
"""
        self._test_bblayers_edit(before, after,
                                 ['/home/user/otherpath/layer6', '/home/user/path/layer3'], ['/home/user/path/layer1', '/home/user/path/layer4', '/home/user/path/layer7'],
                                 ['/home/user/path/layer3'],
                                 ['/home/user/path/layer7'])

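An illustrative sketch (not part of the diff): the call shape the harness above drives, assuming bitbake's lib/ is importable. add and remove each accept a single path, a list of paths, or None; the return value is (notadded, notremoved). The conf path here is hypothetical:

import bb.utils

notadded, notremoved = bb.utils.edit_bblayers_conf('conf/bblayers.conf',
                                                   add='/path/to/layer5',
                                                   remove=None)
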
@@ -25,33 +25,30 @@ import bb.cache
import bb.cooker
import bb.providers
import bb.utils
from bb.cooker import state, BBCooker, CookerFeatures
from bb.cooker import state, BBCooker
from bb.cookerdata import CookerConfiguration, ConfigParameters
import bb.fetch2

class Tinfoil:
    def __init__(self, output=sys.stdout, tracking=False):
    def __init__(self, output=sys.stdout):
        # Needed to avoid deprecation warnings with python 2.6
        warnings.filterwarnings("ignore", category=DeprecationWarning)

        # Set up logging
        self.logger = logging.getLogger('BitBake')
        self._log_hdlr = logging.StreamHandler(output)
        bb.msg.addDefaultlogFilter(self._log_hdlr)
        console = logging.StreamHandler(output)
        bb.msg.addDefaultlogFilter(console)
        format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
        if output.isatty():
            format.enable_color()
        self._log_hdlr.setFormatter(format)
        self.logger.addHandler(self._log_hdlr)
        console.setFormatter(format)
        self.logger.addHandler(console)

        self.config = CookerConfiguration()
        configparams = TinfoilConfigParameters(parse_only=True)
        self.config.setConfigParameters(configparams)
        self.config.setServerRegIdleCallback(self.register_idle_function)
        features = []
        if tracking:
            features.append(CookerFeatures.BASEDATASTORE_TRACKING)
        self.cooker = BBCooker(self.config, features)
        self.cooker = BBCooker(self.config)
        self.config_data = self.cooker.data
        bb.providers.logger.setLevel(logging.ERROR)
        self.cooker_data = None
@@ -84,19 +81,13 @@ class Tinfoil:
        else:
            self.parseRecipes()

    def shutdown(self):
        self.cooker.shutdown(force=True)
        self.cooker.post_serve()
        self.cooker.unlockBitbake()
        self.logger.removeHandler(self._log_hdlr)

class TinfoilConfigParameters(ConfigParameters):

    def __init__(self, **options):
        self.initial_options = options
        super(TinfoilConfigParameters, self).__init__()

    def parseCommandLine(self, argv=sys.argv):
    def parseCommandLine(self):
        class DummyOptions:
            def __init__(self, initial_options):
                for key, val in initial_options.items():
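A hedged usage sketch (not part of the diff) for the Tinfoil API with the tracking flag shown above; prepare() is assumed from the wider tinfoil module, since only __init__, parseRecipes and shutdown appear in this hunk:

import sys
from bb.tinfoil import Tinfoil

tinfoil = Tinfoil(output=sys.stdout, tracking=False)
tinfoil.prepare()  # assumed entry point; not shown in this hunk
print(tinfoil.config_data.getVar("MACHINE", True))
tinfoil.shutdown()
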
(File diff suppressed because it is too large)
@@ -309,7 +309,7 @@ class Parameters:

def hob_conf_filter(fn, data):
    if fn.endswith("/local.conf"):
        distro = data.getVar("DISTRO_HOB", False)
        distro = data.getVar("DISTRO_HOB")
        if distro:
            if distro != "defaultsetup":
                data.setVar("DISTRO", distro)
@@ -320,13 +320,13 @@ def hob_conf_filter(fn, data):
                "BB_NUMBER_THREADS_HOB", "PARALLEL_MAKE_HOB", "DL_DIR_HOB", \
                "SSTATE_DIR_HOB", "SSTATE_MIRRORS_HOB", "INCOMPATIBLE_LICENSE_HOB"]
        for key in keys:
            var_hob = data.getVar(key, False)
            var_hob = data.getVar(key)
            if var_hob:
                data.setVar(key.split("_HOB")[0], var_hob)
        return

    if fn.endswith("/bblayers.conf"):
        layers = data.getVar("BBLAYERS_HOB", False)
        layers = data.getVar("BBLAYERS_HOB")
        if layers:
            data.setVar("BBLAYERS", layers)
        return

@@ -230,7 +230,10 @@ class SimpleSettingsDialog (CrumbsDialog, SettingsUIHelper):
            self.configuration.sstatemirror = ""
            for mirror in self.sstatemirrors_list:
                if mirror[1] != "" and mirror[2].startswith("file://"):
                    smirror = mirror[2] + " " + mirror[1] + " \\n "
                    if mirror[1].endswith("\\1"):
                        smirror = mirror[2] + " " + mirror[1] + " \\n "
                    else:
                        smirror = mirror[2] + " " + mirror[1] + "\\1 \\n "
                    self.configuration.sstatemirror += smirror
        self.configuration.bbthread = self.bb_spinner.get_value_as_int()
        self.configuration.pmake = self.pmake_spinner.get_value_as_int()

@@ -440,17 +440,11 @@ class HobHandler(gobject.GObject):
            self.commands_async.append(self.SUB_BUILD_RECIPES)
        self.run_next_command(self.GENERATE_PACKAGES)

    def generate_image(self, image, base_image, image_packages=None, toolchain_packages=None, default_task="build"):
    def generate_image(self, image, base_image, image_packages=[], toolchain_packages=[], default_task="build"):
        self.image = image
        self.base_image = base_image
        if image_packages:
            self.package_queue = image_packages
        else:
            self.package_queue = []
        if toolchain_packages:
            self.toolchain_packages = toolchain_packages
        else:
            self.toolchain_packages = []
        self.package_queue = image_packages
        self.toolchain_packages = toolchain_packages
        self.default_task = default_task
        self.runCommand(["setPrePostConfFiles", "conf/.hob.conf", ""])
        self.commands_async.append(self.SUB_PARSE_CONFIG)

@@ -17,7 +17,6 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import sys
import gobject
import gtk
import Queue
@@ -199,7 +198,7 @@ def main(server, eventHandler, params):
            print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
            return 1
        if 'msg' in cmdline and cmdline['msg']:
            print(cmdline['msg'])
            logger.error(cmdline['msg'])
            return 1
        cmdline = cmdline['action']
        if not cmdline or cmdline[0] != "generateDotGraph":
@@ -216,12 +215,6 @@ def main(server, eventHandler, params):
        print("XMLRPC Fault getting commandline:\n %s" % x)
        return

    try:
        gtk.init_check()
    except RuntimeError:
        sys.stderr.write("Please set DISPLAY variable before running this command \n")
        return

    shutdown = 0

    gtkgui = gtkthread(shutdown)
@@ -243,7 +236,7 @@ def main(server, eventHandler, params):
        try:
            event = eventHandler.waitEvent(0.25)
            if gtkthread.quit.isSet():
                _, error = server.runCommand(["stateForceShutdown"])
                _, error = server.runCommand(["stateStop"])
                if error:
                    print('Unable to cleanly stop: %s' % error)
                break

@@ -104,11 +104,10 @@ class InteractConsoleLogFilter(logging.Filter):
        return True

class TerminalFilter(object):
    rows = 25
    columns = 80

    def sigwinch_handle(self, signum, frame):
        self.rows, self.columns = self.getTerminalColumns()
        self.columns = self.getTerminalColumns()
        if self._sigwinch_default:
            self._sigwinch_default(signum, frame)

@@ -132,7 +131,7 @@ class TerminalFilter(object):
                cr = (env['LINES'], env['COLUMNS'])
            except:
                cr = (25, 80)
        return cr
        return cr[1]

    def __init__(self, main, helper, console, errconsole, format):
        self.main = main
@@ -208,7 +207,7 @@ class TerminalFilter(object):
            content = "Currently %s running tasks (%s of %s):" % (len(activetasks), self.helper.tasknumber_current, self.helper.tasknumber_total)
            print(content)
            lines = 1 + int(len(content) / (self.columns + 1))
            for tasknum, task in enumerate(tasks[:(self.rows - 2)]):
            for tasknum, task in enumerate(tasks):
                content = "%s: %s" % (tasknum, task)
                print(content)
                lines = lines + 1 + int(len(content) / (self.columns + 1))
@@ -231,7 +230,7 @@ def _log_settings_from_server(server):
    if error:
        logger.error("Unable to get the value of BBINCLUDELOGS_LINES variable: %s" % error)
        raise BaseException(error)
    consolelogfile, error = server.runCommand(["getSetVariable", "BB_CONSOLELOG"])
    consolelogfile, error = server.runCommand(["getVariable", "BB_CONSOLELOG"])
    if error:
        logger.error("Unable to get the value of BB_CONSOLELOG variable: %s" % error)
        raise BaseException(error)
@@ -272,7 +271,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
        server.terminateServer()
        return

    if consolelogfile and not params.options.show_environment and not params.options.show_versions:
    if consolelogfile and not params.options.show_environment:
        bb.utils.mkdirhier(os.path.dirname(consolelogfile))
        conlogformat = bb.msg.BBLogFormatter(format_str)
        consolelog = logging.FileHandler(consolelogfile)
@@ -285,7 +284,6 @@ def main(server, eventHandler, params, tf = TerminalFilter):

    if not params.observe_only:
        params.updateFromServer(server)
        params.updateToServer(server, os.environ.copy())
        cmdline = params.parseActions()
        if not cmdline:
            print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
@@ -323,8 +321,8 @@ def main(server, eventHandler, params, tf = TerminalFilter):
                break
            termfilter.updateFooter()
            event = eventHandler.waitEvent(0.25)
            if event is None:
                continue
            if event is None:
                continue
            helper.eventHandler(event)
            if isinstance(event, bb.runqueue.runQueueExitWait):
                if not main.shutdown:
@@ -353,7 +351,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
                # For "normal" logging conditions, don't show note logs from tasks
                # but do show them if the user has changed the default log level to
                # include verbose/debug messages
                if event.taskpid != 0 and event.levelno <= format.NOTE and (event.levelno < llevel or (event.levelno == format.NOTE and llevel != format.VERBOSE)):
                if event.taskpid != 0 and event.levelno <= format.NOTE:
                    continue
                logger.handle(event)
                continue
@@ -509,11 +507,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
            termfilter.clearFooter()
            # ignore interrupted io
            if ioerror.args[0] == 4:
                continue
            sys.stderr.write(str(ioerror))
            if not params.observe_only:
                _, error = server.runCommand(["stateForceShutdown"])
            main.shutdown = 2
            pass
        except KeyboardInterrupt:
            termfilter.clearFooter()
            if params.observe_only:
@@ -532,34 +526,25 @@ def main(server, eventHandler, params, tf = TerminalFilter):
                logger.error("Unable to cleanly shutdown: %s" % error)
            main.shutdown = main.shutdown + 1
            pass
        except Exception as e:
            sys.stderr.write(str(e))
            if not params.observe_only:
                _, error = server.runCommand(["stateForceShutdown"])
            main.shutdown = 2
    try:
        summary = ""
        if taskfailures:
            summary += pluralise("\nSummary: %s task failed:",
                                 "\nSummary: %s tasks failed:", len(taskfailures))
            for failure in taskfailures:
                summary += "\n %s" % failure
        if warnings:
            summary += pluralise("\nSummary: There was %s WARNING message shown.",
                                 "\nSummary: There were %s WARNING messages shown.", warnings)
        if return_value and errors:
            summary += pluralise("\nSummary: There was %s ERROR message shown, returning a non-zero exit code.",
                                 "\nSummary: There were %s ERROR messages shown, returning a non-zero exit code.", errors)
        if summary:
            print(summary)

        if interrupted:
            print("Execution was interrupted, returning a non-zero exit code.")
            if return_value == 0:
                return_value = 1
    except IOError as e:
        import errno
        if e.errno == errno.EPIPE:
            pass
    summary = ""
    if taskfailures:
        summary += pluralise("\nSummary: %s task failed:",
                             "\nSummary: %s tasks failed:", len(taskfailures))
        for failure in taskfailures:
            summary += "\n %s" % failure
    if warnings:
        summary += pluralise("\nSummary: There was %s WARNING message shown.",
                             "\nSummary: There were %s WARNING messages shown.", warnings)
    if return_value and errors:
        summary += pluralise("\nSummary: There was %s ERROR message shown, returning a non-zero exit code.",
                             "\nSummary: There were %s ERROR messages shown, returning a non-zero exit code.", errors)
    if summary:
        print(summary)

    if interrupted:
        print("Execution was interrupted, returning a non-zero exit code.")
        if return_value == 0:
            return_value = 1

    return return_value

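An illustrative stand-in (not part of the diff): the pluralise() helper the summary code above calls has this (singular, plural, count) shape:

def pluralise(singular, plural, qty):
    if qty == 1:
        return singular % qty
    return plural % qty

print(pluralise("\nSummary: %s task failed:", "\nSummary: %s tasks failed:", 2))
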
@@ -361,13 +361,13 @@ class NCursesUI:
                shutdown = shutdown + 1
                pass

def main(server, eventHandler, params):
def main(server, eventHandler):
    if not os.isatty(sys.stdout.fileno()):
        print("FATAL: Unable to run 'ncurses' UI without a TTY.")
        return
    ui = NCursesUI()
    try:
        curses.wrapper(ui.main, server, eventHandler, params)
        curses.wrapper(ui.main, server, eventHandler)
    except:
        import traceback
        traceback.print_exc()

@@ -21,8 +21,6 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

from __future__ import division
import time
import sys
try:
    import bb
except RuntimeError as exc:
@@ -32,200 +30,104 @@ from bb.ui import uihelper
from bb.ui.buildinfohelper import BuildInfoHelper

import bb.msg
import copy
import fcntl
import logging
import os
import progressbar
import signal
import struct
import sys
import time
import xmlrpclib

# pylint: disable=invalid-name
# module properties for UI modules are read by bitbake and the contract should not be broken
featureSet = [bb.cooker.CookerFeatures.HOB_EXTRA_CACHES, bb.cooker.CookerFeatures.SEND_DEPENDS_TREE, bb.cooker.CookerFeatures.BASEDATASTORE_TRACKING]


featureSet = [bb.cooker.CookerFeatures.HOB_EXTRA_CACHES, bb.cooker.CookerFeatures.SEND_DEPENDS_TREE, bb.cooker.CookerFeatures.BASEDATASTORE_TRACKING, bb.cooker.CookerFeatures.SEND_SANITYEVENTS]

logger = logging.getLogger("ToasterLogger")
logger = logging.getLogger("BitBake")
interactive = sys.stdout.isatty()


def _log_settings_from_server(server):
    # Get values of variables which control our output
    includelogs, error = server.runCommand(["getVariable", "BBINCLUDELOGS"])
    if error:
        logger.error("Unable to get the value of BBINCLUDELOGS variable: %s", error)
        logger.error("Unable to get the value of BBINCLUDELOGS variable: %s" % error)
        raise BaseException(error)
    loglines, error = server.runCommand(["getVariable", "BBINCLUDELOGS_LINES"])
    if error:
        logger.error("Unable to get the value of BBINCLUDELOGS_LINES variable: %s", error)
        logger.error("Unable to get the value of BBINCLUDELOGS_LINES variable: %s" % error)
        raise BaseException(error)
    consolelogfile, error = server.runCommand(["getVariable", "BB_CONSOLELOG"])
    if error:
        logger.error("Unable to get the value of BB_CONSOLELOG variable: %s", error)
        raise BaseException(error)
    return consolelogfile
    return includelogs, loglines

# create a log file for a single build and direct the logger at it;
# log file name is timestamped to the millisecond (depending
# on system clock accuracy) to ensure it doesn't overlap with
# other log file names
#
# returns (log file, path to log file) for a build
def _open_build_log(log_dir):
    format_str = "%(levelname)s: %(message)s"

    now = time.time()
    now_ms = int((now - int(now)) * 1000)
    time_str = time.strftime('build_%Y%m%d_%H%M%S', time.localtime(now))
    log_file_name = time_str + ('.%d.log' % now_ms)
    build_log_file_path = os.path.join(log_dir, log_file_name)

    build_log = logging.FileHandler(build_log_file_path)

    logformat = bb.msg.BBLogFormatter(format_str)
    build_log.setFormatter(logformat)

    bb.msg.addDefaultlogFilter(build_log)
    logger.addHandler(build_log)

    return (build_log, build_log_file_path)

# stop logging to the build log if it exists
def _close_build_log(build_log):
    if build_log:
        build_log.flush()
        build_log.close()
        logger.removeHandler(build_log)

_evt_list = [ "bb.runqueue.runQueueExitWait", "bb.event.LogExecTTY", "logging.LogRecord",
              "bb.build.TaskFailed", "bb.build.TaskBase", "bb.event.ParseStarted",
              "bb.event.ParseProgress", "bb.event.ParseCompleted", "bb.event.CacheLoadStarted",
              "bb.event.CacheLoadProgress", "bb.event.CacheLoadCompleted", "bb.command.CommandFailed",
              "bb.command.CommandExit", "bb.command.CommandCompleted", "bb.cooker.CookerExit",
              "bb.event.MultipleProviders", "bb.event.NoProvider", "bb.runqueue.sceneQueueTaskStarted",
              "bb.runqueue.runQueueTaskStarted", "bb.runqueue.runQueueTaskFailed", "bb.runqueue.sceneQueueTaskFailed",
              "bb.event.BuildBase", "bb.build.TaskStarted", "bb.build.TaskSucceeded", "bb.build.TaskFailedSilent"]

def main(server, eventHandler, params):
    # set to a logging.FileHandler instance when a build starts;
    # see _open_build_log()
    build_log = None

    # set to the log path when a build starts
    build_log_file_path = None

    helper = uihelper.BBUIHelper()

    # TODO don't use log output to determine when bitbake has started
    #
    # WARNING: this log handler cannot be removed, as localhostbecontroller
    # relies on output in the toaster_ui.log file to determine whether
    # the bitbake server has started, which only happens if
    # this logger is setup here (see the TODO in the loop below)
    console = logging.StreamHandler(sys.stdout)
    format_str = "%(levelname)s: %(message)s"
    formatter = bb.msg.BBLogFormatter(format_str)
    bb.msg.addDefaultlogFilter(console)
    console.setFormatter(formatter)
    logger.addHandler(console)
    logger.setLevel(logging.INFO)
    llevel, debug_domains = bb.msg.constructLogOptions()
    server.runCommand(["setEventMask", server.getEventHandle(), llevel, debug_domains, _evt_list])
def main(server, eventHandler, params ):

    includelogs, loglines = _log_settings_from_server(server)

    # verify and warn
    build_history_enabled = True
    inheritlist, _ = server.runCommand(["getVariable", "INHERIT"])

    inheritlist, error = server.runCommand(["getVariable", "INHERIT"])
    if not "buildhistory" in inheritlist.split(" "):
        logger.warn("buildhistory is not enabled. Please enable INHERIT += \"buildhistory\" to see image details.")
        build_history_enabled = False

    helper = uihelper.BBUIHelper()

    console = logging.StreamHandler(sys.stdout)
    format_str = "%(levelname)s: %(message)s"
    format = bb.msg.BBLogFormatter(format_str)
    bb.msg.addDefaultlogFilter(console)
    console.setFormatter(format)
    logger.addHandler(console)

    if not params.observe_only:
        logger.error("ToasterUI can only work in observer mode")
        return 1
        return


    # set to 1 when toasterui needs to shut down
    main.shutdown = 0

    interrupted = False
    return_value = 0
    errors = 0
    warnings = 0
    taskfailures = []
    first = True

    buildinfohelper = BuildInfoHelper(server, build_history_enabled)

    # write our own log files into bitbake's log directory;
    # we're only interested in the path to the parent directory of
    # this file, as we're writing our own logs into the same directory
    consolelogfile = _log_settings_from_server(server)
    log_dir = os.path.dirname(consolelogfile)
    bb.utils.mkdirhier(log_dir)

    while True:
        try:
            event = eventHandler.waitEvent(0.25)
            if first:
                first = False

                # TODO don't use log output to determine when bitbake has started
                #
                # this is the line localhostbecontroller needs to
                # see in toaster_ui.log which it uses to decide whether
                # the bitbake server has started...
                logger.info("ToasterUI waiting for events")

            if event is None:
                if main.shutdown > 0:
                    # if shutting down, close any open build log first
                    _close_build_log(build_log)

                    break
                continue

            helper.eventHandler(event)

            # pylint: disable=protected-access
            # the code will look into the protected variables of the event; no easy way around this

            # we treat ParseStarted as the first event of toaster-triggered
            # builds; that way we get the Build Configuration included in the log
            # and any errors that occur before BuildStarted is fired
            if isinstance(event, bb.event.ParseStarted):
                if not (build_log and build_log_file_path):
                    build_log, build_log_file_path = _open_build_log(log_dir)
                continue

            if isinstance(event, bb.event.BuildStarted):
                if not (build_log and build_log_file_path):
                    build_log, build_log_file_path = _open_build_log(log_dir)

                buildinfohelper.store_started_build(event, build_log_file_path)
                buildinfohelper.store_started_build(event)

            if isinstance(event, (bb.build.TaskStarted, bb.build.TaskSucceeded, bb.build.TaskFailedSilent)):
                buildinfohelper.update_and_store_task(event)
                logger.info("Logfile for task %s", event.logfile)
                continue

            if isinstance(event, bb.build.TaskBase):
                logger.info(event._message)

            if isinstance(event, bb.event.LogExecTTY):
                logger.info(event.msg)
                logger.warn(event.msg)
                continue

            if isinstance(event, logging.LogRecord):
                if event.levelno == -1:
                    event.levelno = formatter.ERROR

                buildinfohelper.store_log_event(event)

                if event.levelno >= formatter.ERROR:
                if event.levelno >= format.ERROR:
                    errors = errors + 1
                elif event.levelno == formatter.WARNING:
                    return_value = 1
                elif event.levelno == format.WARNING:
                    warnings = warnings + 1

                # For "normal" logging conditions, don't show note logs from tasks
                # but do show them if the user has changed the default log level to
                # include verbose/debug messages
                if event.taskpid != 0 and event.levelno <= formatter.NOTE:
                if event.taskpid != 0 and event.levelno <= format.NOTE:
                    continue

                logger.handle(event)
@@ -233,6 +135,7 @@ def main(server, eventHandler, params):

            if isinstance(event, bb.build.TaskFailed):
                buildinfohelper.update_and_store_task(event)
                return_value = 1
                logfile = event.logfile
                if logfile and os.path.exists(logfile):
                    bb.error("Logfile of failure stored in: %s" % logfile)
@@ -242,6 +145,8 @@ def main(server, eventHandler, params):
            # timing and error informations from the parsing phase in Toaster
            if isinstance(event, (bb.event.SanityCheckPassed, bb.event.SanityCheck)):
                continue
            if isinstance(event, bb.event.ParseStarted):
                continue
            if isinstance(event, bb.event.ParseProgress):
                continue
            if isinstance(event, bb.event.ParseCompleted):
@@ -253,13 +158,9 @@ def main(server, eventHandler, params):
            if isinstance(event, bb.event.CacheLoadCompleted):
                continue
            if isinstance(event, bb.event.MultipleProviders):
                logger.info("multiple providers are available for %s%s (%s)", event._is_runtime and "runtime " or "",
                            event._item,
                            ", ".join(event._candidates))
                logger.info("consider defining a PREFERRED_PROVIDER entry to match %s", event._item)
                continue

            if isinstance(event, bb.event.NoProvider):
                return_value = 1
                errors = errors + 1
                if event._runtime:
                    r = "R"
@@ -309,41 +210,27 @@ def main(server, eventHandler, params):
            if isinstance(event, (bb.event.TreeDataPreparationStarted, bb.event.TreeDataPreparationCompleted)):
                continue

            if isinstance(event, (bb.event.BuildCompleted, bb.command.CommandFailed)):

                errorcode = 0
                if isinstance(event, bb.command.CommandFailed):
                    errors += 1
                    errorcode = 1
                    logger.error("Command execution failed: %s", event.error)

                # turn off logging to the current build log
                _close_build_log(build_log)

                # reset ready for next BuildStarted
                build_log = None

                # update the build info helper on BuildCompleted, not on CommandXXX
                buildinfohelper.update_build_information(event, errors, warnings, taskfailures)
                buildinfohelper.close(errorcode)
                # mark the log output; controllers may kill the toasterUI after seeing this log
                logger.info("ToasterUI build done 1, brbe: %s", buildinfohelper.brbe )

                # we start a new build info
                logger.debug("ToasterUI prepared for new build")
                errors = 0
                warnings = 0
                taskfailures = []
                buildinfohelper = BuildInfoHelper(server, build_history_enabled)

                logger.info("ToasterUI build done 2")
            if isinstance(event, (bb.event.BuildCompleted)):
                continue

            if isinstance(event, (bb.command.CommandCompleted,
                                  bb.command.CommandFailed,
                                  bb.command.CommandExit)):
                errorcode = 0
                if (isinstance(event, bb.command.CommandFailed)):
                    event.levelno = format.ERROR
                    event.msg = event.error
                    event.pathname = ""
                    event.lineno = 0
                    buildinfohelper.store_log_event(event)
                    errors += 1

                buildinfohelper.update_build_information(event, errors, warnings, taskfailures)

                # we start a new build info
                errors = 0
                warnings = 0
                taskfailures = []
                buildinfohelper = BuildInfoHelper(server, build_history_enabled)
                continue

            if isinstance(event, bb.event.MetadataEvent):
@@ -359,18 +246,13 @@ def main(server, eventHandler, params):
                    buildinfohelper.store_missed_state_tasks(event)
                elif event.type == "ImageFileSize":
                    buildinfohelper.update_target_image_file(event)
                elif event.type == "ArtifactFileSize":
                    buildinfohelper.update_artifact_image_file(event)
                elif event.type == "LicenseManifestPath":
                    buildinfohelper.store_license_manifest_path(event)
                else:
                    logger.error("Unprocessed MetadataEvent %s ", str(event))
                continue

            if isinstance(event, bb.cooker.CookerExit):
                # shutdown when bitbake server shuts down
                main.shutdown = 1
                continue
                # exit when the server exits
                break

            # ignore
            if isinstance(event, (bb.event.BuildBase,
@@ -381,16 +263,14 @@ def main(server, eventHandler, params):
                                  bb.event.OperationProgress,
                                  bb.command.CommandFailed,
                                  bb.command.CommandExit,
                                  bb.command.CommandCompleted,
                                  bb.event.ReachableStamps)):
                                  bb.command.CommandCompleted)):
                continue

            if isinstance(event, bb.event.DepTreeGenerated):
                buildinfohelper.store_dependency_information(event)
                continue

            logger.warn("Unknown event: %s", event)
            return_value += 1
            logger.error("Unknown event: %s", event)

        except EnvironmentError as ioerror:
            # ignore interrupted io
@@ -398,31 +278,15 @@ def main(server, eventHandler, params):
            pass
        except KeyboardInterrupt:
            main.shutdown = 1
            pass
        except Exception as e:
            # print errors to log
            logger.error(e)
            import traceback
            from pprint import pformat
            exception_data = traceback.format_exc()
            logger.error("%s\n%s" , e, exception_data)
            traceback.print_exc()
            pass

            _, _, tb = sys.exc_info()
            if tb is not None:
                curr = tb
                while curr is not None:
                    logger.error("Error data dump %s\n%s\n" , traceback.format_tb(curr,1), pformat(curr.tb_frame.f_locals))
                    curr = curr.tb_next
    if interrupted:
        if return_value == 0:
            return_value = 1

            # save them to database, if possible; if it fails, we already logged to console.
            try:
                buildinfohelper.store_log_exception("%s\n%s" % (str(e), exception_data))
            except Exception as ce:
                logger.error("CRITICAL - Failed to to save toaster exception to the database: %s", str(ce))

            # make sure we return with an error
            return_value += 1

    if interrupted and return_value == 0:
        return_value += 1

    logger.warn("Return value is %d", return_value)
    return return_value

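An illustrative sketch (not part of the diff): the millisecond-stamped log file naming from _open_build_log() above, shown standalone so per-build logs don't collide:

import os
import time

def build_log_path(log_dir):
    now = time.time()
    now_ms = int((now - int(now)) * 1000)
    time_str = time.strftime('build_%Y%m%d_%H%M%S', time.localtime(now))
    return os.path.join(log_dir, time_str + ('.%d.log' % now_ms))

print(build_log_path("/tmp"))
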
@@ -44,27 +44,10 @@ class BBUIEventQueue:
        server.register_function( self.send_event, "event.sendpickle" )
        server.socket.settimeout(1)

        self.EventHandler = None
        count_tries = 0
        self.EventHandle = self.BBServer.registerEventHandler(self.host, self.port)

        # the event handler registration may fail here due to cooker being in invalid state
        # this is a transient situation, and we should retry a couple of times before
        # giving up

        while self.EventHandler == None and count_tries < 5:
            self.EventHandle = self.BBServer.registerEventHandler(self.host, self.port)

            if (self.EventHandle != None):
                break

            bb.warn("Could not register UI event handler %s:%d, retry" % (self.host, self.port))
            count_tries += 1
            import time
            time.sleep(1)


        if self.EventHandle == None:
            raise Exception("Could not register UI event handler")
        if (self.EventHandle == None):
            bb.fatal("Could not register UI event handler")

        self.server = server

@@ -106,12 +89,7 @@ class BBUIEventQueue:

        self.server.timeout = 1
        while not self.server.quit:
            try:
                self.server.handle_request()
            except Exception as e:
                import traceback
                logger.error("BBUIEventQueue.startCallbackHandler: Exception while trying to handle request: %s\n%s" % (e, traceback.format_exc(e)))

            self.server.handle_request()
        self.server.server_close()

    def system_quit( self ):

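An illustrative sketch (not part of the diff): the bounded-retry shape of the registerEventHandler loop above, with the flaky call abstracted into a parameter:

import time

def register_with_retry(register, tries=5, delay=1):
    for _ in range(tries):
        handle = register()
        if handle is not None:
            return handle
        time.sleep(delay)
    raise Exception("Could not register UI event handler")
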
@@ -29,14 +29,10 @@ import multiprocessing
import fcntl
import subprocess
import glob
import fnmatch
import traceback
import errno
import signal
from commands import getstatusoutput
from contextlib import contextmanager
from ctypes import cdll


logger = logging.getLogger("BitBake.Util")

@@ -57,9 +53,6 @@ def set_context(ctx):
# Context used in better_exec, eval
_context = clean_context()

class VersionStringException(Exception):
    """Exception raised when an invalid version specification is found"""

def explode_version(s):
    r = []
    alpha_regexp = re.compile('^([a-zA-Z]+)(.*)$')
@@ -135,28 +128,6 @@ def vercmp_string(a, b):
    tb = split_version(b)
    return vercmp(ta, tb)

def vercmp_string_op(a, b, op):
    """
    Compare two versions and check if the specified comparison operator matches the result of the comparison.
    This function is fairly liberal about what operators it will accept since there are a variety of styles
    depending on the context.
    """
    res = vercmp_string(a, b)
    if op in ('=', '=='):
        return res == 0
    elif op == '<=':
        return res <= 0
    elif op == '>=':
        return res >= 0
    elif op in ('>', '>>'):
        return res > 0
    elif op in ('<', '<<'):
        return res < 0
    elif op == '!=':
        return res != 0
    else:
        raise VersionStringException('Unsupported comparison operator "%s"' % op)

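An illustrative sketch (not part of the diff): vercmp_string_op() in use, following the operator table above; assumes bitbake's lib/ is importable:

import bb.utils

assert bb.utils.vercmp_string_op('1.0', '1.1', '<')
assert bb.utils.vercmp_string_op('1.1', '1.1', '==')
assert not bb.utils.vercmp_string_op('1.0', '1.1', '>=')
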
def explode_deps(s):
    """
    Take an RDEPENDS style string of format:
@@ -217,7 +188,6 @@ def explode_dep_versions2(s):
                    i = i[1:]
                else:
                    # This is an unsupported case!
                    raise VersionStringException('Invalid version specification in "(%s" - invalid or missing operator' % i)
                    lastcmp = (i or "")
                    i = ""
            i.strip()
@@ -294,7 +264,7 @@ def _print_trace(body, line):
def better_compile(text, file, realfile, mode = "exec"):
    """
    A better compile method. This method
    will print the offending lines.
    will print the offending lines.
    """
    try:
        return compile(text, file, mode)
@@ -384,11 +354,14 @@ def better_exec(code, context, text = None, realfile = "<code>"):
    code = better_compile(code, realfile, realfile)
    try:
        exec(code, get_context(), context)
    except (bb.BBHandledException, bb.parse.SkipRecipe, bb.build.FuncFailed, bb.data_smart.ExpansionError):
        # Error already shown so passthrough, no need for traceback
    except bb.BBHandledException:
        # Error already shown so passthrough
        raise
    except Exception as e:
        (t, value, tb) = sys.exc_info()

        if t in [bb.parse.SkipPackage, bb.build.FuncFailed]:
            raise
        try:
            _print_exception(t, value, tb, realfile, text, context)
        except Exception as e:
@@ -416,30 +389,10 @@ def fileslocked(files):
    for lock in locks:
        bb.utils.unlockfile(lock)

@contextmanager
def timeout(seconds):
    def timeout_handler(signum, frame):
        pass

    original_handler = signal.signal(signal.SIGALRM, timeout_handler)

    try:
        signal.alarm(seconds)
        yield
    finally:
        signal.alarm(0)
        signal.signal(signal.SIGALRM, original_handler)

def lockfile(name, shared=False, retry=True, block=False):
def lockfile(name, shared=False, retry=True):
    """
    Use the specified file as a lock file, return when the lock has
    been acquired. Returns a variable to pass to unlockfile().
    Parameters:
        retry: True to re-try locking if it fails, False otherwise
        block: True to block until the lock succeeds, False otherwise
    The retry and block parameters are kind of equivalent unless you
    consider the possibility of sending a signal to the process to break
    out - at which point you want block=True rather than retry=True.
    Use the file fn as a lock file, return when the lock has been acquired.
    Returns a variable to pass to unlockfile().
    """
    dirname = os.path.dirname(name)
    mkdirhier(dirname)
@@ -452,7 +405,7 @@ def lockfile(name, shared=False, retry=True, block=False):
|
||||
op = fcntl.LOCK_EX
|
||||
if shared:
|
||||
op = fcntl.LOCK_SH
|
||||
if not retry and not block:
|
||||
if not retry:
|
||||
op = op | fcntl.LOCK_NB
|
||||
|
||||
while True:
|
||||
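A hedged sketch of how the locking API changed in the two hunks above is typically used (the lock path is hypothetical):

# Illustrative only: serialise access to a shared resource.
lock = bb.utils.lockfile("/tmp/example.lock")    # with the defaults above, retries until acquired
try:
    pass    # critical section: touch the shared resource here
finally:
    bb.utils.unlockfile(lock)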
@@ -572,7 +525,7 @@ def filter_environment(good_vars):
        os.unsetenv(key)
        del os.environ[key]

    if removed_vars:
    if len(removed_vars):
        logger.debug(1, "Removed the following variables from the environment: %s", ", ".join(removed_vars.keys()))

    return removed_vars
@@ -580,7 +533,7 @@ def filter_environment(good_vars):
def approved_variables():
    """
    Determine and return the list of whitelisted variables which are approved
    to remain in the environment.
    to remain in the envrionment.
    """
    if 'BB_PRESERVE_ENV' in os.environ:
        return os.environ.keys()
@@ -743,12 +696,7 @@ def movefile(src, dest, newmtime = None, sstat = None):
    renamefailed = 1
    if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]:
        try:
            # os.rename needs to know the dest path ending with file name
            # so append the file name to a path only if it's a dir specified
            srcfname = os.path.basename(src)
            destpath = os.path.join(dest, srcfname) if os.path.isdir(dest) \
                       else dest
            os.rename(src, destpath)
            os.rename(src, dest)
            renamefailed = 0
        except Exception as e:
            if e[0] != errno.EXDEV:
@@ -918,42 +866,28 @@ def contains(variable, checkvalues, truevalue, falsevalue, d):
        return truevalue
    return falsevalue

def contains_any(variable, checkvalues, truevalue, falsevalue, d):
    val = d.getVar(variable, True)
    if not val:
        return falsevalue
    val = set(val.split())
    if isinstance(checkvalues, basestring):
        checkvalues = set(checkvalues.split())
    else:
        checkvalues = set(checkvalues)
    if checkvalues & val:
        return truevalue
    return falsevalue
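A small sketch of how contains_any() above is typically called (the variable name, flags, and datastore d are placeholders):

# Illustrative only: choose a configure flag if any of several features is enabled.
flag = bb.utils.contains_any('DISTRO_FEATURES', 'x11 wayland',
                             '--enable-gui', '--disable-gui', d)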
def cpu_count():
    return multiprocessing.cpu_count()

def nonblockingfd(fd):
    fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)

def process_profilelog(fn, pout = None):
    # Either call with a list of filenames and set pout or a filename and optionally pout.
    if not pout:
        pout = fn + '.processed'
    pout = open(pout, 'w')
def process_profilelog(fn):
    # Redirect stdout to capture profile information
    pout = open(fn + '.processed', 'w')
    so = sys.stdout.fileno()
    orig_so = os.dup(sys.stdout.fileno())
    os.dup2(pout.fileno(), so)

    import pstats
    if isinstance(fn, list):
        p = pstats.Stats(*fn, stream=pout)
    else:
        p = pstats.Stats(fn, stream=pout)
    p = pstats.Stats(fn)
    p.sort_stats('time')
    p.print_stats()
    p.print_callers()
    p.sort_stats('cumulative')
    p.print_stats()

    os.dup2(orig_so, so)
    pout.flush()
    pout.close()
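A usage sketch for the profile post-processing above (the file name is hypothetical; only the newer signature also accepts a list of files and an explicit pout):

# Illustrative only: render a cProfile dump into a readable report.
bb.utils.process_profilelog('profile.log')    # writes profile.log.processed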
@@ -961,427 +895,5 @@ def process_profilelog(fn, pout = None):
# Was present to work around multiprocessing pool bugs in python < 2.7.3
#
def multiprocessingpool(*args, **kwargs):

    import multiprocessing.pool
    #import multiprocessing.util
    #multiprocessing.util.log_to_stderr(10)
    # Deal with a multiprocessing bug where signals to the processes would be delayed until the work
    # completes. Putting in a timeout means the signals (like SIGINT/SIGTERM) get processed.
    def wrapper(func):
        def wrap(self, timeout=None):
            return func(self, timeout=timeout if timeout is not None else 1e100)
        return wrap
    multiprocessing.pool.IMapIterator.next = wrapper(multiprocessing.pool.IMapIterator.next)

    return multiprocessing.Pool(*args, **kwargs)

def exec_flat_python_func(func, *args, **kwargs):
    """Execute a flat python function (defined with def funcname(args):...)"""
    # Prepare a small piece of python code which calls the requested function
    # To do this we need to prepare two things - a set of variables we can use to pass
    # the values of arguments into the calling function, and the list of arguments for
    # the function being called
    context = {}
    funcargs = []
    # Handle unnamed arguments
    aidx = 1
    for arg in args:
        argname = 'arg_%s' % aidx
        context[argname] = arg
        funcargs.append(argname)
        aidx += 1
    # Handle keyword arguments
    context.update(kwargs)
    funcargs.extend(['%s=%s' % (arg, arg) for arg in kwargs.iterkeys()])
    code = 'retval = %s(%s)' % (func, ', '.join(funcargs))
    comp = bb.utils.better_compile(code, '<string>', '<string>')
    bb.utils.better_exec(comp, context, code, '<string>')
    return context['retval']
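A minimal sketch of calling exec_flat_python_func() above; the function name must resolve in the execution context, so a builtin serves as a safe example:

# Illustrative only: call a flat function by name with positional and keyword arguments.
result = bb.utils.exec_flat_python_func('sorted', [3, 1, 2], reverse=True)
# result == [3, 2, 1]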
def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
    """Edit lines from a recipe or config file and modify one or more
    specified variable values set in the file using a specified callback
    function. Lines are expected to have trailing newlines.
    Parameters:
        meta_lines: lines from the file; can be a list or an iterable
            (e.g. file pointer)
        variables: a list of variable names to look for. Functions
            may also be specified, but must be specified with '()' at
            the end of the name. Note that the function doesn't have
            any intrinsic understanding of _append, _prepend, _remove,
            or overrides, so these are considered as part of the name.
            These values go into a regular expression, so regular
            expression syntax is allowed.
        varfunc: callback function called for every variable matching
            one of the entries in the variables parameter. The function
            should take four arguments:
                varname: name of variable matched
                origvalue: current value in file
                op: the operator (e.g. '+=')
                newlines: list of lines up to this point. You can use
                    this to prepend lines before this variable setting
                    if you wish.
            and should return a four-element tuple:
                newvalue: new value to substitute in, or None to drop
                    the variable setting entirely. (If the removal
                    results in two consecutive blank lines, one of the
                    blank lines will also be dropped).
                newop: the operator to use - if you specify None here,
                    the original operation will be used.
                indent: number of spaces to indent multi-line entries,
                    or -1 to indent up to the level of the assignment
                    and opening quote, or a string to use as the indent.
                minbreak: True to allow the first element of a
                    multi-line value to continue on the same line as
                    the assignment, False to indent before the first
                    element.
        match_overrides: True to match items with _overrides on the end,
            False otherwise
    Returns a tuple:
        updated:
            True if changes were made, False otherwise.
        newlines:
            Lines after processing
    """

    var_res = {}
    if match_overrides:
        override_re = '(_[a-zA-Z0-9-_$(){}]+)?'
    else:
        override_re = ''
    for var in variables:
        if var.endswith('()'):
            var_res[var] = re.compile('^(%s%s)[ \\t]*\([ \\t]*\)[ \\t]*{' % (var[:-2].rstrip(), override_re))
        else:
            var_res[var] = re.compile('^(%s%s)[ \\t]*[?+:.]*=[+.]*[ \\t]*(["\'])' % (var, override_re))

    updated = False
    varset_start = ''
    varlines = []
    newlines = []
    in_var = None
    full_value = ''
    var_end = ''

    def handle_var_end():
        prerun_newlines = newlines[:]
        op = varset_start[len(in_var):].strip()
        (newvalue, newop, indent, minbreak) = varfunc(in_var, full_value, op, newlines)
        changed = (prerun_newlines != newlines)

        if newvalue is None:
            # Drop the value
            return True
        elif newvalue != full_value or (newop not in [None, op]):
            if newop not in [None, op]:
                # Callback changed the operator
                varset_new = "%s %s" % (in_var, newop)
            else:
                varset_new = varset_start

            if isinstance(indent, (int, long)):
                if indent == -1:
                    indentspc = ' ' * (len(varset_new) + 2)
                else:
                    indentspc = ' ' * indent
            else:
                indentspc = indent
            if in_var.endswith('()'):
                # A function definition
                if isinstance(newvalue, list):
                    newlines.append('%s {\n%s%s\n}\n' % (varset_new, indentspc, ('\n%s' % indentspc).join(newvalue)))
                else:
                    if not newvalue.startswith('\n'):
                        newvalue = '\n' + newvalue
                    if not newvalue.endswith('\n'):
                        newvalue = newvalue + '\n'
                    newlines.append('%s {%s}\n' % (varset_new, newvalue))
            else:
                # Normal variable
                if isinstance(newvalue, list):
                    if not newvalue:
                        # Empty list -> empty string
                        newlines.append('%s ""\n' % varset_new)
                    elif minbreak:
                        # First item on first line
                        if len(newvalue) == 1:
                            newlines.append('%s "%s"\n' % (varset_new, newvalue[0]))
                        else:
                            newlines.append('%s "%s \\\n' % (varset_new, newvalue[0]))
                            for item in newvalue[1:]:
                                newlines.append('%s%s \\\n' % (indentspc, item))
                            newlines.append('%s"\n' % indentspc)
                    else:
                        # No item on first line
                        newlines.append('%s " \\\n' % varset_new)
                        for item in newvalue:
                            newlines.append('%s%s \\\n' % (indentspc, item))
                        newlines.append('%s"\n' % indentspc)
                else:
                    newlines.append('%s "%s"\n' % (varset_new, newvalue))
            return True
        else:
            # Put the old lines back where they were
            newlines.extend(varlines)
            # If newlines was touched by the function, we'll need to return True
            return changed

    checkspc = False

    for line in meta_lines:
        if in_var:
            value = line.rstrip()
            varlines.append(line)
            if in_var.endswith('()'):
                full_value += '\n' + value
            else:
                full_value += value[:-1]
            if value.endswith(var_end):
                if in_var.endswith('()'):
                    if full_value.count('{') - full_value.count('}') >= 0:
                        continue
                    full_value = full_value[:-1]
                if handle_var_end():
                    updated = True
                    checkspc = True
                in_var = None
        else:
            skip = False
            for (varname, var_re) in var_res.iteritems():
                res = var_re.match(line)
                if res:
                    isfunc = varname.endswith('()')
                    if isfunc:
                        splitvalue = line.split('{', 1)
                        var_end = '}'
                    else:
                        var_end = res.groups()[-1]
                        splitvalue = line.split(var_end, 1)
                    varset_start = splitvalue[0].rstrip()
                    value = splitvalue[1].rstrip()
                    if not isfunc and value.endswith('\\'):
                        value = value[:-1]
                    full_value = value
                    varlines = [line]
                    in_var = res.group(1)
                    if isfunc:
                        in_var += '()'
                    if value.endswith(var_end):
                        full_value = full_value[:-1]
                        if handle_var_end():
                            updated = True
                            checkspc = True
                        in_var = None
                    skip = True
                    break
            if not skip:
                if checkspc:
                    checkspc = False
                    if newlines and newlines[-1] == '\n' and line == '\n':
                        # Squash blank line if there are two consecutive blanks after a removal
                        continue
                newlines.append(line)
    return (updated, newlines)

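A hedged example of the varfunc contract documented above (the variable name and value are hypothetical):

# Illustrative only: rewrite a variable's value in place.
def patch_desc(varname, origvalue, op, newlines):
    # Return (newvalue, newop, indent, minbreak); newop=None keeps the original operator.
    return ('An improved description', None, 4, False)

lines = ['DESCRIPTION = "old text"\n']
updated, newlines = bb.utils.edit_metadata(lines, ['DESCRIPTION'], patch_desc)
# updated is True and newlines holds the rewritten assignment.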
def edit_metadata_file(meta_file, variables, varfunc):
    """Edit a recipe or config file and modify one or more specified
    variable values set in the file using a specified callback function.
    The file is only written to if the value(s) actually change.
    This is basically the file version of edit_metadata(), see that
    function's description for parameter/usage information.
    Returns True if the file was written to, False otherwise.
    """
    with open(meta_file, 'r') as f:
        (updated, newlines) = edit_metadata(f, variables, varfunc)
    if updated:
        with open(meta_file, 'w') as f:
            f.writelines(newlines)
    return updated


def edit_bblayers_conf(bblayers_conf, add, remove):
    """Edit bblayers.conf, adding and/or removing layers
    Parameters:
        bblayers_conf: path to bblayers.conf file to edit
        add: layer path (or list of layer paths) to add; None or empty
            list to add nothing
        remove: layer path (or list of layer paths) to remove; None or
            empty list to remove nothing
    Returns a tuple:
        notadded: list of layers specified to be added but weren't
            (because they were already in the list)
        notremoved: list of layers that were specified to be removed
            but weren't (because they weren't in the list)
    """

    import fnmatch

    def remove_trailing_sep(pth):
        if pth and pth[-1] == os.sep:
            pth = pth[:-1]
        return pth

    approved = bb.utils.approved_variables()
    def canonicalise_path(pth):
        pth = remove_trailing_sep(pth)
        if 'HOME' in approved and '~' in pth:
            pth = os.path.expanduser(pth)
        return pth

    def layerlist_param(value):
        if not value:
            return []
        elif isinstance(value, list):
            return [remove_trailing_sep(x) for x in value]
        else:
            return [remove_trailing_sep(value)]

    addlayers = layerlist_param(add)
    removelayers = layerlist_param(remove)

    # Need to use a list here because we can't set non-local variables from a callback in python 2.x
    bblayercalls = []
    removed = []
    plusequals = False
    orig_bblayers = []

    def handle_bblayers_firstpass(varname, origvalue, op, newlines):
        bblayercalls.append(op)
        if op == '=':
            del orig_bblayers[:]
        orig_bblayers.extend([canonicalise_path(x) for x in origvalue.split()])
        return (origvalue, None, 2, False)

    def handle_bblayers(varname, origvalue, op, newlines):
        updated = False
        bblayers = [remove_trailing_sep(x) for x in origvalue.split()]
        if removelayers:
            for removelayer in removelayers:
                for layer in bblayers:
                    if fnmatch.fnmatch(canonicalise_path(layer), canonicalise_path(removelayer)):
                        updated = True
                        bblayers.remove(layer)
                        removed.append(removelayer)
                        break
        if addlayers and not plusequals:
            for addlayer in addlayers:
                if addlayer not in bblayers:
                    updated = True
                    bblayers.append(addlayer)
            del addlayers[:]

        if updated:
            if op == '+=' and not bblayers:
                bblayers = None
            return (bblayers, None, 2, False)
        else:
            return (origvalue, None, 2, False)

    with open(bblayers_conf, 'r') as f:
        (_, newlines) = edit_metadata(f, ['BBLAYERS'], handle_bblayers_firstpass)

    if not bblayercalls:
        raise Exception('Unable to find BBLAYERS in %s' % bblayers_conf)

    # Try to do the "smart" thing depending on how the user has laid out
    # their bblayers.conf file
    if bblayercalls.count('+=') > 1:
        plusequals = True

    removelayers_canon = [canonicalise_path(layer) for layer in removelayers]
    notadded = []
    for layer in addlayers:
        layer_canon = canonicalise_path(layer)
        if layer_canon in orig_bblayers and not layer_canon in removelayers_canon:
            notadded.append(layer)
    notadded_canon = [canonicalise_path(layer) for layer in notadded]
    addlayers[:] = [layer for layer in addlayers if canonicalise_path(layer) not in notadded_canon]

    (updated, newlines) = edit_metadata(newlines, ['BBLAYERS'], handle_bblayers)
    if addlayers:
        # Still need to add these
        for addlayer in addlayers:
            newlines.append('BBLAYERS += "%s"\n' % addlayer)
        updated = True

    if updated:
        with open(bblayers_conf, 'w') as f:
            f.writelines(newlines)

    notremoved = list(set(removelayers) - set(removed))

    return (notadded, notremoved)

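A usage sketch for edit_bblayers_conf() above (all paths hypothetical):

# Illustrative only: add one layer and drop another in a build's bblayers.conf.
notadded, notremoved = bb.utils.edit_bblayers_conf(
    '/home/user/build/conf/bblayers.conf',
    add='/home/user/poky/meta-example',
    remove='/home/user/poky/meta-obsolete')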
def get_file_layer(filename, d):
    """Determine the collection (as defined by a layer's layer.conf file) containing the specified file"""
    collections = (d.getVar('BBFILE_COLLECTIONS', True) or '').split()
    collection_res = {}
    for collection in collections:
        collection_res[collection] = d.getVar('BBFILE_PATTERN_%s' % collection, True) or ''

    def path_to_layer(path):
        # Use longest path so we handle nested layers
        matchlen = 0
        match = None
        for collection, regex in collection_res.iteritems():
            if len(regex) > matchlen and re.match(regex, path):
                matchlen = len(regex)
                match = collection
        return match

    result = None
    bbfiles = (d.getVar('BBFILES', True) or '').split()
    bbfilesmatch = False
    for bbfilesentry in bbfiles:
        if fnmatch.fnmatch(filename, bbfilesentry):
            bbfilesmatch = True
            result = path_to_layer(bbfilesentry)

    if not bbfilesmatch:
        # Probably a bbclass
        result = path_to_layer(filename)

    return result

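Sketch of a typical call to get_file_layer() above (the recipe path is hypothetical and d is a BitBake datastore):

# Illustrative only: map a recipe file back to its layer collection name.
layername = bb.utils.get_file_layer(
    '/home/user/poky/meta/recipes-core/busybox/busybox_1.23.1.bb', d)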
# Constant taken from http://linux.die.net/include/linux/prctl.h
PR_SET_PDEATHSIG = 1

class PrCtlError(Exception):
    pass

def signal_on_parent_exit(signame):
    """
    Trigger signame to be sent when the parent process dies
    """
    signum = getattr(signal, signame)
    # http://linux.die.net/man/2/prctl
    result = cdll['libc.so.6'].prctl(PR_SET_PDEATHSIG, signum)
    if result != 0:
        raise PrCtlError('prctl failed with error code %s' % result)
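How a worker child might use the prctl wrapper above so it cannot outlive its parent (a sketch; Linux-only):

# Illustrative only: ask the kernel to send SIGTERM here when the parent dies.
bb.utils.signal_on_parent_exit("SIGTERM")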
#
# Manually call the ioprio syscall. We could depend on other libs like psutil
# however this gets us enough of what we need to bitbake for now without the
# dependency
#
_unamearch = os.uname()[4]
IOPRIO_WHO_PROCESS = 1
IOPRIO_CLASS_SHIFT = 13

def ioprio_set(who, cls, value):
    NR_ioprio_set = None
    if _unamearch == "x86_64":
        NR_ioprio_set = 251
    elif _unamearch[0] == "i" and _unamearch[2:3] == "86":
        NR_ioprio_set = 289

    if NR_ioprio_set:
        ioprio = value | (cls << IOPRIO_CLASS_SHIFT)
        rc = cdll['libc.so.6'].syscall(NR_ioprio_set, IOPRIO_WHO_PROCESS, who, ioprio)
        if rc != 0:
            raise ValueError("Unable to set ioprio, syscall returned %s" % rc)
    else:
        bb.warn("Unable to set IO Prio for arch %s" % _unamearch)
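A hedged example of the raw syscall wrapper above; the class constant is an assumption taken from the Linux ioprio ABI (2 is the best-effort class, data values range 0-7) and is not defined in the hunk itself:

# Illustrative only: drop the current process to the lowest best-effort IO priority.
IOPRIO_CLASS_BE = 2    # assumed constant, not part of the code above
bb.utils.ioprio_set(os.getpid(), IOPRIO_CLASS_BE, 7)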
@@ -1,43 +0,0 @@
Behold, mortal, the origins of Beautiful Soup...
================================================

Leonard Richardson is the primary programmer.

Aaron DeVore is awesome.

Mark Pilgrim provided the encoding detection code that forms the base
of UnicodeDammit.

Thomas Kluyver and Ezio Melotti finished the work of getting Beautiful
Soup 4 working under Python 3.

Simon Willison wrote soupselect, which was used to make Beautiful Soup
support CSS selectors.

Sam Ruby helped with a lot of edge cases.

Jonathan Ellis was awarded the prestigous Beau Potage D'Or for his
work in solving the nestable tags conundrum.

An incomplete list of people have contributed patches to Beautiful
Soup:

Istvan Albert, Andrew Lin, Anthony Baxter, Andrew Boyko, Tony Chang,
Zephyr Fang, Fuzzy, Roman Gaufman, Yoni Gilad, Richie Hindle, Peteris
Krumins, Kent Johnson, Ben Last, Robert Leftwich, Staffan Malmgren,
Ksenia Marasanova, JP Moins, Adam Monsen, John Nagle, "Jon", Ed
Oskiewicz, Greg Phillips, Giles Radford, Arthur Rudolph, Marko
Samastur, Jouni Seppänen, Alexander Schmolck, Andy Theyers, Glyn
Webster, Paul Wright, Danny Yoo

An incomplete list of people who made suggestions or found bugs or
found ways to break Beautiful Soup:

Hanno Böck, Matteo Bertini, Chris Curvey, Simon Cusack, Bruce Eckel,
Matt Ernst, Michael Foord, Tom Harris, Bill de hOra, Donald Howes,
Matt Patterson, Scott Roberts, Steve Strassmann, Mike Williams,
warchild at redho dot com, Sami Kuisma, Carlos Rocha, Bob Hutchison,
Joren Mc, Michal Migurski, John Kleven, Tim Heaney, Tripp Lilley, Ed
Summers, Dennis Sutch, Chris Smith, Aaron Sweep^W Swartz, Stuart
Turner, Greg Edwards, Kevin J Kalupson, Nikos Kouremenos, Artur de
Sousa Rocha, Yichun Wei, Per Vognsen
@@ -1,26 +0,0 @@
Beautiful Soup is made available under the MIT license:

Copyright (c) 2004-2012 Leonard Richardson

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE, DAMMIT.

Beautiful Soup incorporates code from the html5lib library, which is
also made available under the MIT license.
File diff suppressed because it is too large
@@ -1,406 +0,0 @@
"""Beautiful Soup
Elixir and Tonic
"The Screen-Scraper's Friend"
http://www.crummy.com/software/BeautifulSoup/

Beautiful Soup uses a pluggable XML or HTML parser to parse a
(possibly invalid) document into a tree representation. Beautiful Soup
provides provides methods and Pythonic idioms that make it easy to
navigate, search, and modify the parse tree.

Beautiful Soup works with Python 2.6 and up. It works better if lxml
and/or html5lib is installed.

For more than you ever wanted to know about Beautiful Soup, see the
documentation:
http://www.crummy.com/software/BeautifulSoup/bs4/doc/
"""

__author__ = "Leonard Richardson (leonardr@segfault.org)"
__version__ = "4.3.2"
__copyright__ = "Copyright (c) 2004-2013 Leonard Richardson"
__license__ = "MIT"

__all__ = ['BeautifulSoup']

import os
import re
import warnings

from .builder import builder_registry, ParserRejectedMarkup
from .dammit import UnicodeDammit
from .element import (
    CData,
    Comment,
    DEFAULT_OUTPUT_ENCODING,
    Declaration,
    Doctype,
    NavigableString,
    PageElement,
    ProcessingInstruction,
    ResultSet,
    SoupStrainer,
    Tag,
    )

# The very first thing we do is give a useful error if someone is
# running this code under Python 3 without converting it.
syntax_error = u'You are trying to run the Python 2 version of Beautiful Soup under Python 3. This will not work. You need to convert the code, either by installing it (`python setup.py install`) or by running 2to3 (`2to3 -w bs4`).'

class BeautifulSoup(Tag):
    """
    This class defines the basic interface called by the tree builders.

    These methods will be called by the parser:
      reset()
      feed(markup)

    The tree builder may call these methods from its feed() implementation:
      handle_starttag(name, attrs) # See note about return value
      handle_endtag(name)
      handle_data(data) # Appends to the current data node
      endData(containerClass=NavigableString) # Ends the current data node

    No matter how complicated the underlying parser is, you should be
    able to build a tree using 'start tag' events, 'end tag' events,
    'data' events, and "done with data" events.

    If you encounter an empty-element tag (aka a self-closing tag,
    like HTML's <br> tag), call handle_starttag and then
    handle_endtag.
    """
    ROOT_TAG_NAME = u'[document]'

    # If the end-user gives no indication which tree builder they
    # want, look for one with these features.
    DEFAULT_BUILDER_FEATURES = ['html', 'fast']

    ASCII_SPACES = '\x20\x0a\x09\x0c\x0d'

    def __init__(self, markup="", features=None, builder=None,
                 parse_only=None, from_encoding=None, **kwargs):
        """The Soup object is initialized as the 'root tag', and the
        provided markup (which can be a string or a file-like object)
        is fed into the underlying parser."""

        if 'convertEntities' in kwargs:
            warnings.warn(
                "BS4 does not respect the convertEntities argument to the "
                "BeautifulSoup constructor. Entities are always converted "
                "to Unicode characters.")

        if 'markupMassage' in kwargs:
            del kwargs['markupMassage']
            warnings.warn(
                "BS4 does not respect the markupMassage argument to the "
                "BeautifulSoup constructor. The tree builder is responsible "
                "for any necessary markup massage.")

        if 'smartQuotesTo' in kwargs:
            del kwargs['smartQuotesTo']
            warnings.warn(
                "BS4 does not respect the smartQuotesTo argument to the "
                "BeautifulSoup constructor. Smart quotes are always converted "
                "to Unicode characters.")

        if 'selfClosingTags' in kwargs:
            del kwargs['selfClosingTags']
            warnings.warn(
                "BS4 does not respect the selfClosingTags argument to the "
                "BeautifulSoup constructor. The tree builder is responsible "
                "for understanding self-closing tags.")

        if 'isHTML' in kwargs:
            del kwargs['isHTML']
            warnings.warn(
                "BS4 does not respect the isHTML argument to the "
                "BeautifulSoup constructor. You can pass in features='html' "
                "or features='xml' to get a builder capable of handling "
                "one or the other.")

        def deprecated_argument(old_name, new_name):
            if old_name in kwargs:
                warnings.warn(
                    'The "%s" argument to the BeautifulSoup constructor '
                    'has been renamed to "%s."' % (old_name, new_name))
                value = kwargs[old_name]
                del kwargs[old_name]
                return value
            return None

        parse_only = parse_only or deprecated_argument(
            "parseOnlyThese", "parse_only")

        from_encoding = from_encoding or deprecated_argument(
            "fromEncoding", "from_encoding")

        if len(kwargs) > 0:
            arg = kwargs.keys().pop()
            raise TypeError(
                "__init__() got an unexpected keyword argument '%s'" % arg)

        if builder is None:
            if isinstance(features, basestring):
                features = [features]
            if features is None or len(features) == 0:
                features = self.DEFAULT_BUILDER_FEATURES
            builder_class = builder_registry.lookup(*features)
            if builder_class is None:
                raise FeatureNotFound(
                    "Couldn't find a tree builder with the features you "
                    "requested: %s. Do you need to install a parser library?"
                    % ",".join(features))
            builder = builder_class()
        self.builder = builder
        self.is_xml = builder.is_xml
        self.builder.soup = self

        self.parse_only = parse_only

        if hasattr(markup, 'read'):        # It's a file-type object.
            markup = markup.read()
        elif len(markup) <= 256:
            # Print out warnings for a couple beginner problems
            # involving passing non-markup to Beautiful Soup.
            # Beautiful Soup will still parse the input as markup,
            # just in case that's what the user really wants.
            if (isinstance(markup, unicode)
                and not os.path.supports_unicode_filenames):
                possible_filename = markup.encode("utf8")
            else:
                possible_filename = markup
            is_file = False
            try:
                is_file = os.path.exists(possible_filename)
            except Exception, e:
                # This is almost certainly a problem involving
                # characters not valid in filenames on this
                # system. Just let it go.
                pass
            if is_file:
                warnings.warn(
                    '"%s" looks like a filename, not markup. You should probably open this file and pass the filehandle into Beautiful Soup.' % markup)
            if markup[:5] == "http:" or markup[:6] == "https:":
                # TODO: This is ugly but I couldn't get it to work in
                # Python 3 otherwise.
                if ((isinstance(markup, bytes) and not b' ' in markup)
                    or (isinstance(markup, unicode) and not u' ' in markup)):
                    warnings.warn(
                        '"%s" looks like a URL. Beautiful Soup is not an HTTP client. You should probably use an HTTP client to get the document behind the URL, and feed that document to Beautiful Soup.' % markup)

        for (self.markup, self.original_encoding, self.declared_html_encoding,
             self.contains_replacement_characters) in (
                 self.builder.prepare_markup(markup, from_encoding)):
            self.reset()
            try:
                self._feed()
                break
            except ParserRejectedMarkup:
                pass

        # Clear out the markup and remove the builder's circular
        # reference to this object.
        self.markup = None
        self.builder.soup = None

    def _feed(self):
        # Convert the document to Unicode.
        self.builder.reset()

        self.builder.feed(self.markup)
        # Close out any unfinished strings and close all the open tags.
        self.endData()
        while self.currentTag.name != self.ROOT_TAG_NAME:
            self.popTag()

    def reset(self):
        Tag.__init__(self, self, self.builder, self.ROOT_TAG_NAME)
        self.hidden = 1
        self.builder.reset()
        self.current_data = []
        self.currentTag = None
        self.tagStack = []
        self.preserve_whitespace_tag_stack = []
        self.pushTag(self)

    def new_tag(self, name, namespace=None, nsprefix=None, **attrs):
        """Create a new tag associated with this soup."""
        return Tag(None, self.builder, name, namespace, nsprefix, attrs)

    def new_string(self, s, subclass=NavigableString):
        """Create a new NavigableString associated with this soup."""
        navigable = subclass(s)
        navigable.setup()
        return navigable

    def insert_before(self, successor):
        raise NotImplementedError("BeautifulSoup objects don't support insert_before().")

    def insert_after(self, successor):
        raise NotImplementedError("BeautifulSoup objects don't support insert_after().")

    def popTag(self):
        tag = self.tagStack.pop()
        if self.preserve_whitespace_tag_stack and tag == self.preserve_whitespace_tag_stack[-1]:
            self.preserve_whitespace_tag_stack.pop()
        #print "Pop", tag.name
        if self.tagStack:
            self.currentTag = self.tagStack[-1]
        return self.currentTag

    def pushTag(self, tag):
        #print "Push", tag.name
        if self.currentTag:
            self.currentTag.contents.append(tag)
        self.tagStack.append(tag)
        self.currentTag = self.tagStack[-1]
        if tag.name in self.builder.preserve_whitespace_tags:
            self.preserve_whitespace_tag_stack.append(tag)

    def endData(self, containerClass=NavigableString):
        if self.current_data:
            current_data = u''.join(self.current_data)
            # If whitespace is not preserved, and this string contains
            # nothing but ASCII spaces, replace it with a single space
            # or newline.
            if not self.preserve_whitespace_tag_stack:
                strippable = True
                for i in current_data:
                    if i not in self.ASCII_SPACES:
                        strippable = False
                        break
                if strippable:
                    if '\n' in current_data:
                        current_data = '\n'
                    else:
                        current_data = ' '

            # Reset the data collector.
            self.current_data = []

            # Should we add this string to the tree at all?
            if self.parse_only and len(self.tagStack) <= 1 and \
                   (not self.parse_only.text or \
                    not self.parse_only.search(current_data)):
                return

            o = containerClass(current_data)
            self.object_was_parsed(o)

    def object_was_parsed(self, o, parent=None, most_recent_element=None):
        """Add an object to the parse tree."""
        parent = parent or self.currentTag
        most_recent_element = most_recent_element or self._most_recent_element
        o.setup(parent, most_recent_element)

        if most_recent_element is not None:
            most_recent_element.next_element = o
        self._most_recent_element = o
        parent.contents.append(o)

    def _popToTag(self, name, nsprefix=None, inclusivePop=True):
        """Pops the tag stack up to and including the most recent
        instance of the given tag. If inclusivePop is false, pops the tag
        stack up to but *not* including the most recent instqance of
        the given tag."""
        #print "Popping to %s" % name
        if name == self.ROOT_TAG_NAME:
            # The BeautifulSoup object itself can never be popped.
            return

        most_recently_popped = None

        stack_size = len(self.tagStack)
        for i in range(stack_size - 1, 0, -1):
            t = self.tagStack[i]
            if (name == t.name and nsprefix == t.prefix):
                if inclusivePop:
                    most_recently_popped = self.popTag()
                break
            most_recently_popped = self.popTag()

        return most_recently_popped

    def handle_starttag(self, name, namespace, nsprefix, attrs):
        """Push a start tag on to the stack.

        If this method returns None, the tag was rejected by the
        SoupStrainer. You should proceed as if the tag had not occured
        in the document. For instance, if this was a self-closing tag,
        don't call handle_endtag.
        """

        # print "Start tag %s: %s" % (name, attrs)
        self.endData()

        if (self.parse_only and len(self.tagStack) <= 1
            and (self.parse_only.text
                 or not self.parse_only.search_tag(name, attrs))):
            return None

        tag = Tag(self, self.builder, name, namespace, nsprefix, attrs,
                  self.currentTag, self._most_recent_element)
        if tag is None:
            return tag
        if self._most_recent_element:
            self._most_recent_element.next_element = tag
        self._most_recent_element = tag
        self.pushTag(tag)
        return tag

    def handle_endtag(self, name, nsprefix=None):
        #print "End tag: " + name
        self.endData()
        self._popToTag(name, nsprefix)

    def handle_data(self, data):
        self.current_data.append(data)

    def decode(self, pretty_print=False,
               eventual_encoding=DEFAULT_OUTPUT_ENCODING,
               formatter="minimal"):
        """Returns a string or Unicode representation of this document.
        To get Unicode, pass None for encoding."""

        if self.is_xml:
            # Print the XML declaration
            encoding_part = ''
            if eventual_encoding != None:
                encoding_part = ' encoding="%s"' % eventual_encoding
            prefix = u'<?xml version="1.0"%s?>\n' % encoding_part
        else:
            prefix = u''
        if not pretty_print:
            indent_level = None
        else:
            indent_level = 0
        return prefix + super(BeautifulSoup, self).decode(
            indent_level, eventual_encoding, formatter)

# Alias to make it easier to type import: 'from bs4 import _soup'
_s = BeautifulSoup
_soup = BeautifulSoup

class BeautifulStoneSoup(BeautifulSoup):
    """Deprecated interface to an XML parser."""

    def __init__(self, *args, **kwargs):
        kwargs['features'] = 'xml'
        warnings.warn(
            'The BeautifulStoneSoup class is deprecated. Instead of using '
            'it, pass features="xml" into the BeautifulSoup constructor.')
        super(BeautifulStoneSoup, self).__init__(*args, **kwargs)


class StopParsing(Exception):
    pass

class FeatureNotFound(ValueError):
    pass


#By default, act as an HTML pretty-printer.
if __name__ == '__main__':
    import sys
    soup = BeautifulSoup(sys.stdin)
    print soup.prettify()
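For orientation, a minimal use of the (Python 2 era) module removed above:

# Illustrative only: parse a fragment with whichever tree builder is available.
from bs4 import BeautifulSoup
soup = BeautifulSoup("<html><body><p>Hello</p></body></html>")
print soup.p.string    # -> Hello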
@@ -1,321 +0,0 @@
from collections import defaultdict
import itertools
import sys
from bs4.element import (
    CharsetMetaAttributeValue,
    ContentMetaAttributeValue,
    whitespace_re
    )

__all__ = [
    'HTMLTreeBuilder',
    'SAXTreeBuilder',
    'TreeBuilder',
    'TreeBuilderRegistry',
    ]

# Some useful features for a TreeBuilder to have.
FAST = 'fast'
PERMISSIVE = 'permissive'
STRICT = 'strict'
XML = 'xml'
HTML = 'html'
HTML_5 = 'html5'


class TreeBuilderRegistry(object):

    def __init__(self):
        self.builders_for_feature = defaultdict(list)
        self.builders = []

    def register(self, treebuilder_class):
        """Register a treebuilder based on its advertised features."""
        for feature in treebuilder_class.features:
            self.builders_for_feature[feature].insert(0, treebuilder_class)
        self.builders.insert(0, treebuilder_class)

    def lookup(self, *features):
        if len(self.builders) == 0:
            # There are no builders at all.
            return None

        if len(features) == 0:
            # They didn't ask for any features. Give them the most
            # recently registered builder.
            return self.builders[0]

        # Go down the list of features in order, and eliminate any builders
        # that don't match every feature.
        features = list(features)
        features.reverse()
        candidates = None
        candidate_set = None
        while len(features) > 0:
            feature = features.pop()
            we_have_the_feature = self.builders_for_feature.get(feature, [])
            if len(we_have_the_feature) > 0:
                if candidates is None:
                    candidates = we_have_the_feature
                    candidate_set = set(candidates)
                else:
                    # Eliminate any candidates that don't have this feature.
                    candidate_set = candidate_set.intersection(
                        set(we_have_the_feature))

        # The only valid candidates are the ones in candidate_set.
        # Go through the original list of candidates and pick the first one
        # that's in candidate_set.
        if candidate_set is None:
            return None
        for candidate in candidates:
            if candidate in candidate_set:
                return candidate
        return None

# The BeautifulSoup class will take feature lists from developers and use them
# to look up builders in this registry.
builder_registry = TreeBuilderRegistry()

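A sketch of how the registry above is consulted; this mirrors what BeautifulSoup.__init__ does with its features argument:

# Illustrative only: resolve the best available builder for the requested features.
builder_class = builder_registry.lookup('html', 'fast')
if builder_class is not None:
    builder = builder_class()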
class TreeBuilder(object):
    """Turn a document into a Beautiful Soup object tree."""

    features = []

    is_xml = False
    preserve_whitespace_tags = set()
    empty_element_tags = None # A tag will be considered an empty-element
                              # tag when and only when it has no contents.

    # A value for these tag/attribute combinations is a space- or
    # comma-separated list of CDATA, rather than a single CDATA.
    cdata_list_attributes = {}


    def __init__(self):
        self.soup = None

    def reset(self):
        pass

    def can_be_empty_element(self, tag_name):
        """Might a tag with this name be an empty-element tag?

        The final markup may or may not actually present this tag as
        self-closing.

        For instance: an HTMLBuilder does not consider a <p> tag to be
        an empty-element tag (it's not in
        HTMLBuilder.empty_element_tags). This means an empty <p> tag
        will be presented as "<p></p>", not "<p />".

        The default implementation has no opinion about which tags are
        empty-element tags, so a tag will be presented as an
        empty-element tag if and only if it has no contents.
        "<foo></foo>" will become "<foo />", and "<foo>bar</foo>" will
        be left alone.
        """
        if self.empty_element_tags is None:
            return True
        return tag_name in self.empty_element_tags

    def feed(self, markup):
        raise NotImplementedError()

    def prepare_markup(self, markup, user_specified_encoding=None,
                       document_declared_encoding=None):
        return markup, None, None, False

    def test_fragment_to_document(self, fragment):
        """Wrap an HTML fragment to make it look like a document.

        Different parsers do this differently. For instance, lxml
        introduces an empty <head> tag, and html5lib
        doesn't. Abstracting this away lets us write simple tests
        which run HTML fragments through the parser and compare the
        results against other HTML fragments.

        This method should not be used outside of tests.
        """
        return fragment

    def set_up_substitutions(self, tag):
        return False

    def _replace_cdata_list_attribute_values(self, tag_name, attrs):
        """Replaces class="foo bar" with class=["foo", "bar"]

        Modifies its input in place.
        """
        if not attrs:
            return attrs
        if self.cdata_list_attributes:
            universal = self.cdata_list_attributes.get('*', [])
            tag_specific = self.cdata_list_attributes.get(
                tag_name.lower(), None)
            for attr in attrs.keys():
                if attr in universal or (tag_specific and attr in tag_specific):
                    # We have a "class"-type attribute whose string
                    # value is a whitespace-separated list of
                    # values. Split it into a list.
                    value = attrs[attr]
                    if isinstance(value, basestring):
                        values = whitespace_re.split(value)
                    else:
                        # html5lib sometimes calls setAttributes twice
                        # for the same tag when rearranging the parse
                        # tree. On the second call the attribute value
                        # here is already a list. If this happens,
                        # leave the value alone rather than trying to
                        # split it again.
                        values = value
                    attrs[attr] = values
        return attrs

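An example of the in-place transformation described above, using the HTMLTreeBuilder subclass defined further down (the attribute values are hypothetical):

# Illustrative only: multi-valued HTML attributes are split into lists.
builder = HTMLTreeBuilder()
attrs = builder._replace_cdata_list_attribute_values('td', {'headers': 'h1 h2'})
# attrs is now {'headers': ['h1', 'h2']}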
class SAXTreeBuilder(TreeBuilder):
    """A Beautiful Soup treebuilder that listens for SAX events."""

    def feed(self, markup):
        raise NotImplementedError()

    def close(self):
        pass

    def startElement(self, name, attrs):
        attrs = dict((key[1], value) for key, value in list(attrs.items()))
        #print "Start %s, %r" % (name, attrs)
        self.soup.handle_starttag(name, attrs)

    def endElement(self, name):
        #print "End %s" % name
        self.soup.handle_endtag(name)

    def startElementNS(self, nsTuple, nodeName, attrs):
        # Throw away (ns, nodeName) for now.
        self.startElement(nodeName, attrs)

    def endElementNS(self, nsTuple, nodeName):
        # Throw away (ns, nodeName) for now.
        self.endElement(nodeName)
        #handler.endElementNS((ns, node.nodeName), node.nodeName)

    def startPrefixMapping(self, prefix, nodeValue):
        # Ignore the prefix for now.
        pass

    def endPrefixMapping(self, prefix):
        # Ignore the prefix for now.
        # handler.endPrefixMapping(prefix)
        pass

    def characters(self, content):
        self.soup.handle_data(content)

    def startDocument(self):
        pass

    def endDocument(self):
        pass


class HTMLTreeBuilder(TreeBuilder):
    """This TreeBuilder knows facts about HTML.

    Such as which tags are empty-element tags.
    """

    preserve_whitespace_tags = set(['pre', 'textarea'])
    empty_element_tags = set(['br' , 'hr', 'input', 'img', 'meta',
                              'spacer', 'link', 'frame', 'base'])

    # The HTML standard defines these attributes as containing a
    # space-separated list of values, not a single value. That is,
    # class="foo bar" means that the 'class' attribute has two values,
    # 'foo' and 'bar', not the single value 'foo bar'. When we
    # encounter one of these attributes, we will parse its value into
    # a list of values if possible. Upon output, the list will be
    # converted back into a string.
    cdata_list_attributes = {
        "*" : ['class', 'accesskey', 'dropzone'],
        "a" : ['rel', 'rev'],
        "link" : ['rel', 'rev'],
        "td" : ["headers"],
        "th" : ["headers"],
        "td" : ["headers"],
        "form" : ["accept-charset"],
        "object" : ["archive"],

        # These are HTML5 specific, as are *.accesskey and *.dropzone above.
        "area" : ["rel"],
        "icon" : ["sizes"],
        "iframe" : ["sandbox"],
        "output" : ["for"],
        }

    def set_up_substitutions(self, tag):
        # We are only interested in <meta> tags
        if tag.name != 'meta':
            return False

        http_equiv = tag.get('http-equiv')
        content = tag.get('content')
        charset = tag.get('charset')

        # We are interested in <meta> tags that say what encoding the
        # document was originally in. This means HTML 5-style <meta>
        # tags that provide the "charset" attribute. It also means
        # HTML 4-style <meta> tags that provide the "content"
        # attribute and have "http-equiv" set to "content-type".
        #
        # In both cases we will replace the value of the appropriate
        # attribute with a standin object that can take on any
        # encoding.
        meta_encoding = None
        if charset is not None:
            # HTML 5 style:
            # <meta charset="utf8">
            meta_encoding = charset
            tag['charset'] = CharsetMetaAttributeValue(charset)

        elif (content is not None and http_equiv is not None
              and http_equiv.lower() == 'content-type'):
            # HTML 4 style:
            # <meta http-equiv="content-type" content="text/html; charset=utf8">
            tag['content'] = ContentMetaAttributeValue(content)

        return (meta_encoding is not None)

def register_treebuilders_from(module):
    """Copy TreeBuilders from the given module into this module."""
    # I'm fairly sure this is not the best way to do this.
    this_module = sys.modules['bs4.builder']
    for name in module.__all__:
        obj = getattr(module, name)

        if issubclass(obj, TreeBuilder):
            setattr(this_module, name, obj)
            this_module.__all__.append(name)
            # Register the builder while we're at it.
            this_module.builder_registry.register(obj)

class ParserRejectedMarkup(Exception):
    pass

# Builders are registered in reverse order of priority, so that custom
# builder registrations will take precedence. In general, we want lxml
# to take precedence over html5lib, because it's faster. And we only
# want to use HTMLParser as a last result.
from . import _htmlparser
register_treebuilders_from(_htmlparser)
try:
    from . import _html5lib
    register_treebuilders_from(_html5lib)
except ImportError:
    # They don't have html5lib installed.
    pass
try:
    from . import _lxml
    register_treebuilders_from(_lxml)
except ImportError:
    # They don't have lxml installed.
    pass
@@ -1,285 +0,0 @@
__all__ = [
    'HTML5TreeBuilder',
    ]

import warnings
from bs4.builder import (
    PERMISSIVE,
    HTML,
    HTML_5,
    HTMLTreeBuilder,
    )
from bs4.element import NamespacedAttribute
import html5lib
from html5lib.constants import namespaces
from bs4.element import (
    Comment,
    Doctype,
    NavigableString,
    Tag,
    )

class HTML5TreeBuilder(HTMLTreeBuilder):
    """Use html5lib to build a tree."""

    features = ['html5lib', PERMISSIVE, HTML_5, HTML]

    def prepare_markup(self, markup, user_specified_encoding):
        # Store the user-specified encoding for use later on.
        self.user_specified_encoding = user_specified_encoding
        yield (markup, None, None, False)

    # These methods are defined by Beautiful Soup.
    def feed(self, markup):
        if self.soup.parse_only is not None:
            warnings.warn("You provided a value for parse_only, but the html5lib tree builder doesn't support parse_only. The entire document will be parsed.")
        parser = html5lib.HTMLParser(tree=self.create_treebuilder)
        doc = parser.parse(markup, encoding=self.user_specified_encoding)

        # Set the character encoding detected by the tokenizer.
        if isinstance(markup, unicode):
            # We need to special-case this because html5lib sets
            # charEncoding to UTF-8 if it gets Unicode input.
            doc.original_encoding = None
        else:
            doc.original_encoding = parser.tokenizer.stream.charEncoding[0]

    def create_treebuilder(self, namespaceHTMLElements):
        self.underlying_builder = TreeBuilderForHtml5lib(
            self.soup, namespaceHTMLElements)
        return self.underlying_builder

    def test_fragment_to_document(self, fragment):
        """See `TreeBuilder`."""
        return u'<html><head></head><body>%s</body></html>' % fragment


class TreeBuilderForHtml5lib(html5lib.treebuilders._base.TreeBuilder):

    def __init__(self, soup, namespaceHTMLElements):
        self.soup = soup
        super(TreeBuilderForHtml5lib, self).__init__(namespaceHTMLElements)

    def documentClass(self):
        self.soup.reset()
        return Element(self.soup, self.soup, None)

    def insertDoctype(self, token):
        name = token["name"]
        publicId = token["publicId"]
        systemId = token["systemId"]

        doctype = Doctype.for_name_and_ids(name, publicId, systemId)
        self.soup.object_was_parsed(doctype)

    def elementClass(self, name, namespace):
        tag = self.soup.new_tag(name, namespace)
        return Element(tag, self.soup, namespace)

    def commentClass(self, data):
        return TextNode(Comment(data), self.soup)

    def fragmentClass(self):
        self.soup = BeautifulSoup("")
        self.soup.name = "[document_fragment]"
        return Element(self.soup, self.soup, None)

    def appendChild(self, node):
        # XXX This code is not covered by the BS4 tests.
        self.soup.append(node.element)

    def getDocument(self):
        return self.soup

    def getFragment(self):
        return html5lib.treebuilders._base.TreeBuilder.getFragment(self).element

class AttrList(object):
    def __init__(self, element):
        self.element = element
        self.attrs = dict(self.element.attrs)
    def __iter__(self):
        return list(self.attrs.items()).__iter__()
    def __setitem__(self, name, value):
        "set attr", name, value
        self.element[name] = value
    def items(self):
        return list(self.attrs.items())
    def keys(self):
        return list(self.attrs.keys())
    def __len__(self):
        return len(self.attrs)
    def __getitem__(self, name):
        return self.attrs[name]
    def __contains__(self, name):
        return name in list(self.attrs.keys())


class Element(html5lib.treebuilders._base.Node):
    def __init__(self, element, soup, namespace):
        html5lib.treebuilders._base.Node.__init__(self, element.name)
        self.element = element
        self.soup = soup
        self.namespace = namespace

    def appendChild(self, node):
        string_child = child = None
        if isinstance(node, basestring):
            # Some other piece of code decided to pass in a string
            # instead of creating a TextElement object to contain the
            # string.
            string_child = child = node
        elif isinstance(node, Tag):
            # Some other piece of code decided to pass in a Tag
            # instead of creating an Element object to contain the
            # Tag.
            child = node
        elif node.element.__class__ == NavigableString:
            string_child = child = node.element
        else:
            child = node.element

        if not isinstance(child, basestring) and child.parent is not None:
            node.element.extract()

        if (string_child and self.element.contents
            and self.element.contents[-1].__class__ == NavigableString):
            # We are appending a string onto another string.
            # TODO This has O(n^2) performance, for input like
            # "a</a>a</a>a</a>..."
            old_element = self.element.contents[-1]
            new_element = self.soup.new_string(old_element + string_child)
            old_element.replace_with(new_element)
            self.soup._most_recent_element = new_element
        else:
            if isinstance(node, basestring):
                # Create a brand new NavigableString from this string.
                child = self.soup.new_string(node)

            # Tell Beautiful Soup to act as if it parsed this element
            # immediately after the parent's last descendant. (Or
            # immediately after the parent, if it has no children.)
            if self.element.contents:
                most_recent_element = self.element._last_descendant(False)
            else:
                most_recent_element = self.element

            self.soup.object_was_parsed(
                child, parent=self.element,
                most_recent_element=most_recent_element)

    def getAttributes(self):
        return AttrList(self.element)

    def setAttributes(self, attributes):
        if attributes is not None and len(attributes) > 0:

            converted_attributes = []
            for name, value in list(attributes.items()):
                if isinstance(name, tuple):
                    new_name = NamespacedAttribute(*name)
                    del attributes[name]
                    attributes[new_name] = value

            self.soup.builder._replace_cdata_list_attribute_values(
                self.name, attributes)
            for name, value in attributes.items():
                self.element[name] = value

            # The attributes may contain variables that need substitution.
            # Call set_up_substitutions manually.
            #
            # The Tag constructor called this method when the Tag was created,
            # but we just set/changed the attributes, so call it again.
            self.soup.builder.set_up_substitutions(self.element)
    attributes = property(getAttributes, setAttributes)

    def insertText(self, data, insertBefore=None):
        if insertBefore:
            text = TextNode(self.soup.new_string(data), self.soup)
            self.insertBefore(data, insertBefore)
        else:
            self.appendChild(data)

    def insertBefore(self, node, refNode):
        index = self.element.index(refNode.element)
        if (node.element.__class__ == NavigableString and self.element.contents
            and self.element.contents[index-1].__class__ == NavigableString):
            # (See comments in appendChild)
            old_node = self.element.contents[index-1]
            new_str = self.soup.new_string(old_node + node.element)
            old_node.replace_with(new_str)
        else:
            self.element.insert(index, node.element)
            node.parent = self

    def removeChild(self, node):
        node.element.extract()

    def reparentChildren(self, new_parent):
        """Move all of this tag's children into another tag."""
        element = self.element
        new_parent_element = new_parent.element
        # Determine what this tag's next_element will be once all the children
        # are removed.
        final_next_element = element.next_sibling

        new_parents_last_descendant = new_parent_element._last_descendant(False, False)
        if len(new_parent_element.contents) > 0:
            # The new parent already contains children. We will be
            # appending this tag's children to the end.
            new_parents_last_child = new_parent_element.contents[-1]
            new_parents_last_descendant_next_element = new_parents_last_descendant.next_element
        else:
            # The new parent contains no children.
            new_parents_last_child = None
            new_parents_last_descendant_next_element = new_parent_element.next_element

        to_append = element.contents
        append_after = new_parent.element.contents
        if len(to_append) > 0:
            # Set the first child's previous_element and previous_sibling
            # to elements within the new parent
            first_child = to_append[0]
            first_child.previous_element = new_parents_last_descendant
            first_child.previous_sibling = new_parents_last_child

            # Fix the last child's next_element and next_sibling
            last_child = to_append[-1]
            last_child.next_element = new_parents_last_descendant_next_element
            last_child.next_sibling = None

        for child in to_append:
            child.parent = new_parent_element
            new_parent_element.contents.append(child)

        # Now that this element has no children, change its .next_element.
        element.contents = []
        element.next_element = final_next_element

    def cloneNode(self):
        tag = self.soup.new_tag(self.element.name, self.namespace)
        node = Element(tag, self.soup, self.namespace)
        for key,value in self.attributes:
            node.attributes[key] = value
        return node

    def hasContent(self):
        return self.element.contents

    def getNameTuple(self):
        if self.namespace == None:
            return namespaces["html"], self.name
        else:
            return self.namespace, self.name

    nameTuple = property(getNameTuple)

class TextNode(Element):
    def __init__(self, element, soup):
        html5lib.treebuilders._base.Node.__init__(self, None)
        self.element = element
        self.soup = soup

    def cloneNode(self):
|
||||
raise NotImplementedError
|
||||
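
# A minimal usage sketch, assuming bs4 and html5lib are installed: the
# classes above are not used directly; requesting the "html5lib" feature
# from the BeautifulSoup constructor selects this tree builder.
if __name__ == "__main__":
    from bs4 import BeautifulSoup
    soup = BeautifulSoup("<p>Some <b>bad<i>markup", "html5lib")
    # html5lib closes the unbalanced tags much as a browser would.
    print(soup.prettify())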
@@ -1,258 +0,0 @@
"""Use the HTMLParser library to parse HTML files that aren't too bad."""

__all__ = [
    'HTMLParserTreeBuilder',
    ]

from HTMLParser import (
    HTMLParser,
    HTMLParseError,
    )
import sys
import warnings

# Starting in Python 3.2, the HTMLParser constructor takes a 'strict'
# argument, which we'd like to set to False. Unfortunately,
# http://bugs.python.org/issue13273 makes strict=True a better bet
# before Python 3.2.3.
#
# At the end of this file, we monkeypatch HTMLParser so that
# strict=True works well on Python 3.2.2.
major, minor, release = sys.version_info[:3]
CONSTRUCTOR_TAKES_STRICT = (
    major > 3
    or (major == 3 and minor > 2)
    or (major == 3 and minor == 2 and release >= 3))

from bs4.element import (
    CData,
    Comment,
    Declaration,
    Doctype,
    ProcessingInstruction,
    )
from bs4.dammit import EntitySubstitution, UnicodeDammit

from bs4.builder import (
    HTML,
    HTMLTreeBuilder,
    STRICT,
    )


HTMLPARSER = 'html.parser'

class BeautifulSoupHTMLParser(HTMLParser):
    def handle_starttag(self, name, attrs):
        # XXX namespace
        attr_dict = {}
        for key, value in attrs:
            # Change None attribute values to the empty string
            # for consistency with the other tree builders.
            if value is None:
                value = ''
            attr_dict[key] = value
        self.soup.handle_starttag(name, None, None, attr_dict)

    def handle_endtag(self, name):
        self.soup.handle_endtag(name)

    def handle_data(self, data):
        self.soup.handle_data(data)

    def handle_charref(self, name):
        # XXX workaround for a bug in HTMLParser. Remove this once
        # it's fixed.
        if name.startswith('x'):
            real_name = int(name.lstrip('x'), 16)
        elif name.startswith('X'):
            real_name = int(name.lstrip('X'), 16)
        else:
            real_name = int(name)

        try:
            data = unichr(real_name)
        except (ValueError, OverflowError), e:
            data = u"\N{REPLACEMENT CHARACTER}"

        self.handle_data(data)

    def handle_entityref(self, name):
        character = EntitySubstitution.HTML_ENTITY_TO_CHARACTER.get(name)
        if character is not None:
            data = character
        else:
            data = "&%s;" % name
        self.handle_data(data)

    def handle_comment(self, data):
        self.soup.endData()
        self.soup.handle_data(data)
        self.soup.endData(Comment)

    def handle_decl(self, data):
        self.soup.endData()
        if data.startswith("DOCTYPE "):
            data = data[len("DOCTYPE "):]
        elif data == 'DOCTYPE':
            # i.e. "<!DOCTYPE>"
            data = ''
        self.soup.handle_data(data)
        self.soup.endData(Doctype)

    def unknown_decl(self, data):
        if data.upper().startswith('CDATA['):
            cls = CData
            data = data[len('CDATA['):]
        else:
            cls = Declaration
        self.soup.endData()
        self.soup.handle_data(data)
        self.soup.endData(cls)

    def handle_pi(self, data):
        self.soup.endData()
        if data.endswith("?") and data.lower().startswith("xml"):
            # "An XHTML processing instruction using the trailing '?'
            # will cause the '?' to be included in data." - HTMLParser
            # docs.
            #
            # Strip the question mark so we don't end up with two
            # question marks.
            data = data[:-1]
        self.soup.handle_data(data)
        self.soup.endData(ProcessingInstruction)


class HTMLParserTreeBuilder(HTMLTreeBuilder):

    is_xml = False
    features = [HTML, STRICT, HTMLPARSER]

    def __init__(self, *args, **kwargs):
        if CONSTRUCTOR_TAKES_STRICT:
            kwargs['strict'] = False
        self.parser_args = (args, kwargs)

    def prepare_markup(self, markup, user_specified_encoding=None,
                       document_declared_encoding=None):
        """
        :return: A 4-tuple (markup, original encoding, encoding
            declared within markup, whether any characters had to be
            replaced with REPLACEMENT CHARACTER).
        """
        if isinstance(markup, unicode):
            yield (markup, None, None, False)
            return

        try_encodings = [user_specified_encoding, document_declared_encoding]
        dammit = UnicodeDammit(markup, try_encodings, is_html=True)
        yield (dammit.markup, dammit.original_encoding,
               dammit.declared_html_encoding,
               dammit.contains_replacement_characters)

    def feed(self, markup):
        args, kwargs = self.parser_args
        parser = BeautifulSoupHTMLParser(*args, **kwargs)
        parser.soup = self.soup
        try:
            parser.feed(markup)
        except HTMLParseError, e:
            warnings.warn(RuntimeWarning(
                "Python's built-in HTMLParser cannot parse the given document. This is not a bug in Beautiful Soup. The best solution is to install an external parser (lxml or html5lib), and use Beautiful Soup with that parser. See http://www.crummy.com/software/BeautifulSoup/bs4/doc/#installing-a-parser for help."))
            raise e

# Patch 3.2 versions of HTMLParser earlier than 3.2.3 to use some
# 3.2.3 code. This ensures they don't treat markup like <p></p> as a
# string.
#
# XXX This code can be removed once most Python 3 users are on 3.2.3.
if major == 3 and minor == 2 and not CONSTRUCTOR_TAKES_STRICT:
    import re
    attrfind_tolerant = re.compile(
        r'\s*((?<=[\'"\s])[^\s/>][^\s/=>]*)(\s*=+\s*'
        r'(\'[^\']*\'|"[^"]*"|(?![\'"])[^>\s]*))?')
    HTMLParserTreeBuilder.attrfind_tolerant = attrfind_tolerant

    locatestarttagend = re.compile(r"""
  <[a-zA-Z][-.a-zA-Z0-9:_]*          # tag name
  (?:\s+                             # whitespace before attribute name
    (?:[a-zA-Z_][-.:a-zA-Z0-9_]*     # attribute name
      (?:\s*=\s*                     # value indicator
        (?:'[^']*'                   # LITA-enclosed value
          |\"[^\"]*\"                # LIT-enclosed value
          |[^'\">\s]+                # bare value
         )
       )?
     )
   )*
  \s*                                # trailing whitespace
""", re.VERBOSE)
    BeautifulSoupHTMLParser.locatestarttagend = locatestarttagend

    from html.parser import tagfind, attrfind

    def parse_starttag(self, i):
        self.__starttag_text = None
        endpos = self.check_for_whole_start_tag(i)
        if endpos < 0:
            return endpos
        rawdata = self.rawdata
        self.__starttag_text = rawdata[i:endpos]

        # Now parse the data between i+1 and j into a tag and attrs
        attrs = []
        match = tagfind.match(rawdata, i+1)
        assert match, 'unexpected call to parse_starttag()'
        k = match.end()
        self.lasttag = tag = rawdata[i+1:k].lower()
        while k < endpos:
            if self.strict:
                m = attrfind.match(rawdata, k)
            else:
                m = attrfind_tolerant.match(rawdata, k)
            if not m:
                break
            attrname, rest, attrvalue = m.group(1, 2, 3)
            if not rest:
                attrvalue = None
            elif attrvalue[:1] == '\'' == attrvalue[-1:] or \
                 attrvalue[:1] == '"' == attrvalue[-1:]:
                attrvalue = attrvalue[1:-1]
            if attrvalue:
                attrvalue = self.unescape(attrvalue)
            attrs.append((attrname.lower(), attrvalue))
            k = m.end()

        end = rawdata[k:endpos].strip()
        if end not in (">", "/>"):
            lineno, offset = self.getpos()
            if "\n" in self.__starttag_text:
                lineno = lineno + self.__starttag_text.count("\n")
                offset = len(self.__starttag_text) \
                         - self.__starttag_text.rfind("\n")
            else:
                offset = offset + len(self.__starttag_text)
            if self.strict:
                self.error("junk characters in start tag: %r"
                           % (rawdata[k:endpos][:20],))
            self.handle_data(rawdata[i:endpos])
            return endpos
        if end.endswith('/>'):
            # XHTML-style empty tag: <span attr="value" />
            self.handle_startendtag(tag, attrs)
        else:
            self.handle_starttag(tag, attrs)
            if tag in self.CDATA_CONTENT_ELEMENTS:
                self.set_cdata_mode(tag)
        return endpos

    def set_cdata_mode(self, elem):
        self.cdata_elem = elem.lower()
        self.interesting = re.compile(r'</\s*%s\s*>' % self.cdata_elem, re.I)

    BeautifulSoupHTMLParser.parse_starttag = parse_starttag
    BeautifulSoupHTMLParser.set_cdata_mode = set_cdata_mode

    CONSTRUCTOR_TAKES_STRICT = True
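
# A minimal usage sketch, assuming bs4 is installed: HTMLParserTreeBuilder
# is registered under the "html.parser" feature, so it is normally reached
# through the BeautifulSoup constructor rather than instantiated directly.
if __name__ == "__main__":
    from bs4 import BeautifulSoup
    soup = BeautifulSoup("<a href='/link'>text</a>", "html.parser")
    print(soup.a["href"])  # prints: /link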
@@ -1,233 +0,0 @@
__all__ = [
    'LXMLTreeBuilderForXML',
    'LXMLTreeBuilder',
    ]

from io import BytesIO
from StringIO import StringIO
import collections
from lxml import etree
from bs4.element import Comment, Doctype, NamespacedAttribute
from bs4.builder import (
    FAST,
    HTML,
    HTMLTreeBuilder,
    PERMISSIVE,
    ParserRejectedMarkup,
    TreeBuilder,
    XML)
from bs4.dammit import EncodingDetector

LXML = 'lxml'

class LXMLTreeBuilderForXML(TreeBuilder):
    DEFAULT_PARSER_CLASS = etree.XMLParser

    is_xml = True

    # Well, it's permissive by XML parser standards.
    features = [LXML, XML, FAST, PERMISSIVE]

    CHUNK_SIZE = 512

    # This namespace mapping is specified in the XML Namespace
    # standard.
    DEFAULT_NSMAPS = {'http://www.w3.org/XML/1998/namespace' : "xml"}

    def default_parser(self, encoding):
        # This can either return a parser object or a class, which
        # will be instantiated with default arguments.
        if self._default_parser is not None:
            return self._default_parser
        return etree.XMLParser(
            target=self, strip_cdata=False, recover=True, encoding=encoding)

    def parser_for(self, encoding):
        # Use the default parser.
        parser = self.default_parser(encoding)

        if isinstance(parser, collections.Callable):
            # Instantiate the parser with default arguments
            parser = parser(target=self, strip_cdata=False, encoding=encoding)
        return parser

    def __init__(self, parser=None, empty_element_tags=None):
        # TODO: Issue a warning if parser is present but not a
        # callable, since that means there's no way to create new
        # parsers for different encodings.
        self._default_parser = parser
        if empty_element_tags is not None:
            self.empty_element_tags = set(empty_element_tags)
        self.soup = None
        self.nsmaps = [self.DEFAULT_NSMAPS]

    def _getNsTag(self, tag):
        # Split the namespace URL out of a fully-qualified lxml tag
        # name. Copied from lxml's src/lxml/sax.py.
        if tag[0] == '{':
            return tuple(tag[1:].split('}', 1))
        else:
            return (None, tag)

    def prepare_markup(self, markup, user_specified_encoding=None,
                       document_declared_encoding=None):
        """
        :yield: A series of 4-tuples.
         (markup, encoding, declared encoding,
          has undergone character replacement)

        Each 4-tuple represents a strategy for parsing the document.
        """
        if isinstance(markup, unicode):
            # We were given Unicode. Maybe lxml can parse Unicode on
            # this system?
            yield markup, None, document_declared_encoding, False

        if isinstance(markup, unicode):
            # No, apparently not. Convert the Unicode to UTF-8 and
            # tell lxml to parse it as UTF-8.
            yield (markup.encode("utf8"), "utf8",
                   document_declared_encoding, False)

        # Instead of using UnicodeDammit to convert the bytestring to
        # Unicode using different encodings, use EncodingDetector to
        # iterate over the encodings, and tell lxml to try to parse
        # the document as each one in turn.
        is_html = not self.is_xml
        try_encodings = [user_specified_encoding, document_declared_encoding]
        detector = EncodingDetector(markup, try_encodings, is_html)
        for encoding in detector.encodings:
            yield (detector.markup, encoding, document_declared_encoding, False)

    def feed(self, markup):
        if isinstance(markup, bytes):
            markup = BytesIO(markup)
        elif isinstance(markup, unicode):
            markup = StringIO(markup)

        # Call feed() at least once, even if the markup is empty,
        # or the parser won't be initialized.
        data = markup.read(self.CHUNK_SIZE)
        try:
            self.parser = self.parser_for(self.soup.original_encoding)
            self.parser.feed(data)
            while len(data) != 0:
                # Now call feed() on the rest of the data, chunk by chunk.
                data = markup.read(self.CHUNK_SIZE)
                if len(data) != 0:
                    self.parser.feed(data)
            self.parser.close()
        except (UnicodeDecodeError, LookupError, etree.ParserError), e:
            raise ParserRejectedMarkup(str(e))

    def close(self):
        self.nsmaps = [self.DEFAULT_NSMAPS]

    def start(self, name, attrs, nsmap={}):
        # Make sure attrs is a mutable dict--lxml may send an immutable dictproxy.
        attrs = dict(attrs)
        nsprefix = None
        # Invert each namespace map as it comes in.
        if len(self.nsmaps) > 1:
            # There are no new namespaces for this tag, but
            # non-default namespaces are in play, so we need a
            # separate tag stack to know when they end.
            self.nsmaps.append(None)
        elif len(nsmap) > 0:
            # A new namespace mapping has come into play.
            inverted_nsmap = dict((value, key) for key, value in nsmap.items())
            self.nsmaps.append(inverted_nsmap)
            # Also treat the namespace mapping as a set of attributes on the
            # tag, so we can recreate it later.
            attrs = attrs.copy()
            for prefix, namespace in nsmap.items():
                attribute = NamespacedAttribute(
                    "xmlns", prefix, "http://www.w3.org/2000/xmlns/")
                attrs[attribute] = namespace

        # Namespaces are in play. Find any attributes that came in
        # from lxml with namespaces attached to their names, and
        # turn them into NamespacedAttribute objects.
        new_attrs = {}
        for attr, value in attrs.items():
            namespace, attr = self._getNsTag(attr)
            if namespace is None:
                new_attrs[attr] = value
            else:
                nsprefix = self._prefix_for_namespace(namespace)
                attr = NamespacedAttribute(nsprefix, attr, namespace)
                new_attrs[attr] = value
        attrs = new_attrs

        namespace, name = self._getNsTag(name)
        nsprefix = self._prefix_for_namespace(namespace)
        self.soup.handle_starttag(name, namespace, nsprefix, attrs)

    def _prefix_for_namespace(self, namespace):
        """Find the currently active prefix for the given namespace."""
        if namespace is None:
            return None
        for inverted_nsmap in reversed(self.nsmaps):
            if inverted_nsmap is not None and namespace in inverted_nsmap:
                return inverted_nsmap[namespace]
        return None

    def end(self, name):
        self.soup.endData()
        completed_tag = self.soup.tagStack[-1]
        namespace, name = self._getNsTag(name)
        nsprefix = None
        if namespace is not None:
            for inverted_nsmap in reversed(self.nsmaps):
                if inverted_nsmap is not None and namespace in inverted_nsmap:
                    nsprefix = inverted_nsmap[namespace]
                    break
        self.soup.handle_endtag(name, nsprefix)
        if len(self.nsmaps) > 1:
            # This tag, or one of its parents, introduced a namespace
            # mapping, so pop it off the stack.
            self.nsmaps.pop()

    def pi(self, target, data):
        pass

    def data(self, content):
        self.soup.handle_data(content)

    def doctype(self, name, pubid, system):
        self.soup.endData()
        doctype = Doctype.for_name_and_ids(name, pubid, system)
        self.soup.object_was_parsed(doctype)

    def comment(self, content):
        "Handle comments as Comment objects."
        self.soup.endData()
        self.soup.handle_data(content)
        self.soup.endData(Comment)

    def test_fragment_to_document(self, fragment):
        """See `TreeBuilder`."""
        return u'<?xml version="1.0" encoding="utf-8"?>\n%s' % fragment


class LXMLTreeBuilder(HTMLTreeBuilder, LXMLTreeBuilderForXML):

    features = [LXML, HTML, FAST, PERMISSIVE]
    is_xml = False

    def default_parser(self, encoding):
        return etree.HTMLParser

    def feed(self, markup):
        encoding = self.soup.original_encoding
        try:
            self.parser = self.parser_for(encoding)
            self.parser.feed(markup)
            self.parser.close()
        except (UnicodeDecodeError, LookupError, etree.ParserError), e:
            raise ParserRejectedMarkup(str(e))

    def test_fragment_to_document(self, fragment):
        """See `TreeBuilder`."""
        return u'<html><body>%s</body></html>' % fragment
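
# A minimal usage sketch, assuming bs4 and lxml are installed: the "lxml"
# feature selects LXMLTreeBuilder for HTML, while "xml" selects
# LXMLTreeBuilderForXML.
if __name__ == "__main__":
    from bs4 import BeautifulSoup
    print(BeautifulSoup("<p>unclosed", "lxml").p)             # <p>unclosed</p>
    print(BeautifulSoup("<doc><a>1</a></doc>", "xml").doc.a)  # <a>1</a>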
@@ -1,829 +0,0 @@
# -*- coding: utf-8 -*-
"""Beautiful Soup bonus library: Unicode, Dammit

This library converts a bytestream to Unicode through any means
necessary. It is heavily based on code from Mark Pilgrim's Universal
Feed Parser. It works best on XML and HTML, but it does not rewrite the
XML or HTML to reflect a new encoding; that's the tree builder's job.
"""

import codecs
from htmlentitydefs import codepoint2name
import re
import logging
import string

# Import a library to autodetect character encodings.
chardet_type = None
try:
    # First try the fast C implementation.
    # PyPI package: cchardet
    import cchardet
    def chardet_dammit(s):
        return cchardet.detect(s)['encoding']
except ImportError:
    try:
        # Fall back to the pure Python implementation
        # Debian package: python-chardet
        # PyPI package: chardet
        import chardet
        def chardet_dammit(s):
            return chardet.detect(s)['encoding']
        #import chardet.constants
        #chardet.constants._debug = 1
    except ImportError:
        # No chardet available.
        def chardet_dammit(s):
            return None

# Available from http://cjkpython.i18n.org/.
try:
    import iconv_codec
except ImportError:
    pass

xml_encoding_re = re.compile(
    '^<\?.*encoding=[\'"](.*?)[\'"].*\?>'.encode(), re.I)
html_meta_re = re.compile(
    '<\s*meta[^>]+charset\s*=\s*["\']?([^>]*?)[ /;\'">]'.encode(), re.I)

class EntitySubstitution(object):

    """Substitute XML or HTML entities for the corresponding characters."""

    def _populate_class_variables():
        lookup = {}
        reverse_lookup = {}
        characters_for_re = []
        for codepoint, name in list(codepoint2name.items()):
            character = unichr(codepoint)
            if codepoint != 34:
                # There's no point in turning the quotation mark into
                # &quot;, unless it happens within an attribute value, which
                # is handled elsewhere.
                characters_for_re.append(character)
                lookup[character] = name
            # But we do want to turn &quot; into the quotation mark.
            reverse_lookup[name] = character
        re_definition = "[%s]" % "".join(characters_for_re)
        return lookup, reverse_lookup, re.compile(re_definition)
    (CHARACTER_TO_HTML_ENTITY, HTML_ENTITY_TO_CHARACTER,
     CHARACTER_TO_HTML_ENTITY_RE) = _populate_class_variables()

    CHARACTER_TO_XML_ENTITY = {
        "'": "apos",
        '"': "quot",
        "&": "amp",
        "<": "lt",
        ">": "gt",
        }

    BARE_AMPERSAND_OR_BRACKET = re.compile("([<>]|"
                                           "&(?!#\d+;|#x[0-9a-fA-F]+;|\w+;)"
                                           ")")

    AMPERSAND_OR_BRACKET = re.compile("([<>&])")

    @classmethod
    def _substitute_html_entity(cls, matchobj):
        entity = cls.CHARACTER_TO_HTML_ENTITY.get(matchobj.group(0))
        return "&%s;" % entity

    @classmethod
    def _substitute_xml_entity(cls, matchobj):
        """Used with a regular expression to substitute the
        appropriate XML entity for an XML special character."""
        entity = cls.CHARACTER_TO_XML_ENTITY[matchobj.group(0)]
        return "&%s;" % entity

    @classmethod
    def quoted_attribute_value(cls, value):
        """Make a value into a quoted XML attribute, possibly escaping it.

        Most strings will be quoted using double quotes.

         Bob's Bar -> "Bob's Bar"

        If a string contains double quotes, it will be quoted using
        single quotes.

         Welcome to "my bar" -> 'Welcome to "my bar"'

        If a string contains both single and double quotes, the
        double quotes will be escaped, and the string will be quoted
        using double quotes.

         Welcome to "Bob's Bar" -> "Welcome to &quot;Bob's bar&quot;"
        """
        quote_with = '"'
        if '"' in value:
            if "'" in value:
                # The string contains both single and double
                # quotes.  Turn the double quotes into
                # entities. We quote the double quotes rather than
                # the single quotes because the entity name is
                # "&quot;" whether this is HTML or XML. If we
                # quoted the single quotes, we'd have to decide
                # between &apos; and &squot;.
                replace_with = "&quot;"
                value = value.replace('"', replace_with)
            else:
                # There are double quotes but no single quotes.
                # We can use single quotes to quote the attribute.
                quote_with = "'"
        return quote_with + value + quote_with

    @classmethod
    def substitute_xml(cls, value, make_quoted_attribute=False):
        """Substitute XML entities for special XML characters.

        :param value: A string to be substituted. The less-than sign
          will become &lt;, the greater-than sign will become &gt;,
          and any ampersands will become &amp;. If you want ampersands
          that appear to be part of an entity definition to be left
          alone, use substitute_xml_containing_entities() instead.

        :param make_quoted_attribute: If True, then the string will be
          quoted, as befits an attribute value.
        """
        # Escape angle brackets and ampersands.
        value = cls.AMPERSAND_OR_BRACKET.sub(
            cls._substitute_xml_entity, value)

        if make_quoted_attribute:
            value = cls.quoted_attribute_value(value)
        return value

    @classmethod
    def substitute_xml_containing_entities(
        cls, value, make_quoted_attribute=False):
        """Substitute XML entities for special XML characters.

        :param value: A string to be substituted. The less-than sign will
          become &lt;, the greater-than sign will become &gt;, and any
          ampersands that are not part of an entity definition will
          become &amp;.

        :param make_quoted_attribute: If True, then the string will be
          quoted, as befits an attribute value.
        """
        # Escape angle brackets, and ampersands that aren't part of
        # entities.
        value = cls.BARE_AMPERSAND_OR_BRACKET.sub(
            cls._substitute_xml_entity, value)

        if make_quoted_attribute:
            value = cls.quoted_attribute_value(value)
        return value

    @classmethod
    def substitute_html(cls, s):
        """Replace certain Unicode characters with named HTML entities.

        This differs from data.encode(encoding, 'xmlcharrefreplace')
        in that the goal is to make the result more readable (to those
        with ASCII displays) rather than to recover from
        errors. There's absolutely nothing wrong with a UTF-8 string
        containing a LATIN SMALL LETTER E WITH ACUTE, but replacing that
        character with "&eacute;" will make it more readable to some
        people.
        """
        return cls.CHARACTER_TO_HTML_ENTITY_RE.sub(
            cls._substitute_html_entity, s)
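
# A minimal sketch of the classmethods above in use; these calls need no
# setup beyond importing this module.
if __name__ == "__main__":
    print(EntitySubstitution.substitute_html(u"caf\xe9"))        # caf&eacute;
    print(EntitySubstitution.substitute_xml(u"1 < 2 & 3 > 2"))   # 1 &lt; 2 &amp; 3 &gt; 2
    print(EntitySubstitution.quoted_attribute_value('a "b" c'))  # 'a "b" c'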


class EncodingDetector:
    """Suggests a number of possible encodings for a bytestring.

    Order of precedence:

    1. Encodings you specifically tell EncodingDetector to try first
    (the override_encodings argument to the constructor).

    2. An encoding declared within the bytestring itself, either in an
    XML declaration (if the bytestring is to be interpreted as an XML
    document), or in a <meta> tag (if the bytestring is to be
    interpreted as an HTML document.)

    3. An encoding detected through textual analysis by chardet,
    cchardet, or a similar external library.

    4. UTF-8.

    5. Windows-1252.
    """
    def __init__(self, markup, override_encodings=None, is_html=False):
        self.override_encodings = override_encodings or []
        self.chardet_encoding = None
        self.is_html = is_html
        self.declared_encoding = None

        # First order of business: strip a byte-order mark.
        self.markup, self.sniffed_encoding = self.strip_byte_order_mark(markup)

    def _usable(self, encoding, tried):
        if encoding is not None:
            encoding = encoding.lower()
            if encoding not in tried:
                tried.add(encoding)
                return True
        return False

    @property
    def encodings(self):
        """Yield a number of encodings that might work for this markup."""
        tried = set()
        for e in self.override_encodings:
            if self._usable(e, tried):
                yield e

        # Did the document originally start with a byte-order mark
        # that indicated its encoding?
        if self._usable(self.sniffed_encoding, tried):
            yield self.sniffed_encoding

        # Look within the document for an XML or HTML encoding
        # declaration.
        if self.declared_encoding is None:
            self.declared_encoding = self.find_declared_encoding(
                self.markup, self.is_html)
        if self._usable(self.declared_encoding, tried):
            yield self.declared_encoding

        # Use third-party character set detection to guess at the
        # encoding.
        if self.chardet_encoding is None:
            self.chardet_encoding = chardet_dammit(self.markup)
        if self._usable(self.chardet_encoding, tried):
            yield self.chardet_encoding

        # As a last-ditch effort, try utf-8 and windows-1252.
        for e in ('utf-8', 'windows-1252'):
            if self._usable(e, tried):
                yield e

    @classmethod
    def strip_byte_order_mark(cls, data):
        """If a byte-order mark is present, strip it and return the encoding it implies."""
        encoding = None
        if (len(data) >= 4) and (data[:2] == b'\xfe\xff') \
               and (data[2:4] != b'\x00\x00'):
            encoding = 'utf-16be'
            data = data[2:]
        elif (len(data) >= 4) and (data[:2] == b'\xff\xfe') \
                 and (data[2:4] != b'\x00\x00'):
            encoding = 'utf-16le'
            data = data[2:]
        elif data[:3] == b'\xef\xbb\xbf':
            encoding = 'utf-8'
            data = data[3:]
        elif data[:4] == b'\x00\x00\xfe\xff':
            encoding = 'utf-32be'
            data = data[4:]
        elif data[:4] == b'\xff\xfe\x00\x00':
            encoding = 'utf-32le'
            data = data[4:]
        return data, encoding

    @classmethod
    def find_declared_encoding(cls, markup, is_html=False, search_entire_document=False):
        """Given a document, tries to find its declared encoding.

        An XML encoding is declared at the beginning of the document.

        An HTML encoding is declared in a <meta> tag, hopefully near the
        beginning of the document.
        """
        if search_entire_document:
            xml_endpos = html_endpos = len(markup)
        else:
            xml_endpos = 1024
            html_endpos = max(2048, int(len(markup) * 0.05))

        declared_encoding = None
        declared_encoding_match = xml_encoding_re.search(markup, endpos=xml_endpos)
        if not declared_encoding_match and is_html:
            declared_encoding_match = html_meta_re.search(markup, endpos=html_endpos)
        if declared_encoding_match is not None:
            declared_encoding = declared_encoding_match.groups()[0].decode(
                'ascii')
        if declared_encoding:
            return declared_encoding.lower()
        return None
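
# A minimal sketch of the generator interface above: candidate encodings
# are yielded best-guess first; what chardet contributes depends on which
# detector library, if any, is installed.
if __name__ == "__main__":
    detector = EncodingDetector(b"<html>hello</html>", ["iso-8859-8"],
                                is_html=True)
    for encoding in detector.encodings:
        # e.g. iso-8859-8, a chardet guess if available, utf-8, windows-1252
        print(encoding)
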
class UnicodeDammit:
    """A class for detecting the encoding of a *ML document and
    converting it to a Unicode string. If the source encoding is
    windows-1252, can replace MS smart quotes with their HTML or XML
    equivalents."""

    # This dictionary maps commonly seen values for "charset" in HTML
    # meta tags to the corresponding Python codec names. It only covers
    # values that aren't in Python's aliases and can't be determined
    # by the heuristics in find_codec.
    CHARSET_ALIASES = {"macintosh": "mac-roman",
                       "x-sjis": "shift-jis"}

    ENCODINGS_WITH_SMART_QUOTES = [
        "windows-1252",
        "iso-8859-1",
        "iso-8859-2",
        ]

    def __init__(self, markup, override_encodings=[],
                 smart_quotes_to=None, is_html=False):
        self.smart_quotes_to = smart_quotes_to
        self.tried_encodings = []
        self.contains_replacement_characters = False
        self.is_html = is_html

        self.detector = EncodingDetector(markup, override_encodings, is_html)

        # Short-circuit if the data is in Unicode to begin with.
        if isinstance(markup, unicode) or markup == '':
            self.markup = markup
            self.unicode_markup = unicode(markup)
            self.original_encoding = None
            return

        # The encoding detector may have stripped a byte-order mark.
        # Use the stripped markup from this point on.
        self.markup = self.detector.markup

        u = None
        for encoding in self.detector.encodings:
            markup = self.detector.markup
            u = self._convert_from(encoding)
            if u is not None:
                break

        if not u:
            # None of the encodings worked. As an absolute last resort,
            # try them again with character replacement.
            for encoding in self.detector.encodings:
                if encoding != "ascii":
                    u = self._convert_from(encoding, "replace")
                if u is not None:
                    logging.warning(
                        "Some characters could not be decoded, and were "
                        "replaced with REPLACEMENT CHARACTER.")
                    self.contains_replacement_characters = True
                    break

        # If none of that worked, we could at this point force it to
        # ASCII, but that would destroy so much data that I think
        # giving up is better.
        self.unicode_markup = u
        if not u:
            self.original_encoding = None

    def _sub_ms_char(self, match):
        """Changes a MS smart quote character to an XML or HTML
        entity, or an ASCII character."""
        orig = match.group(1)
        if self.smart_quotes_to == 'ascii':
            sub = self.MS_CHARS_TO_ASCII.get(orig).encode()
        else:
            sub = self.MS_CHARS.get(orig)
            if type(sub) == tuple:
                if self.smart_quotes_to == 'xml':
                    sub = '&#x'.encode() + sub[1].encode() + ';'.encode()
                else:
                    sub = '&'.encode() + sub[0].encode() + ';'.encode()
            else:
                sub = sub.encode()
        return sub

    def _convert_from(self, proposed, errors="strict"):
        proposed = self.find_codec(proposed)
        if not proposed or (proposed, errors) in self.tried_encodings:
            return None
        self.tried_encodings.append((proposed, errors))
        markup = self.markup
        # Convert smart quotes to HTML if coming from an encoding
        # that might have them.
        if (self.smart_quotes_to is not None
            and proposed in self.ENCODINGS_WITH_SMART_QUOTES):
            smart_quotes_re = b"([\x80-\x9f])"
            smart_quotes_compiled = re.compile(smart_quotes_re)
            markup = smart_quotes_compiled.sub(self._sub_ms_char, markup)

        try:
            #print "Trying to convert document to %s (errors=%s)" % (
            #    proposed, errors)
            u = self._to_unicode(markup, proposed, errors)
            self.markup = u
            self.original_encoding = proposed
        except Exception as e:
            #print "That didn't work!"
            #print e
            return None
        #print "Correct encoding: %s" % proposed
        return self.markup

    def _to_unicode(self, data, encoding, errors="strict"):
        '''Given a string and its encoding, decodes the string into Unicode.
        %encoding is a string recognized by encodings.aliases'''
        return unicode(data, encoding, errors)

    @property
    def declared_html_encoding(self):
        if not self.is_html:
            return None
        return self.detector.declared_encoding

    def find_codec(self, charset):
        value = (self._codec(self.CHARSET_ALIASES.get(charset, charset))
                 or (charset and self._codec(charset.replace("-", "")))
                 or (charset and self._codec(charset.replace("-", "_")))
                 or (charset and charset.lower())
                 or charset
                 )
        if value:
            return value.lower()
        return None

    def _codec(self, charset):
        if not charset:
            return charset
        codec = None
        try:
            codecs.lookup(charset)
            codec = charset
        except (LookupError, ValueError):
            pass
        return codec


    # A partial mapping of ISO-Latin-1 to HTML entities/XML numeric entities.
    MS_CHARS = {b'\x80': ('euro', '20AC'),
                b'\x81': ' ',
                b'\x82': ('sbquo', '201A'),
                b'\x83': ('fnof', '192'),
                b'\x84': ('bdquo', '201E'),
                b'\x85': ('hellip', '2026'),
                b'\x86': ('dagger', '2020'),
                b'\x87': ('Dagger', '2021'),
                b'\x88': ('circ', '2C6'),
                b'\x89': ('permil', '2030'),
                b'\x8A': ('Scaron', '160'),
                b'\x8B': ('lsaquo', '2039'),
                b'\x8C': ('OElig', '152'),
                b'\x8D': '?',
                b'\x8E': ('#x17D', '17D'),
                b'\x8F': '?',
                b'\x90': '?',
                b'\x91': ('lsquo', '2018'),
                b'\x92': ('rsquo', '2019'),
                b'\x93': ('ldquo', '201C'),
                b'\x94': ('rdquo', '201D'),
                b'\x95': ('bull', '2022'),
                b'\x96': ('ndash', '2013'),
                b'\x97': ('mdash', '2014'),
                b'\x98': ('tilde', '2DC'),
                b'\x99': ('trade', '2122'),
                b'\x9a': ('scaron', '161'),
                b'\x9b': ('rsaquo', '203A'),
                b'\x9c': ('oelig', '153'),
                b'\x9d': '?',
                b'\x9e': ('#x17E', '17E'),
                b'\x9f': ('Yuml', '178'),}

    # A parochial partial mapping of ISO-Latin-1 to ASCII. Contains
    # horrors like stripping diacritical marks to turn á into a, but also
    # contains non-horrors like turning “ into ".
    MS_CHARS_TO_ASCII = {
        b'\x80' : 'EUR',
        b'\x81' : ' ',
        b'\x82' : ',',
        b'\x83' : 'f',
        b'\x84' : ',,',
        b'\x85' : '...',
        b'\x86' : '+',
        b'\x87' : '++',
        b'\x88' : '^',
        b'\x89' : '%',
        b'\x8a' : 'S',
        b'\x8b' : '<',
        b'\x8c' : 'OE',
        b'\x8d' : '?',
        b'\x8e' : 'Z',
        b'\x8f' : '?',
        b'\x90' : '?',
        b'\x91' : "'",
        b'\x92' : "'",
        b'\x93' : '"',
        b'\x94' : '"',
        b'\x95' : '*',
        b'\x96' : '-',
        b'\x97' : '--',
        b'\x98' : '~',
        b'\x99' : '(TM)',
        b'\x9a' : 's',
        b'\x9b' : '>',
        b'\x9c' : 'oe',
        b'\x9d' : '?',
        b'\x9e' : 'z',
        b'\x9f' : 'Y',
        b'\xa0' : ' ',
        b'\xa1' : '!',
        b'\xa2' : 'c',
        b'\xa3' : 'GBP',
        b'\xa4' : '$', #This approximation is especially parochial--this is the
                       #generic currency symbol.
        b'\xa5' : 'YEN',
        b'\xa6' : '|',
        b'\xa7' : 'S',
        b'\xa8' : '..',
        b'\xa9' : '',
        b'\xaa' : '(th)',
        b'\xab' : '<<',
        b'\xac' : '!',
        b'\xad' : ' ',
        b'\xae' : '(R)',
        b'\xaf' : '-',
        b'\xb0' : 'o',
        b'\xb1' : '+-',
        b'\xb2' : '2',
        b'\xb3' : '3',
        b'\xb4' : ("'", 'acute'),
        b'\xb5' : 'u',
        b'\xb6' : 'P',
        b'\xb7' : '*',
        b'\xb8' : ',',
        b'\xb9' : '1',
        b'\xba' : '(th)',
        b'\xbb' : '>>',
        b'\xbc' : '1/4',
        b'\xbd' : '1/2',
        b'\xbe' : '3/4',
        b'\xbf' : '?',
        b'\xc0' : 'A',
        b'\xc1' : 'A',
        b'\xc2' : 'A',
        b'\xc3' : 'A',
        b'\xc4' : 'A',
        b'\xc5' : 'A',
        b'\xc6' : 'AE',
        b'\xc7' : 'C',
        b'\xc8' : 'E',
        b'\xc9' : 'E',
        b'\xca' : 'E',
        b'\xcb' : 'E',
        b'\xcc' : 'I',
        b'\xcd' : 'I',
        b'\xce' : 'I',
        b'\xcf' : 'I',
        b'\xd0' : 'D',
        b'\xd1' : 'N',
        b'\xd2' : 'O',
        b'\xd3' : 'O',
        b'\xd4' : 'O',
        b'\xd5' : 'O',
        b'\xd6' : 'O',
        b'\xd7' : '*',
        b'\xd8' : 'O',
        b'\xd9' : 'U',
        b'\xda' : 'U',
        b'\xdb' : 'U',
        b'\xdc' : 'U',
        b'\xdd' : 'Y',
        b'\xde' : 'b',
        b'\xdf' : 'B',
        b'\xe0' : 'a',
        b'\xe1' : 'a',
        b'\xe2' : 'a',
        b'\xe3' : 'a',
        b'\xe4' : 'a',
        b'\xe5' : 'a',
        b'\xe6' : 'ae',
        b'\xe7' : 'c',
        b'\xe8' : 'e',
        b'\xe9' : 'e',
        b'\xea' : 'e',
        b'\xeb' : 'e',
        b'\xec' : 'i',
        b'\xed' : 'i',
        b'\xee' : 'i',
        b'\xef' : 'i',
        b'\xf0' : 'o',
        b'\xf1' : 'n',
        b'\xf2' : 'o',
        b'\xf3' : 'o',
        b'\xf4' : 'o',
        b'\xf5' : 'o',
        b'\xf6' : 'o',
        b'\xf7' : '/',
        b'\xf8' : 'o',
        b'\xf9' : 'u',
        b'\xfa' : 'u',
        b'\xfb' : 'u',
        b'\xfc' : 'u',
        b'\xfd' : 'y',
        b'\xfe' : 'b',
        b'\xff' : 'y',
        }

    # A map used when removing rogue Windows-1252/ISO-8859-1
    # characters in otherwise UTF-8 documents.
    #
    # Note that \x81, \x8d, \x8f, \x90, and \x9d are undefined in
    # Windows-1252.
    WINDOWS_1252_TO_UTF8 = {
        0x80 : b'\xe2\x82\xac', # €
        0x82 : b'\xe2\x80\x9a', # ‚
        0x83 : b'\xc6\x92',     # ƒ
        0x84 : b'\xe2\x80\x9e', # „
        0x85 : b'\xe2\x80\xa6', # …
        0x86 : b'\xe2\x80\xa0', # †
        0x87 : b'\xe2\x80\xa1', # ‡
        0x88 : b'\xcb\x86',     # ˆ
        0x89 : b'\xe2\x80\xb0', # ‰
        0x8a : b'\xc5\xa0',     # Š
        0x8b : b'\xe2\x80\xb9', # ‹
        0x8c : b'\xc5\x92',     # Œ
        0x8e : b'\xc5\xbd',     # Ž
        0x91 : b'\xe2\x80\x98', # ‘
        0x92 : b'\xe2\x80\x99', # ’
        0x93 : b'\xe2\x80\x9c', # “
        0x94 : b'\xe2\x80\x9d', # ”
        0x95 : b'\xe2\x80\xa2', # •
        0x96 : b'\xe2\x80\x93', # –
        0x97 : b'\xe2\x80\x94', # —
        0x98 : b'\xcb\x9c',     # ˜
        0x99 : b'\xe2\x84\xa2', # ™
        0x9a : b'\xc5\xa1',     # š
        0x9b : b'\xe2\x80\xba', # ›
        0x9c : b'\xc5\x93',     # œ
        0x9e : b'\xc5\xbe',     # ž
        0x9f : b'\xc5\xb8',     # Ÿ
        0xa0 : b'\xc2\xa0',     # non-breaking space
        0xa1 : b'\xc2\xa1',     # ¡
        0xa2 : b'\xc2\xa2',     # ¢
        0xa3 : b'\xc2\xa3',     # £
        0xa4 : b'\xc2\xa4',     # ¤
        0xa5 : b'\xc2\xa5',     # ¥
        0xa6 : b'\xc2\xa6',     # ¦
        0xa7 : b'\xc2\xa7',     # §
        0xa8 : b'\xc2\xa8',     # ¨
        0xa9 : b'\xc2\xa9',     # ©
        0xaa : b'\xc2\xaa',     # ª
        0xab : b'\xc2\xab',     # «
        0xac : b'\xc2\xac',     # ¬
        0xad : b'\xc2\xad',     # soft hyphen
        0xae : b'\xc2\xae',     # ®
        0xaf : b'\xc2\xaf',     # ¯
        0xb0 : b'\xc2\xb0',     # °
        0xb1 : b'\xc2\xb1',     # ±
        0xb2 : b'\xc2\xb2',     # ²
        0xb3 : b'\xc2\xb3',     # ³
        0xb4 : b'\xc2\xb4',     # ´
        0xb5 : b'\xc2\xb5',     # µ
        0xb6 : b'\xc2\xb6',     # ¶
        0xb7 : b'\xc2\xb7',     # ·
        0xb8 : b'\xc2\xb8',     # ¸
        0xb9 : b'\xc2\xb9',     # ¹
        0xba : b'\xc2\xba',     # º
        0xbb : b'\xc2\xbb',     # »
        0xbc : b'\xc2\xbc',     # ¼
        0xbd : b'\xc2\xbd',     # ½
        0xbe : b'\xc2\xbe',     # ¾
        0xbf : b'\xc2\xbf',     # ¿
        0xc0 : b'\xc3\x80',     # À
        0xc1 : b'\xc3\x81',     # Á
        0xc2 : b'\xc3\x82',     # Â
        0xc3 : b'\xc3\x83',     # Ã
        0xc4 : b'\xc3\x84',     # Ä
        0xc5 : b'\xc3\x85',     # Å
        0xc6 : b'\xc3\x86',     # Æ
        0xc7 : b'\xc3\x87',     # Ç
        0xc8 : b'\xc3\x88',     # È
        0xc9 : b'\xc3\x89',     # É
        0xca : b'\xc3\x8a',     # Ê
        0xcb : b'\xc3\x8b',     # Ë
        0xcc : b'\xc3\x8c',     # Ì
        0xcd : b'\xc3\x8d',     # Í
        0xce : b'\xc3\x8e',     # Î
        0xcf : b'\xc3\x8f',     # Ï
        0xd0 : b'\xc3\x90',     # Ð
        0xd1 : b'\xc3\x91',     # Ñ
        0xd2 : b'\xc3\x92',     # Ò
        0xd3 : b'\xc3\x93',     # Ó
        0xd4 : b'\xc3\x94',     # Ô
        0xd5 : b'\xc3\x95',     # Õ
        0xd6 : b'\xc3\x96',     # Ö
        0xd7 : b'\xc3\x97',     # ×
        0xd8 : b'\xc3\x98',     # Ø
        0xd9 : b'\xc3\x99',     # Ù
        0xda : b'\xc3\x9a',     # Ú
        0xdb : b'\xc3\x9b',     # Û
        0xdc : b'\xc3\x9c',     # Ü
        0xdd : b'\xc3\x9d',     # Ý
        0xde : b'\xc3\x9e',     # Þ
        0xdf : b'\xc3\x9f',     # ß
        0xe0 : b'\xc3\xa0',     # à
        0xe1 : b'\xc3\xa1',     # á
        0xe2 : b'\xc3\xa2',     # â
        0xe3 : b'\xc3\xa3',     # ã
        0xe4 : b'\xc3\xa4',     # ä
        0xe5 : b'\xc3\xa5',     # å
        0xe6 : b'\xc3\xa6',     # æ
        0xe7 : b'\xc3\xa7',     # ç
        0xe8 : b'\xc3\xa8',     # è
        0xe9 : b'\xc3\xa9',     # é
        0xea : b'\xc3\xaa',     # ê
        0xeb : b'\xc3\xab',     # ë
        0xec : b'\xc3\xac',     # ì
        0xed : b'\xc3\xad',     # í
        0xee : b'\xc3\xae',     # î
        0xef : b'\xc3\xaf',     # ï
        0xf0 : b'\xc3\xb0',     # ð
        0xf1 : b'\xc3\xb1',     # ñ
        0xf2 : b'\xc3\xb2',     # ò
        0xf3 : b'\xc3\xb3',     # ó
        0xf4 : b'\xc3\xb4',     # ô
        0xf5 : b'\xc3\xb5',     # õ
        0xf6 : b'\xc3\xb6',     # ö
        0xf7 : b'\xc3\xb7',     # ÷
        0xf8 : b'\xc3\xb8',     # ø
        0xf9 : b'\xc3\xb9',     # ù
        0xfa : b'\xc3\xba',     # ú
        0xfb : b'\xc3\xbb',     # û
        0xfc : b'\xc3\xbc',     # ü
        0xfd : b'\xc3\xbd',     # ý
        0xfe : b'\xc3\xbe',     # þ
        }

    MULTIBYTE_MARKERS_AND_SIZES = [
        (0xc2, 0xdf, 2), # 2-byte characters start with a byte C2-DF
        (0xe0, 0xef, 3), # 3-byte characters start with E0-EF
        (0xf0, 0xf4, 4), # 4-byte characters start with F0-F4
        ]

    FIRST_MULTIBYTE_MARKER = MULTIBYTE_MARKERS_AND_SIZES[0][0]
    LAST_MULTIBYTE_MARKER = MULTIBYTE_MARKERS_AND_SIZES[-1][1]

    @classmethod
    def detwingle(cls, in_bytes, main_encoding="utf8",
                  embedded_encoding="windows-1252"):
        """Fix characters from one encoding embedded in some other encoding.

        Currently the only situation supported is Windows-1252 (or its
        subset ISO-8859-1), embedded in UTF-8.

        The input must be a bytestring. If you've already converted
        the document to Unicode, you're too late.

        The output is a bytestring in which `embedded_encoding`
        characters have been converted to their `main_encoding`
        equivalents.
        """
        if embedded_encoding.replace('_', '-').lower() not in (
            'windows-1252', 'windows_1252'):
            raise NotImplementedError(
                "Windows-1252 and ISO-8859-1 are the only currently supported "
                "embedded encodings.")

        if main_encoding.lower() not in ('utf8', 'utf-8'):
            raise NotImplementedError(
                "UTF-8 is the only currently supported main encoding.")

        byte_chunks = []

        chunk_start = 0
        pos = 0
        while pos < len(in_bytes):
            byte = in_bytes[pos]
            if not isinstance(byte, int):
                # Python 2.x
                byte = ord(byte)
            if (byte >= cls.FIRST_MULTIBYTE_MARKER
                and byte <= cls.LAST_MULTIBYTE_MARKER):
                # This is the start of a UTF-8 multibyte character. Skip
                # to the end.
                for start, end, size in cls.MULTIBYTE_MARKERS_AND_SIZES:
                    if byte >= start and byte <= end:
                        pos += size
                        break
            elif byte >= 0x80 and byte in cls.WINDOWS_1252_TO_UTF8:
                # We found a Windows-1252 character!
                # Save the string up to this point as a chunk.
                byte_chunks.append(in_bytes[chunk_start:pos])

                # Now translate the Windows-1252 character into UTF-8
                # and add it as another, one-byte chunk.
                byte_chunks.append(cls.WINDOWS_1252_TO_UTF8[byte])
                pos += 1
                chunk_start = pos
            else:
                # Go on to the next character.
                pos += 1
        if chunk_start == 0:
            # The string is unchanged.
            return in_bytes
        else:
            # Store the final chunk.
            byte_chunks.append(in_bytes[chunk_start:])
        return b''.join(byte_chunks)
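
# A minimal sketch of the two main entry points above, assuming bytestring
# input: detwingle() repairs Windows-1252 bytes embedded in otherwise UTF-8
# data, after which UnicodeDammit can decode the whole document.
if __name__ == "__main__":
    snowmen = (u"\N{SNOWMAN}" * 3).encode("utf8")
    quote = (u"\N{LEFT DOUBLE QUOTATION MARK}Hi!"
             u"\N{RIGHT DOUBLE QUOTATION MARK}").encode("windows_1252")
    fixed = UnicodeDammit.detwingle(snowmen + quote)
    dammit = UnicodeDammit(fixed)
    print(dammit.original_encoding)  # typically utf-8
    print(dammit.unicode_markup)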
@@ -1,204 +0,0 @@
"""Diagnostic functions, mainly for use when doing tech support."""
import cProfile
from StringIO import StringIO
from HTMLParser import HTMLParser
import bs4
from bs4 import BeautifulSoup, __version__
from bs4.builder import builder_registry

import os
import pstats
import random
import tempfile
import time
import traceback
import sys

def diagnose(data):
    """Diagnostic suite for isolating common problems."""
    print "Diagnostic running on Beautiful Soup %s" % __version__
    print "Python version %s" % sys.version

    basic_parsers = ["html.parser", "html5lib", "lxml"]
    # Iterate over a copy so that removing entries doesn't skip items.
    for name in list(basic_parsers):
        for builder in builder_registry.builders:
            if name in builder.features:
                break
        else:
            basic_parsers.remove(name)
            print (
                "I noticed that %s is not installed. Installing it may help." %
                name)

    if 'lxml' in basic_parsers:
        basic_parsers.append(["lxml", "xml"])
        from lxml import etree
        print "Found lxml version %s" % ".".join(map(str, etree.LXML_VERSION))

    if 'html5lib' in basic_parsers:
        import html5lib
        print "Found html5lib version %s" % html5lib.__version__

    if hasattr(data, 'read'):
        data = data.read()
    elif os.path.exists(data):
        print '"%s" looks like a filename. Reading data from the file.' % data
        data = open(data).read()
    elif data.startswith("http:") or data.startswith("https:"):
        print '"%s" looks like a URL. Beautiful Soup is not an HTTP client.' % data
        print "You need to use some other library to get the document behind the URL, and feed that document to Beautiful Soup."
        return
    print

    for parser in basic_parsers:
        print "Trying to parse your markup with %s" % parser
        success = False
        try:
            soup = BeautifulSoup(data, parser)
            success = True
        except Exception, e:
            print "%s could not parse the markup." % parser
            traceback.print_exc()
        if success:
            print "Here's what %s did with the markup:" % parser
            print soup.prettify()

        print "-" * 80

def lxml_trace(data, html=True, **kwargs):
    """Print out the lxml events that occur during parsing.

    This lets you see how lxml parses a document when no Beautiful
    Soup code is running.
    """
    from lxml import etree
    for event, element in etree.iterparse(StringIO(data), html=html, **kwargs):
        print("%s, %4s, %s" % (event, element.tag, element.text))
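
# A small sketch of lxml_trace in use, assuming lxml is installed; it is
# wrapped in a function here so it only runs on demand. By default lxml
# reports "end" events, one line per element.
def example_lxml_trace():
    lxml_trace("<p>One<p>Two</p>")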

class AnnouncingParser(HTMLParser):
    """Announces HTMLParser parse events, without doing anything else."""

    def _p(self, s):
        print(s)

    def handle_starttag(self, name, attrs):
        self._p("%s START" % name)

    def handle_endtag(self, name):
        self._p("%s END" % name)

    def handle_data(self, data):
        self._p("%s DATA" % data)

    def handle_charref(self, name):
        self._p("%s CHARREF" % name)

    def handle_entityref(self, name):
        self._p("%s ENTITYREF" % name)

    def handle_comment(self, data):
        self._p("%s COMMENT" % data)

    def handle_decl(self, data):
        self._p("%s DECL" % data)

    def unknown_decl(self, data):
        self._p("%s UNKNOWN-DECL" % data)

    def handle_pi(self, data):
        self._p("%s PI" % data)

def htmlparser_trace(data):
    """Print out the HTMLParser events that occur during parsing.

    This lets you see how HTMLParser parses a document when no
    Beautiful Soup code is running.
    """
    parser = AnnouncingParser()
    parser.feed(data)

_vowels = "aeiou"
_consonants = "bcdfghjklmnpqrstvwxyz"

def rword(length=5):
    "Generate a random word-like string."
    s = ''
    for i in range(length):
        if i % 2 == 0:
            t = _consonants
        else:
            t = _vowels
        s += random.choice(t)
    return s

def rsentence(length=4):
    "Generate a random sentence-like string."
    return " ".join(rword(random.randint(4,9)) for i in range(length))

def rdoc(num_elements=1000):
    """Randomly generate an invalid HTML document."""
    tag_names = ['p', 'div', 'span', 'i', 'b', 'script', 'table']
    elements = []
    for i in range(num_elements):
        choice = random.randint(0,3)
        if choice == 0:
            # New tag.
            tag_name = random.choice(tag_names)
            elements.append("<%s>" % tag_name)
        elif choice == 1:
            elements.append(rsentence(random.randint(1,4)))
        elif choice == 2:
            # Close a tag.
            tag_name = random.choice(tag_names)
            elements.append("</%s>" % tag_name)
    return "<html>" + "\n".join(elements) + "</html>"

def benchmark_parsers(num_elements=100000):
    """Very basic head-to-head performance benchmark."""
    print "Comparative parser benchmark on Beautiful Soup %s" % __version__
    data = rdoc(num_elements)
    print "Generated a large invalid HTML document (%d bytes)." % len(data)

    for parser in ["lxml", ["lxml", "html"], "html5lib", "html.parser"]:
        success = False
        try:
            a = time.time()
            soup = BeautifulSoup(data, parser)
            b = time.time()
            success = True
        except Exception, e:
            print "%s could not parse the markup." % parser
            traceback.print_exc()
        if success:
            print "BS4+%s parsed the markup in %.2fs." % (parser, b-a)

    from lxml import etree
    a = time.time()
    etree.HTML(data)
    b = time.time()
    print "Raw lxml parsed the markup in %.2fs." % (b-a)

    import html5lib
    parser = html5lib.HTMLParser()
    a = time.time()
    parser.parse(data)
    b = time.time()
    print "Raw html5lib parsed the markup in %.2fs." % (b-a)

def profile(num_elements=100000, parser="lxml"):

    filehandle = tempfile.NamedTemporaryFile()
    filename = filehandle.name

    data = rdoc(num_elements)
    vars = dict(bs4=bs4, data=data, parser=parser)
    cProfile.runctx('bs4.BeautifulSoup(data, parser)', vars, vars, filename)

    stats = pstats.Stats(filename)
    # stats.strip_dirs()
    stats.sort_stats("cumulative")
    stats.print_stats('_html5lib|bs4', 50)

if __name__ == '__main__':
    diagnose(sys.stdin.read())
File diff suppressed because it is too large
@@ -1,592 +0,0 @@
"""Helper classes for tests."""

import copy
import functools
import unittest
from unittest import TestCase
from bs4 import BeautifulSoup
from bs4.element import (
    CharsetMetaAttributeValue,
    Comment,
    ContentMetaAttributeValue,
    Doctype,
    SoupStrainer,
)

from bs4.builder import HTMLParserTreeBuilder
default_builder = HTMLParserTreeBuilder


class SoupTest(unittest.TestCase):

    @property
    def default_builder(self):
        return default_builder()

    def soup(self, markup, **kwargs):
        """Build a Beautiful Soup object from markup."""
        builder = kwargs.pop('builder', self.default_builder)
        return BeautifulSoup(markup, builder=builder, **kwargs)

    def document_for(self, markup):
        """Turn an HTML fragment into a document.

        The details depend on the builder.
        """
        return self.default_builder.test_fragment_to_document(markup)

    def assertSoupEquals(self, to_parse, compare_parsed_to=None):
        builder = self.default_builder
        obj = BeautifulSoup(to_parse, builder=builder)
        if compare_parsed_to is None:
            compare_parsed_to = to_parse

        self.assertEqual(obj.decode(), self.document_for(compare_parsed_to))

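# A minimal sketch of how these helpers are meant to be used: a concrete
# test case subclasses SoupTest and leans on soup() and assertSoupEquals().
class ExampleSoupTest(SoupTest):
    def test_paragraph_round_trips(self):
        self.assertSoupEquals("<p>A paragraph.</p>")

    def test_builder_parses_markup(self):
        soup = self.soup("<b>bold</b>")
        self.assertEqual(soup.b.string, "bold")
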
class HTMLTreeBuilderSmokeTest(object):

    """A basic test of a treebuilder's competence.

    Any HTML treebuilder, present or future, should be able to pass
    these tests. With invalid markup, there's room for interpretation,
    and different parsers can handle it differently. But with the
    markup in these tests, there's not much room for interpretation.
    """

    def assertDoctypeHandled(self, doctype_fragment):
        """Assert that a given doctype string is handled correctly."""
        doctype_str, soup = self._document_with_doctype(doctype_fragment)

        # Make sure a Doctype object was created.
        doctype = soup.contents[0]
        self.assertEqual(doctype.__class__, Doctype)
        self.assertEqual(doctype, doctype_fragment)
        self.assertEqual(str(soup)[:len(doctype_str)], doctype_str)

        # Make sure that the doctype was correctly associated with the
        # parse tree and that the rest of the document parsed.
        self.assertEqual(soup.p.contents[0], 'foo')

    def _document_with_doctype(self, doctype_fragment):
        """Generate and parse a document with the given doctype."""
        doctype = '<!DOCTYPE %s>' % doctype_fragment
        markup = doctype + '\n<p>foo</p>'
        soup = self.soup(markup)
        return doctype, soup

    def test_normal_doctypes(self):
        """Make sure normal, everyday HTML doctypes are handled correctly."""
        self.assertDoctypeHandled("html")
        self.assertDoctypeHandled(
            'html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"')

    def test_empty_doctype(self):
        soup = self.soup("<!DOCTYPE>")
        doctype = soup.contents[0]
        self.assertEqual("", doctype.strip())

    def test_public_doctype_with_url(self):
        doctype = 'html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"'
        self.assertDoctypeHandled(doctype)

    def test_system_doctype(self):
        self.assertDoctypeHandled('foo SYSTEM "http://www.example.com/"')

    def test_namespaced_system_doctype(self):
        # We can handle a namespaced doctype with a system ID.
        self.assertDoctypeHandled('xsl:stylesheet SYSTEM "htmlent.dtd"')

    def test_namespaced_public_doctype(self):
        # Test a namespaced doctype with a public id.
        self.assertDoctypeHandled('xsl:stylesheet PUBLIC "htmlent.dtd"')

    def test_real_xhtml_document(self):
        """A real XHTML document should come out more or less the same as it went in."""
        markup = b"""<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN">
<html xmlns="http://www.w3.org/1999/xhtml">
<head><title>Hello.</title></head>
<body>Goodbye.</body>
</html>"""
        soup = self.soup(markup)
        self.assertEqual(
            soup.encode("utf-8").replace(b"\n", b""),
            markup.replace(b"\n", b""))

    def test_deepcopy(self):
        """Make sure you can copy the tree builder.

        This is important because the builder is part of a
        BeautifulSoup object, and we want to be able to copy that.
        """
        copy.deepcopy(self.default_builder)

    def test_p_tag_is_never_empty_element(self):
        """A <p> tag is never designated as an empty-element tag.

        Even if the markup shows it as an empty-element tag, it
        shouldn't be presented that way.
        """
        soup = self.soup("<p/>")
        self.assertFalse(soup.p.is_empty_element)
        self.assertEqual(str(soup.p), "<p></p>")

    def test_unclosed_tags_get_closed(self):
        """A tag that's not closed by the end of the document should be closed.

        This applies to all tags except empty-element tags.
        """
        self.assertSoupEquals("<p>", "<p></p>")
        self.assertSoupEquals("<b>", "<b></b>")

        self.assertSoupEquals("<br>", "<br/>")

    def test_br_is_always_empty_element_tag(self):
        """A <br> tag is designated as an empty-element tag.

        Some parsers treat <br></br> as one <br/> tag, some parsers as
        two tags, but it should always be an empty-element tag.
        """
        soup = self.soup("<br></br>")
        self.assertTrue(soup.br.is_empty_element)
        self.assertEqual(str(soup.br), "<br/>")

    def test_nested_formatting_elements(self):
        self.assertSoupEquals("<em><em></em></em>")

    def test_comment(self):
        # Comments are represented as Comment objects.
        markup = "<p>foo<!--foobar-->baz</p>"
        self.assertSoupEquals(markup)

        soup = self.soup(markup)
        comment = soup.find(text="foobar")
        self.assertEqual(comment.__class__, Comment)

        # The comment is properly integrated into the tree.
        foo = soup.find(text="foo")
        self.assertEqual(comment, foo.next_element)
        baz = soup.find(text="baz")
        self.assertEqual(comment, baz.previous_element)

    def test_preserved_whitespace_in_pre_and_textarea(self):
        """Whitespace must be preserved in <pre> and <textarea> tags."""
        self.assertSoupEquals("<pre>   </pre>")
        self.assertSoupEquals("<textarea> woo  </textarea>")

    def test_nested_inline_elements(self):
        """Inline elements can be nested indefinitely."""
        b_tag = "<b>Inside a B tag</b>"
        self.assertSoupEquals(b_tag)

        nested_b_tag = "<p>A <i>nested <b>tag</b></i></p>"
        self.assertSoupEquals(nested_b_tag)

        double_nested_b_tag = "<p>A <a>doubly <i>nested <b>tag</b></i></a></p>"
        self.assertSoupEquals(double_nested_b_tag)

    def test_nested_block_level_elements(self):
        """Block elements can be nested."""
        soup = self.soup('<blockquote><p><b>Foo</b></p></blockquote>')
        blockquote = soup.blockquote
        self.assertEqual(blockquote.p.b.string, 'Foo')
        self.assertEqual(blockquote.b.string, 'Foo')

    def test_correctly_nested_tables(self):
        """One table can go inside another one."""
        markup = ('<table id="1">'
                  '<tr>'
                  "<td>Here's another table:"
                  '<table id="2">'
                  '<tr><td>foo</td></tr>'
                  '</table></td>')

        self.assertSoupEquals(
            markup,
            '<table id="1"><tr><td>Here\'s another table:'
            '<table id="2"><tr><td>foo</td></tr></table>'
            '</td></tr></table>')

        self.assertSoupEquals(
            "<table><thead><tr><td>Foo</td></tr></thead>"
            "<tbody><tr><td>Bar</td></tr></tbody>"
            "<tfoot><tr><td>Baz</td></tr></tfoot></table>")

    def test_deeply_nested_multivalued_attribute(self):
        # html5lib can set the attributes of the same tag many times
        # as it rearranges the tree. This has caused problems with
        # multivalued attributes.
        markup = '<table><div><div class="css"></div></div></table>'
        soup = self.soup(markup)
        self.assertEqual(["css"], soup.div.div['class'])

    def test_angle_brackets_in_attribute_values_are_escaped(self):
        self.assertSoupEquals('<a b="<a>"></a>', '<a b="&lt;a&gt;"></a>')

    def test_entities_in_attributes_converted_to_unicode(self):
        expect = u'<p id="pi\N{LATIN SMALL LETTER N WITH TILDE}ata"></p>'
        self.assertSoupEquals('<p id="pi&#241;ata"></p>', expect)
        self.assertSoupEquals('<p id="pi&#xf1;ata"></p>', expect)
        self.assertSoupEquals('<p id="pi&#Xf1;ata"></p>', expect)
        self.assertSoupEquals('<p id="pi&ntilde;ata"></p>', expect)

    def test_entities_in_text_converted_to_unicode(self):
        expect = u'<p>pi\N{LATIN SMALL LETTER N WITH TILDE}ata</p>'
        self.assertSoupEquals("<p>pi&#241;ata</p>", expect)
        self.assertSoupEquals("<p>pi&#xf1;ata</p>", expect)
        self.assertSoupEquals("<p>pi&#Xf1;ata</p>", expect)
        self.assertSoupEquals("<p>pi&ntilde;ata</p>", expect)

    def test_quot_entity_converted_to_quotation_mark(self):
        self.assertSoupEquals("<p>I said &quot;good day!&quot;</p>",
                              '<p>I said "good day!"</p>')

    def test_out_of_range_entity(self):
        expect = u"\N{REPLACEMENT CHARACTER}"
        self.assertSoupEquals("&#10000000000000;", expect)
        self.assertSoupEquals("&#x10000000000000;", expect)
        self.assertSoupEquals("&#1000000000;", expect)

    def test_multipart_strings(self):
        "Mostly to prevent a recurrence of a bug in the html5lib treebuilder."
        soup = self.soup("<html><h2>\nfoo</h2><p></p></html>")
        self.assertEqual("p", soup.h2.string.next_element.name)
        self.assertEqual("p", soup.p.name)

    def test_basic_namespaces(self):
        """Parsers don't need to *understand* namespaces, but at the
        very least they should not choke on namespaces or lose
        data."""

        markup = b'<html xmlns="http://www.w3.org/1999/xhtml" xmlns:mathml="http://www.w3.org/1998/Math/MathML" xmlns:svg="http://www.w3.org/2000/svg"><head></head><body><mathml:msqrt>4</mathml:msqrt><b svg:fill="red"></b></body></html>'
        soup = self.soup(markup)
        self.assertEqual(markup, soup.encode())
        html = soup.html
        self.assertEqual('http://www.w3.org/1999/xhtml', soup.html['xmlns'])
        self.assertEqual(
            'http://www.w3.org/1998/Math/MathML', soup.html['xmlns:mathml'])
        self.assertEqual(
            'http://www.w3.org/2000/svg', soup.html['xmlns:svg'])

    def test_multivalued_attribute_value_becomes_list(self):
        markup = b'<a class="foo bar">'
        soup = self.soup(markup)
        self.assertEqual(['foo', 'bar'], soup.a['class'])

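    # A quick illustrative session for the multi-valued attribute behavior
    # tested just above (hypothetical session, html.parser builder):
    #
    #   >>> soup = BeautifulSoup('<a class="foo bar">', 'html.parser')
    #   >>> soup.a['class']
    #   ['foo', 'bar']
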
    #
    # Generally speaking, tests below this point are more tests of
    # Beautiful Soup than tests of the tree builders. But parsers are
    # weird, so we run these tests separately for every tree builder
    # to detect any differences between them.
    #

    def test_can_parse_unicode_document(self):
        # A seemingly innocuous document... but it's in Unicode! And
        # it contains characters that can't be represented in the
        # encoding found in the declaration! The horror!
        markup = u'<html><head><meta encoding="euc-jp"></head><body>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</body>'
        soup = self.soup(markup)
        self.assertEqual(u'Sacr\xe9 bleu!', soup.body.string)

    def test_soupstrainer(self):
        """Parsers should be able to work with SoupStrainers."""
        strainer = SoupStrainer("b")
        soup = self.soup("A <b>bold</b> <meta/> <i>statement</i>",
                         parse_only=strainer)
        self.assertEqual(soup.decode(), "<b>bold</b>")

    def test_single_quote_attribute_values_become_double_quotes(self):
        self.assertSoupEquals("<foo attr='bar'></foo>",
                              '<foo attr="bar"></foo>')

    def test_attribute_values_with_nested_quotes_are_left_alone(self):
        text = """<foo attr='bar "brawls" happen'>a</foo>"""
        self.assertSoupEquals(text)

    def test_attribute_values_with_double_nested_quotes_get_quoted(self):
        text = """<foo attr='bar "brawls" happen'>a</foo>"""
        soup = self.soup(text)
        soup.foo['attr'] = 'Brawls happen at "Bob\'s Bar"'
        self.assertSoupEquals(
            soup.foo.decode(),
            """<foo attr="Brawls happen at &quot;Bob\'s Bar&quot;">a</foo>""")

    def test_ampersand_in_attribute_value_gets_escaped(self):
        self.assertSoupEquals('<this is="really messed up &amp; stuff"></this>',
                              '<this is="really messed up &amp; stuff"></this>')

        self.assertSoupEquals(
            '<a href="http://example.org?a=1&b=2;3">foo</a>',
            '<a href="http://example.org?a=1&amp;b=2;3">foo</a>')

    def test_escaped_ampersand_in_attribute_value_is_left_alone(self):
        self.assertSoupEquals('<a href="http://example.org?a=1&amp;b=2;3"></a>')

    def test_entities_in_strings_converted_during_parsing(self):
        # Both XML and HTML entities are converted to Unicode characters
        # during parsing.
        text = "<p>&lt;&lt;sacr&eacute; bleu!&gt;&gt;</p>"
        expected = u"<p><<sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>></p>"
        self.assertSoupEquals(text, expected)

    def test_smart_quotes_converted_on_the_way_in(self):
        # Microsoft smart quotes are converted to Unicode characters during
        # parsing.
        quote = b"<p>\x91Foo\x92</p>"
        soup = self.soup(quote)
        self.assertEqual(
            soup.p.string,
            u"\N{LEFT SINGLE QUOTATION MARK}Foo\N{RIGHT SINGLE QUOTATION MARK}")

    def test_non_breaking_spaces_converted_on_the_way_in(self):
        soup = self.soup("<a>&nbsp;&nbsp;</a>")
        self.assertEqual(soup.a.string, u"\N{NO-BREAK SPACE}" * 2)

    def test_entities_converted_on_the_way_out(self):
        text = "<p>&lt;&lt;sacr&eacute; bleu!&gt;&gt;</p>"
        expected = u"<p><<sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>></p>".encode("utf-8")
        soup = self.soup(text)
        self.assertEqual(soup.p.encode("utf-8"), expected)

    def test_real_iso_latin_document(self):
        # Smoke test of interrelated functionality, using an
        # easy-to-understand document.

        # Here it is in Unicode. Note that it claims to be in ISO-Latin-1.
        unicode_html = u'<html><head><meta content="text/html; charset=ISO-Latin-1" http-equiv="Content-type"/></head><body><p>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</p></body></html>'

        # That's because we're going to encode it into ISO-Latin-1, and use
        # that to test.
        iso_latin_html = unicode_html.encode("iso-8859-1")

        # Parse the ISO-Latin-1 HTML.
        soup = self.soup(iso_latin_html)
        # Encode it to UTF-8.
        result = soup.encode("utf-8")

        # What do we expect the result to look like? Well, it would
        # look like unicode_html, except that the META tag would say
        # UTF-8 instead of ISO-Latin-1.
        expected = unicode_html.replace("ISO-Latin-1", "utf-8")

        # And, of course, it would be in UTF-8, not Unicode.
        expected = expected.encode("utf-8")

        # Ta-da!
        self.assertEqual(result, expected)

    def test_real_shift_jis_document(self):
        # Smoke test to make sure the parser can handle a document in
        # Shift-JIS encoding, without choking.
        shift_jis_html = (
            b'<html><head></head><body><pre>'
            b'\x82\xb1\x82\xea\x82\xcdShift-JIS\x82\xc5\x83R\x81[\x83f'
            b'\x83B\x83\x93\x83O\x82\xb3\x82\xea\x82\xbd\x93\xfa\x96{\x8c'
            b'\xea\x82\xcc\x83t\x83@\x83C\x83\x8b\x82\xc5\x82\xb7\x81B'
            b'</pre></body></html>')
        unicode_html = shift_jis_html.decode("shift-jis")
        soup = self.soup(unicode_html)

        # Make sure the parse tree is correctly encoded to various
        # encodings.
        self.assertEqual(soup.encode("utf-8"), unicode_html.encode("utf-8"))
        self.assertEqual(soup.encode("euc_jp"), unicode_html.encode("euc_jp"))

    def test_real_hebrew_document(self):
        # A real-world test to make sure we can convert ISO-8859-8 (a
        # Hebrew encoding) to UTF-8.
        hebrew_document = b'<html><head><title>Hebrew (ISO 8859-8) in Visual Directionality</title></head><body><h1>Hebrew (ISO 8859-8) in Visual Directionality</h1>\xed\xe5\xec\xf9</body></html>'
        soup = self.soup(
            hebrew_document, from_encoding="iso8859-8")
        self.assertEqual(soup.original_encoding, 'iso8859-8')
        self.assertEqual(
            soup.encode('utf-8'),
            hebrew_document.decode("iso8859-8").encode("utf-8"))

    def test_meta_tag_reflects_current_encoding(self):
        # Here's the <meta> tag saying that a document is
        # encoded in Shift-JIS.
        meta_tag = ('<meta content="text/html; charset=x-sjis" '
                    'http-equiv="Content-type"/>')

        # Here's a document incorporating that meta tag.
        shift_jis_html = (
            '<html><head>\n%s\n'
            '<meta http-equiv="Content-language" content="ja"/>'
            '</head><body>Shift-JIS markup goes here.') % meta_tag
        soup = self.soup(shift_jis_html)

        # Parse the document, and the charset is seemingly unaffected.
        parsed_meta = soup.find('meta', {'http-equiv': 'Content-type'})
        content = parsed_meta['content']
        self.assertEqual('text/html; charset=x-sjis', content)

        # But that value is actually a ContentMetaAttributeValue object.
        self.assertTrue(isinstance(content, ContentMetaAttributeValue))

        # And it will take on a value that reflects its current
        # encoding.
        self.assertEqual('text/html; charset=utf8', content.encode("utf8"))

        # For the rest of the story, see TestSubstitutions in
        # test_tree.py.

    def test_html5_style_meta_tag_reflects_current_encoding(self):
        # Here's the <meta> tag saying that a document is
        # encoded in Shift-JIS.
        meta_tag = ('<meta id="encoding" charset="x-sjis" />')

        # Here's a document incorporating that meta tag.
        shift_jis_html = (
            '<html><head>\n%s\n'
            '<meta http-equiv="Content-language" content="ja"/>'
            '</head><body>Shift-JIS markup goes here.') % meta_tag
        soup = self.soup(shift_jis_html)

        # Parse the document, and the charset is seemingly unaffected.
        parsed_meta = soup.find('meta', id="encoding")
        charset = parsed_meta['charset']
        self.assertEqual('x-sjis', charset)

        # But that value is actually a CharsetMetaAttributeValue object.
        self.assertTrue(isinstance(charset, CharsetMetaAttributeValue))

        # And it will take on a value that reflects its current
        # encoding.
        self.assertEqual('utf8', charset.encode("utf8"))

    def test_tag_with_no_attributes_can_have_attributes_added(self):
        data = self.soup("<a>text</a>")
        data.a['foo'] = 'bar'
        self.assertEqual('<a foo="bar">text</a>', data.a.decode())

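# For orientation (hypothetical session): the substitution tested above means
# a CharsetMetaAttributeValue re-renders itself to match whatever encoding
# the document is serialized to:
#
#   >>> soup = BeautifulSoup('<meta charset="x-sjis">', 'html.parser')
#   >>> soup.meta['charset']
#   'x-sjis'
#   >>> soup.meta.encode('utf8')
#   '<meta charset="utf8"/>'
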
class XMLTreeBuilderSmokeTest(object):

    def test_docstring_generated(self):
        soup = self.soup("<root/>")
        self.assertEqual(
            soup.encode(), b'<?xml version="1.0" encoding="utf-8"?>\n<root/>')

    def test_real_xhtml_document(self):
        """A real XHTML document should come out *exactly* the same as it went in."""
        markup = b"""<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN">
<html xmlns="http://www.w3.org/1999/xhtml">
<head><title>Hello.</title></head>
<body>Goodbye.</body>
</html>"""
        soup = self.soup(markup)
        self.assertEqual(
            soup.encode("utf-8"), markup)

    def test_formatter_processes_script_tag_for_xml_documents(self):
        doc = """
  <script type="text/javascript">
  </script>
"""
        soup = BeautifulSoup(doc, "xml")
        # lxml would have stripped this while parsing, but we can add
        # it later.
        soup.script.string = 'console.log("< < hey > > ");'
        encoded = soup.encode()
        self.assertTrue(b"&lt; &lt; hey &gt; &gt;" in encoded)

    def test_can_parse_unicode_document(self):
        markup = u'<?xml version="1.0" encoding="euc-jp"><root>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</root>'
        soup = self.soup(markup)
        self.assertEqual(u'Sacr\xe9 bleu!', soup.root.string)

    def test_popping_namespaced_tag(self):
        markup = '<rss xmlns:dc="foo"><dc:creator>b</dc:creator><dc:date>2012-07-02T20:33:42Z</dc:date><dc:rights>c</dc:rights><image>d</image></rss>'
        soup = self.soup(markup)
        self.assertEqual(
            unicode(soup.rss), markup)

    def test_docstring_includes_correct_encoding(self):
        soup = self.soup("<root/>")
        self.assertEqual(
            soup.encode("latin1"),
            b'<?xml version="1.0" encoding="latin1"?>\n<root/>')

    def test_large_xml_document(self):
        """A large XML document should come out the same as it went in."""
        markup = (b'<?xml version="1.0" encoding="utf-8"?>\n<root>'
                  + b'0' * (2**12)
                  + b'</root>')
        soup = self.soup(markup)
        self.assertEqual(soup.encode("utf-8"), markup)

    def test_tags_are_empty_element_if_and_only_if_they_are_empty(self):
        self.assertSoupEquals("<p>", "<p/>")
        self.assertSoupEquals("<p>foo</p>")

    def test_namespaces_are_preserved(self):
        markup = '<root xmlns:a="http://example.com/" xmlns:b="http://example.net/"><a:foo>This tag is in the a namespace</a:foo><b:foo>This tag is in the b namespace</b:foo></root>'
        soup = self.soup(markup)
        root = soup.root
        self.assertEqual("http://example.com/", root['xmlns:a'])
        self.assertEqual("http://example.net/", root['xmlns:b'])

    def test_closing_namespaced_tag(self):
        markup = '<p xmlns:dc="http://purl.org/dc/elements/1.1/"><dc:date>20010504</dc:date></p>'
        soup = self.soup(markup)
        self.assertEqual(unicode(soup.p), markup)

    def test_namespaced_attributes(self):
        markup = '<foo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><bar xsi:schemaLocation="http://www.example.com"/></foo>'
        soup = self.soup(markup)
        self.assertEqual(unicode(soup.foo), markup)

    def test_namespaced_attributes_xml_namespace(self):
        markup = '<foo xml:lang="fr">bar</foo>'
        soup = self.soup(markup)
        self.assertEqual(unicode(soup.foo), markup)

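# Illustrative session for the XML declaration behavior tested above
# (hypothetical session; it mirrors test_docstring_includes_correct_encoding):
#
#   >>> soup = BeautifulSoup('<root/>', 'xml')
#   >>> soup.encode('latin1')
#   '<?xml version="1.0" encoding="latin1"?>\n<root/>'
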
class HTML5TreeBuilderSmokeTest(HTMLTreeBuilderSmokeTest):
    """Smoke test for a tree builder that supports HTML5."""

    def test_real_xhtml_document(self):
        # Since XHTML is not HTML5, HTML5 parsers are not tested to handle
        # XHTML documents in any particular way.
        pass

    def test_html_tags_have_namespace(self):
        markup = "<a>"
        soup = self.soup(markup)
        self.assertEqual("http://www.w3.org/1999/xhtml", soup.a.namespace)

    def test_svg_tags_have_namespace(self):
        markup = '<svg><circle/></svg>'
        soup = self.soup(markup)
        namespace = "http://www.w3.org/2000/svg"
        self.assertEqual(namespace, soup.svg.namespace)
        self.assertEqual(namespace, soup.circle.namespace)

    def test_mathml_tags_have_namespace(self):
        markup = '<math><msqrt>5</msqrt></math>'
        soup = self.soup(markup)
        namespace = 'http://www.w3.org/1998/Math/MathML'
        self.assertEqual(namespace, soup.math.namespace)
        self.assertEqual(namespace, soup.msqrt.namespace)

    def test_xml_declaration_becomes_comment(self):
        markup = '<?xml version="1.0" encoding="utf-8"?><html></html>'
        soup = self.soup(markup)
        self.assertTrue(isinstance(soup.contents[0], Comment))
        self.assertEqual(soup.contents[0], '?xml version="1.0" encoding="utf-8"?')
        self.assertEqual("html", soup.contents[0].next_element.name)

def skipIf(condition, reason):
    def nothing(test, *args, **kwargs):
        return None

    def decorator(test_item):
        if condition:
            return nothing
        else:
            return test_item

    return decorator

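# Usage sketch for skipIf, mirroring how the test modules later in this diff
# apply it:
#
#   @skipIf(
#       not LXML_PRESENT,
#       "lxml seems not to be present, not testing its tree builder.")
#   class LXMLTreeBuilderSmokeTest(SoupTest, HTMLTreeBuilderSmokeTest):
#       ...
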
@@ -1 +0,0 @@
"The beautifulsoup tests."
@@ -1,141 +0,0 @@
"""Tests of the builder registry."""

import unittest

from bs4 import BeautifulSoup
from bs4.builder import (
    builder_registry as registry,
    HTMLParserTreeBuilder,
    TreeBuilderRegistry,
)

try:
    from bs4.builder import HTML5TreeBuilder
    HTML5LIB_PRESENT = True
except ImportError:
    HTML5LIB_PRESENT = False

try:
    from bs4.builder import (
        LXMLTreeBuilderForXML,
        LXMLTreeBuilder,
    )
    LXML_PRESENT = True
except ImportError:
    LXML_PRESENT = False


class BuiltInRegistryTest(unittest.TestCase):
    """Test the built-in registry with the default builders registered."""

    def test_combination(self):
        if LXML_PRESENT:
            self.assertEqual(registry.lookup('fast', 'html'),
                             LXMLTreeBuilder)

        if LXML_PRESENT:
            self.assertEqual(registry.lookup('permissive', 'xml'),
                             LXMLTreeBuilderForXML)
        self.assertEqual(registry.lookup('strict', 'html'),
                         HTMLParserTreeBuilder)
        if HTML5LIB_PRESENT:
            self.assertEqual(registry.lookup('html5lib', 'html'),
                             HTML5TreeBuilder)

    def test_lookup_by_markup_type(self):
        if LXML_PRESENT:
            self.assertEqual(registry.lookup('html'), LXMLTreeBuilder)
            self.assertEqual(registry.lookup('xml'), LXMLTreeBuilderForXML)
        else:
            self.assertEqual(registry.lookup('xml'), None)
            if HTML5LIB_PRESENT:
                self.assertEqual(registry.lookup('html'), HTML5TreeBuilder)
            else:
                self.assertEqual(registry.lookup('html'), HTMLParserTreeBuilder)

    def test_named_library(self):
        if LXML_PRESENT:
            self.assertEqual(registry.lookup('lxml', 'xml'),
                             LXMLTreeBuilderForXML)
            self.assertEqual(registry.lookup('lxml', 'html'),
                             LXMLTreeBuilder)
        if HTML5LIB_PRESENT:
            self.assertEqual(registry.lookup('html5lib'),
                             HTML5TreeBuilder)

        self.assertEqual(registry.lookup('html.parser'),
                         HTMLParserTreeBuilder)

    def test_beautifulsoup_constructor_does_lookup(self):
        # You can pass in a string.
        BeautifulSoup("", features="html")
        # Or a list of strings.
        BeautifulSoup("", features=["html", "fast"])

        # You'll get an exception if BS can't find an appropriate
        # builder.
        self.assertRaises(ValueError, BeautifulSoup,
                          "", features="no-such-feature")

class RegistryTest(unittest.TestCase):
    """Test the TreeBuilderRegistry class in general."""

    def setUp(self):
        self.registry = TreeBuilderRegistry()

    def builder_for_features(self, *feature_list):
        cls = type('Builder_' + '_'.join(feature_list),
                   (object,), {'features' : feature_list})

        self.registry.register(cls)
        return cls

    def test_register_with_no_features(self):
        builder = self.builder_for_features()

        # Since the builder advertises no features, you can't find it
        # by looking up features.
        self.assertEqual(self.registry.lookup('foo'), None)

        # But you can find it by doing a lookup with no features, if
        # this happens to be the only registered builder.
        self.assertEqual(self.registry.lookup(), builder)

    def test_register_with_features_makes_lookup_succeed(self):
        builder = self.builder_for_features('foo', 'bar')
        self.assertEqual(self.registry.lookup('foo'), builder)
        self.assertEqual(self.registry.lookup('bar'), builder)

    def test_lookup_fails_when_no_builder_implements_feature(self):
        builder = self.builder_for_features('foo', 'bar')
        self.assertEqual(self.registry.lookup('baz'), None)

    def test_lookup_gets_most_recent_registration_when_no_feature_specified(self):
        builder1 = self.builder_for_features('foo')
        builder2 = self.builder_for_features('bar')
        self.assertEqual(self.registry.lookup(), builder2)

    def test_lookup_fails_when_no_tree_builders_registered(self):
        self.assertEqual(self.registry.lookup(), None)

    def test_lookup_gets_most_recent_builder_supporting_all_features(self):
        has_one = self.builder_for_features('foo')
        has_the_other = self.builder_for_features('bar')
        has_both_early = self.builder_for_features('foo', 'bar', 'baz')
        has_both_late = self.builder_for_features('foo', 'bar', 'quux')
        lacks_one = self.builder_for_features('bar')
        has_the_other = self.builder_for_features('foo')

        # There are two builders featuring 'foo' and 'bar', but
        # the one that also features 'quux' was registered later.
        self.assertEqual(self.registry.lookup('foo', 'bar'),
                         has_both_late)

        # There is only one builder featuring 'foo', 'bar', and 'baz'.
        self.assertEqual(self.registry.lookup('foo', 'bar', 'baz'),
                         has_both_early)

    def test_lookup_fails_when_cannot_reconcile_requested_features(self):
        builder1 = self.builder_for_features('foo', 'bar')
        builder2 = self.builder_for_features('foo', 'baz')
        self.assertEqual(self.registry.lookup('bar', 'baz'), None)

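# The lookup semantics exercised above, compressed into one hypothetical
# session (builder names are placeholders, not real classes):
#
#   >>> registry = TreeBuilderRegistry()
#   >>> registry.register(builder_a)          # features: 'foo'
#   >>> registry.register(builder_b)          # features: 'foo', 'bar'
#   >>> registry.lookup('foo') is builder_b   # most recent match wins
#   True
#   >>> registry.lookup('baz') is None        # no builder has the feature
#   True
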
@@ -1,36 +0,0 @@
"Test harness for doctests."

# pylint: disable-msg=E0611,W0142

__metaclass__ = type
__all__ = [
    'additional_tests',
    ]

import atexit
import doctest
import os
#from pkg_resources import (
#    resource_filename, resource_exists, resource_listdir, cleanup_resources)
import unittest

DOCTEST_FLAGS = (
    doctest.ELLIPSIS |
    doctest.NORMALIZE_WHITESPACE |
    doctest.REPORT_NDIFF)


# def additional_tests():
#     "Run the doc tests (README.txt and docs/*, if any exist)"
#     doctest_files = [
#         os.path.abspath(resource_filename('bs4', 'README.txt'))]
#     if resource_exists('bs4', 'docs'):
#         for name in resource_listdir('bs4', 'docs'):
#             if name.endswith('.txt'):
#                 doctest_files.append(
#                     os.path.abspath(
#                         resource_filename('bs4', 'docs/%s' % name)))
#     kwargs = dict(module_relative=False, optionflags=DOCTEST_FLAGS)
#     atexit.register(cleanup_resources)
#     return unittest.TestSuite((
#         doctest.DocFileSuite(*doctest_files, **kwargs)))

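# If the commented-out harness were revived, DOCTEST_FLAGS would be consumed
# like this (illustrative sketch; the file name is hypothetical):
#
#   suite = doctest.DocFileSuite('README.txt', module_relative=False,
#                                optionflags=DOCTEST_FLAGS)
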
@@ -1,85 +0,0 @@
"""Tests to ensure that the html5lib tree builder generates good trees."""

import warnings

try:
    from bs4.builder import HTML5TreeBuilder
    HTML5LIB_PRESENT = True
except ImportError, e:
    HTML5LIB_PRESENT = False
from bs4.element import SoupStrainer
from bs4.testing import (
    HTML5TreeBuilderSmokeTest,
    SoupTest,
    skipIf,
)

@skipIf(
    not HTML5LIB_PRESENT,
    "html5lib seems not to be present, not testing its tree builder.")
class HTML5LibBuilderSmokeTest(SoupTest, HTML5TreeBuilderSmokeTest):
    """See ``HTML5TreeBuilderSmokeTest``."""

    @property
    def default_builder(self):
        return HTML5TreeBuilder()

    def test_soupstrainer(self):
        # The html5lib tree builder does not support SoupStrainers.
        strainer = SoupStrainer("b")
        markup = "<p>A <b>bold</b> statement.</p>"
        with warnings.catch_warnings(record=True) as w:
            soup = self.soup(markup, parse_only=strainer)
        self.assertEqual(
            soup.decode(), self.document_for(markup))

        self.assertTrue(
            "the html5lib tree builder doesn't support parse_only" in
            str(w[0].message))

    def test_correctly_nested_tables(self):
        """html5lib inserts <tbody> tags where other parsers don't."""
        markup = ('<table id="1">'
                  '<tr>'
                  "<td>Here's another table:"
                  '<table id="2">'
                  '<tr><td>foo</td></tr>'
                  '</table></td>')

        self.assertSoupEquals(
            markup,
            '<table id="1"><tbody><tr><td>Here\'s another table:'
            '<table id="2"><tbody><tr><td>foo</td></tr></tbody></table>'
            '</td></tr></tbody></table>')

        self.assertSoupEquals(
            "<table><thead><tr><td>Foo</td></tr></thead>"
            "<tbody><tr><td>Bar</td></tr></tbody>"
            "<tfoot><tr><td>Baz</td></tr></tfoot></table>")

    def test_xml_declaration_followed_by_doctype(self):
        markup = '''<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE html>
<html>
  <head>
  </head>
  <body>
    <p>foo</p>
  </body>
</html>'''
        soup = self.soup(markup)
        # Verify that we can reach the <p> tag; this means the tree is connected.
        self.assertEqual(b"<p>foo</p>", soup.p.encode())

    def test_reparented_markup(self):
        markup = '<p><em>foo</p>\n<p>bar<a></a></em></p>'
        soup = self.soup(markup)
        self.assertEqual(u"<body><p><em>foo</em></p><em>\n</em><p><em>bar<a></a></em></p></body>", soup.body.decode())
        self.assertEqual(2, len(soup.find_all('p')))

    def test_reparented_markup_ends_with_whitespace(self):
        markup = '<p><em>foo</p>\n<p>bar<a></a></em></p>\n'
        soup = self.soup(markup)
        self.assertEqual(u"<body><p><em>foo</em></p><em>\n</em><p><em>bar<a></a></em></p>\n</body>", soup.body.decode())
        self.assertEqual(2, len(soup.find_all('p')))

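# The <tbody> insertions asserted above come from the HTML5 parsing
# algorithm, which html5lib follows closely; illustratively (hypothetical
# session):
#
#   >>> soup = BeautifulSoup('<table><tr><td>x', 'html5lib')
#   >>> soup.table.tbody is not None
#   True
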
@@ -1,19 +0,0 @@
"""Tests to ensure that the html.parser tree builder generates good
trees."""

from bs4.testing import SoupTest, HTMLTreeBuilderSmokeTest
from bs4.builder import HTMLParserTreeBuilder

class HTMLParserTreeBuilderSmokeTest(SoupTest, HTMLTreeBuilderSmokeTest):

    @property
    def default_builder(self):
        return HTMLParserTreeBuilder()

    def test_namespaced_system_doctype(self):
        # html.parser can't handle namespaced doctypes, so skip this one.
        pass

    def test_namespaced_public_doctype(self):
        # html.parser can't handle namespaced doctypes, so skip this one.
        pass

@@ -1,91 +0,0 @@
"""Tests to ensure that the lxml tree builder generates good trees."""

import re
import warnings

try:
    import lxml.etree
    LXML_PRESENT = True
    LXML_VERSION = lxml.etree.LXML_VERSION
except ImportError, e:
    LXML_PRESENT = False
    LXML_VERSION = (0,)

if LXML_PRESENT:
    from bs4.builder import LXMLTreeBuilder, LXMLTreeBuilderForXML

from bs4 import (
    BeautifulSoup,
    BeautifulStoneSoup,
)
from bs4.element import Comment, Doctype, SoupStrainer
from bs4.testing import skipIf
from bs4.tests import test_htmlparser
from bs4.testing import (
    HTMLTreeBuilderSmokeTest,
    XMLTreeBuilderSmokeTest,
    SoupTest,
    skipIf,
)

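# LXML_VERSION above is lxml's own version tuple; the guard further down uses
# it to dodge a segfault fixed in 2.3.5. Illustratively (hypothetical value):
#
#   >>> import lxml.etree
#   >>> lxml.etree.LXML_VERSION
#   (2, 3, 5, 0)
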
@skipIf(
    not LXML_PRESENT,
    "lxml seems not to be present, not testing its tree builder.")
class LXMLTreeBuilderSmokeTest(SoupTest, HTMLTreeBuilderSmokeTest):
    """See ``HTMLTreeBuilderSmokeTest``."""

    @property
    def default_builder(self):
        return LXMLTreeBuilder()

    def test_out_of_range_entity(self):
        self.assertSoupEquals(
            "<p>foo&#10000000000000;bar</p>", "<p>foobar</p>")
        self.assertSoupEquals(
            "<p>foo&#x10000000000000;bar</p>", "<p>foobar</p>")
        self.assertSoupEquals(
            "<p>foo&#1000000000;bar</p>", "<p>foobar</p>")

    # In lxml < 2.3.5, an empty doctype causes a segfault. Skip this
    # test if an old version of lxml is installed.
    @skipIf(
        not LXML_PRESENT or LXML_VERSION < (2,3,5,0),
        "Skipping doctype test for old version of lxml to avoid segfault.")
    def test_empty_doctype(self):
        soup = self.soup("<!DOCTYPE>")
        doctype = soup.contents[0]
        self.assertEqual("", doctype.strip())

    def test_beautifulstonesoup_is_xml_parser(self):
        # Make sure that the deprecated BSS class uses an xml builder
        # if one is installed.
        with warnings.catch_warnings(record=True) as w:
            soup = BeautifulStoneSoup("<b />")
        self.assertEqual(u"<b/>", unicode(soup.b))
        self.assertTrue("BeautifulStoneSoup class is deprecated" in str(w[0].message))

    def test_real_xhtml_document(self):
        """lxml strips the XML definition from an XHTML doc, which is fine."""
        markup = b"""<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN">
<html xmlns="http://www.w3.org/1999/xhtml">
<head><title>Hello.</title></head>
<body>Goodbye.</body>
</html>"""
        soup = self.soup(markup)
        self.assertEqual(
            soup.encode("utf-8").replace(b"\n", b''),
            markup.replace(b'\n', b'').replace(
                b'<?xml version="1.0" encoding="utf-8"?>', b''))


@skipIf(
    not LXML_PRESENT,
    "lxml seems not to be present, not testing its XML tree builder.")
class LXMLXMLTreeBuilderSmokeTest(SoupTest, XMLTreeBuilderSmokeTest):
    """See ``XMLTreeBuilderSmokeTest``."""

    @property
    def default_builder(self):
        return LXMLTreeBuilderForXML()