mirror of https://git.yoctoproject.org/poky
synced 2026-01-30 13:28:43 +01:00
Compare commits
428 Commits
[Commit table: 428 commits, identified only by abbreviated SHA1 (58863ad092 through e95863cee0); the Author, Date, and message columns were empty in the scraped page, so the individual rows are not reproduced here.]
.gitignore (vendored, 5 changed lines)
@@ -8,7 +8,7 @@ scripts/oe-git-proxy-socks
sources/
meta-*/
!meta-skeleton
!meta-selftest
!meta-hob
hob-image-*.bb
*.swp
*.orig
@@ -21,6 +21,3 @@ documentation/user-manual/user-manual.html
documentation/user-manual/user-manual.pdf
documentation/user-manual/user-manual.tgz
pull-*/
bitbake/lib/toaster/contrib/tts/backlog.txt
bitbake/lib/toaster/contrib/tts/log/*
bitbake/lib/toaster/contrib/tts/.cache/*
README (29 changed lines)
@@ -30,29 +30,20 @@ For information about OpenEmbedded, see the OpenEmbedded website:
Where to Send Patches
=====================

As Poky is an integration repository (built using a tool called combo-layer),
patches against the various components should be sent to their respective
upstreams:
As Poky is an integration repository, patches against the various components
should be sent to their respective upstreams.

bitbake:
Git repository: http://git.openembedded.org/bitbake/
Mailing list: bitbake-devel@lists.openembedded.org
bitbake-devel@lists.openembedded.org

documentation:
Git repository: http://git.yoctoproject.org/cgit/cgit.cgi/yocto-docs/
Mailing list: yocto@yoctoproject.org
meta-yocto:
poky@yoctoproject.org

meta-yocto(-bsp):
Git repository: http://git.yoctoproject.org/cgit/cgit.cgi/meta-yocto(-bsp)
Mailing list: poky@yoctoproject.org

Everything else should be sent to the OpenEmbedded Core mailing list. If in
doubt, check the oe-core git repository for the content you intend to modify.
Most everything else should be sent to the OpenEmbedded Core mailing list. If
in doubt, check the oe-core git repository for the content you intend to modify.
Before sending, be sure the patches apply cleanly to the current oe-core git
repository.
openembedded-core@lists.openembedded.org

Git repository: http://git.openembedded.org/openembedded-core/
Mailing list: openembedded-core@lists.openembedded.org

Note: The scripts directory should be treated with extra care as it is a mix of
oe-core and poky-specific files.
Note: The scripts directory should be treated with extra care as it is a mix
of oe-core and poky-specific files.

README.hardware
@@ -251,14 +251,14 @@ if used via a usb card reader):
 5. If using core-image-minimal rootfs, install the modules
    # tar x -C /media/root -f modules-beaglebone.tgz

 6. If using core-image-minimal rootfs, install the kernel zImage into /boot
 6. If using core-image-minimal rootfs, install the kernel uImage into /boot
    directory of rootfs
    # cp zImage-beaglebone.bin /media/root/boot/zImage
    # cp uImage-beaglebone.bin /media/root/boot/uImage

 7. If using core-image-minimal rootfs, also install device tree (DTB) files
    into /boot directory of rootfs
    # cp zImage-am335x-bone.dtb /media/root/boot/am335x-bone.dtb
    # cp zImage-am335x-boneblack.dtb /media/root/boot/am335x-boneblack.dtb
    # cp uImage-am335x-bone.dtb /media/root/boot/am335x-bone.dtb
    # cp uImage-am335x-boneblack.dtb /media/root/boot/am335x-boneblack.dtb

 8. Unmount the SD partitions, insert the SD card into the Beaglebone, and
    boot the Beaglebone
@@ -352,14 +352,11 @@ Setup instructions
------------------

You will need the following:
* RJ45 -> serial ("rollover") cable connected from your PC to the CONSOLE
  port on the device
* Ethernet connected to the first ethernet port on the board

If using NFS as part of the setup process, you will also need:
* NFS root setup on your workstation
* TFTP server installed on your workstation (if fetching the kernel from
  TFTP, see below).
* TFTP server installed on your workstation
* RJ45 -> serial ("rollover") cable connected from your PC to the CONSOLE
  port on the board
* Ethernet connected to the first ethernet port on the board

--- Preparation ---

@@ -367,7 +364,7 @@ Build an image (e.g. core-image-minimal) using "edgerouter" as the MACHINE.
In the following instruction it is based on core-image-minimal. Another target
may be similiar with it.

--- Booting from NFS root / kernel via TFTP ---
--- Booting from NFS root ---

Load the kernel, and boot the system as follows:

bin/bitbake
@@ -23,31 +23,369 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os
import sys

import sys, logging
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)),
                                'lib'))

import optparse
import warnings
from traceback import format_exception
try:
    import bb
except RuntimeError as exc:
    sys.exit(str(exc))

from bb import event
import bb.msg
from bb import cooker
from bb import ui
from bb import server
from bb import cookerdata
from bb.main import bitbake_main, BitBakeConfigParameters, BBMainException

__version__ = "1.27.1"
__version__ = "1.24.0"
logger = logging.getLogger("BitBake")

# Python multiprocessing requires /dev/shm
if not os.access('/dev/shm', os.W_OK | os.X_OK):
    sys.exit("FATAL: /dev/shm does not exist or is not writable")

# Unbuffer stdout to avoid log truncation in the event
# of an unorderly exit as well as to provide timely
# updates to log files for use with tail
try:
    if sys.stdout.name == '<stdout>':
        sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
except:
    pass


def get_ui(config):
    if not config.ui:
        # modify 'ui' attribute because it is also read by cooker
        config.ui = os.environ.get('BITBAKE_UI', 'knotty')

    interface = config.ui

    try:
        # Dynamically load the UI based on the ui name. Although we
        # suggest a fixed set this allows you to have flexibility in which
        # ones are available.
        module = __import__("bb.ui", fromlist = [interface])
        return getattr(module, interface)
    except AttributeError:
        sys.exit("FATAL: Invalid user interface '%s' specified.\n"
                 "Valid interfaces: depexp, goggle, ncurses, hob, knotty [default]." % interface)


# Display bitbake/OE warnings via the BitBake.Warnings logger, ignoring others"""
warnlog = logging.getLogger("BitBake.Warnings")
_warnings_showwarning = warnings.showwarning
def _showwarning(message, category, filename, lineno, file=None, line=None):
    if file is not None:
        if _warnings_showwarning is not None:
            _warnings_showwarning(message, category, filename, lineno, file, line)
    else:
        s = warnings.formatwarning(message, category, filename, lineno)
        warnlog.warn(s)

warnings.showwarning = _showwarning
warnings.filterwarnings("ignore")
warnings.filterwarnings("default", module="(<string>$|(oe|bb)\.)")
warnings.filterwarnings("ignore", category=PendingDeprecationWarning)
warnings.filterwarnings("ignore", category=ImportWarning)
warnings.filterwarnings("ignore", category=DeprecationWarning, module="<string>$")
warnings.filterwarnings("ignore", message="With-statements now directly support multiple context managers")

class BitBakeConfigParameters(cookerdata.ConfigParameters):

    def parseCommandLine(self):
        parser = optparse.OptionParser(
            version = "BitBake Build Tool Core version %s, %%prog version %s" % (bb.__version__, __version__),
            usage = """%prog [options] [recipename/target ...]

Executes the specified task (default is 'build') for a given set of target recipes (.bb files).
It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which
will provide the layer, BBFILES and other configuration information.""")

        parser.add_option("-b", "--buildfile", help = "Execute tasks from a specific .bb recipe directly. WARNING: Does not handle any dependencies from other recipes.",
                          action = "store", dest = "buildfile", default = None)

        parser.add_option("-k", "--continue", help = "Continue as much as possible after an error. While the target that failed and anything depending on it cannot be built, as much as possible will be built before stopping.",
                          action = "store_false", dest = "abort", default = True)

        parser.add_option("-a", "--tryaltconfigs", help = "Continue with builds by trying to use alternative providers where possible.",
                          action = "store_true", dest = "tryaltconfigs", default = False)

        parser.add_option("-f", "--force", help = "Force the specified targets/task to run (invalidating any existing stamp file).",
                          action = "store_true", dest = "force", default = False)

        parser.add_option("-c", "--cmd", help = "Specify the task to execute. The exact options available depend on the metadata. Some examples might be 'compile' or 'populate_sysroot' or 'listtasks' may give a list of the tasks available.",
                          action = "store", dest = "cmd")

        parser.add_option("-C", "--clear-stamp", help = "Invalidate the stamp for the specified task such as 'compile' and then run the default task for the specified target(s).",
                          action = "store", dest = "invalidate_stamp")

        parser.add_option("-r", "--read", help = "Read the specified file before bitbake.conf.",
                          action = "append", dest = "prefile", default = [])

        parser.add_option("-R", "--postread", help = "Read the specified file after bitbake.conf.",
                          action = "append", dest = "postfile", default = [])

        parser.add_option("-v", "--verbose", help = "Output more log message data to the terminal.",
                          action = "store_true", dest = "verbose", default = False)

        parser.add_option("-D", "--debug", help = "Increase the debug level. You can specify this more than once.",
                          action = "count", dest="debug", default = 0)

        parser.add_option("-n", "--dry-run", help = "Don't execute, just go through the motions.",
                          action = "store_true", dest = "dry_run", default = False)

        parser.add_option("-S", "--dump-signatures", help = "Dump out the signature construction information, with no task execution. The SIGNATURE_HANDLER parameter is passed to the handler. Two common values are none and printdiff but the handler may define more/less. none means only dump the signature, printdiff means compare the dumped signature with the cached one.",
                          action = "append", dest = "dump_signatures", default = [], metavar="SIGNATURE_HANDLER")

        parser.add_option("-p", "--parse-only", help = "Quit after parsing the BB recipes.",
                          action = "store_true", dest = "parse_only", default = False)

        parser.add_option("-s", "--show-versions", help = "Show current and preferred versions of all recipes.",
                          action = "store_true", dest = "show_versions", default = False)

        parser.add_option("-e", "--environment", help = "Show the global or per-recipe environment complete with information about where variables were set/changed.",
                          action = "store_true", dest = "show_environment", default = False)

        parser.add_option("-g", "--graphviz", help = "Save dependency tree information for the specified targets in the dot syntax.",
                          action = "store_true", dest = "dot_graph", default = False)

        parser.add_option("-I", "--ignore-deps", help = """Assume these dependencies don't exist and are already provided (equivalent to ASSUME_PROVIDED). Useful to make dependency graphs more appealing""",
                          action = "append", dest = "extra_assume_provided", default = [])

        parser.add_option("-l", "--log-domains", help = """Show debug logging for the specified logging domains""",
                          action = "append", dest = "debug_domains", default = [])

        parser.add_option("-P", "--profile", help = "Profile the command and save reports.",
                          action = "store_true", dest = "profile", default = False)

        parser.add_option("-u", "--ui", help = "The user interface to use (e.g. knotty, hob, depexp).",
                          action = "store", dest = "ui")

        parser.add_option("-t", "--servertype", help = "Choose which server to use, process or xmlrpc.",
                          action = "store", dest = "servertype")

        parser.add_option("", "--token", help = "Specify the connection token to be used when connecting to a remote server.",
                          action = "store", dest = "xmlrpctoken")

        parser.add_option("", "--revisions-changed", help = "Set the exit code depending on whether upstream floating revisions have changed or not.",
                          action = "store_true", dest = "revisions_changed", default = False)

        parser.add_option("", "--server-only", help = "Run bitbake without a UI, only starting a server (cooker) process.",
                          action = "store_true", dest = "server_only", default = False)

        parser.add_option("-B", "--bind", help = "The name/address for the bitbake server to bind to.",
                          action = "store", dest = "bind", default = False)

        parser.add_option("", "--no-setscene", help = "Do not run any setscene tasks. sstate will be ignored and everything needed, built.",
                          action = "store_true", dest = "nosetscene", default = False)

        parser.add_option("", "--remote-server", help = "Connect to the specified server.",
                          action = "store", dest = "remote_server", default = False)

        parser.add_option("-m", "--kill-server", help = "Terminate the remote server.",
                          action = "store_true", dest = "kill_server", default = False)

        parser.add_option("", "--observe-only", help = "Connect to a server as an observing-only client.",
                          action = "store_true", dest = "observe_only", default = False)

        parser.add_option("", "--status-only", help = "Check the status of the remote bitbake server.",
                          action = "store_true", dest = "status_only", default = False)

        options, targets = parser.parse_args(sys.argv)

        # some environmental variables set also configuration options
        if "BBSERVER" in os.environ:
            options.servertype = "xmlrpc"
            options.remote_server = os.environ["BBSERVER"]

        if "BBTOKEN" in os.environ:
            options.xmlrpctoken = os.environ["BBTOKEN"]

        # if BBSERVER says to autodetect, let's do that
        if options.remote_server:
            [host, port] = options.remote_server.split(":", 2)
            port = int(port)
            # use automatic port if port set to -1, means read it from
            # the bitbake.lock file; this is a bit tricky, but we always expect
            # to be in the base of the build directory if we need to have a
            # chance to start the server later, anyway
            if port == -1:
                lock_location = "./bitbake.lock"
                # we try to read the address at all times; if the server is not started,
                # we'll try to start it after the first connect fails, below
                try:
                    lf = open(lock_location, 'r')
                    remotedef = lf.readline()
                    [host, port] = remotedef.split(":")
                    port = int(port)
                    lf.close()
                    options.remote_server = remotedef
                except Exception as e:
                    sys.exit("Failed to read bitbake.lock (%s), invalid port" % str(e))

        return options, targets[1:]


def start_server(servermodule, configParams, configuration, features):
    server = servermodule.BitBakeServer()
    if configParams.bind:
        (host, port) = configParams.bind.split(':')
        server.initServer((host, int(port)))
        configuration.interface = [ server.serverImpl.host, server.serverImpl.port ]
    else:
        server.initServer()
        configuration.interface = []

    try:
        configuration.setServerRegIdleCallback(server.getServerIdleCB())

        cooker = bb.cooker.BBCooker(configuration, features)

        server.addcooker(cooker)
        server.saveConnectionDetails()
    except Exception as e:
        exc_info = sys.exc_info()
        while hasattr(server, "event_queue"):
            try:
                import queue
            except ImportError:
                import Queue as queue
            try:
                event = server.event_queue.get(block=False)
            except (queue.Empty, IOError):
                break
            if isinstance(event, logging.LogRecord):
                logger.handle(event)
        raise exc_info[1], None, exc_info[2]
    server.detach()
    cooker.lock.close()
    return server


def main():

    configParams = BitBakeConfigParameters()
    configuration = cookerdata.CookerConfiguration()
    configuration.setConfigParameters(configParams)

    ui_module = get_ui(configParams)

    # Server type can be xmlrpc or process currently, if nothing is specified,
    # the default server is process
    if configParams.servertype:
        server_type = configParams.servertype
    else:
        server_type = 'process'

    try:
        module = __import__("bb.server", fromlist = [server_type])
        servermodule = getattr(module, server_type)
    except AttributeError:
        sys.exit("FATAL: Invalid server type '%s' specified.\n"
                 "Valid interfaces: xmlrpc, process [default]." % server_type)

    if configParams.server_only:
        if configParams.servertype != "xmlrpc":
            sys.exit("FATAL: If '--server-only' is defined, we must set the servertype as 'xmlrpc'.\n")
        if not configParams.bind:
            sys.exit("FATAL: The '--server-only' option requires a name/address to bind to with the -B option.\n")
        if configParams.remote_server:
            sys.exit("FATAL: The '--server-only' option conflicts with %s.\n" %
                     ("the BBSERVER environment variable" if "BBSERVER" in os.environ else "the '--remote-server' option" ))

    if configParams.bind and configParams.servertype != "xmlrpc":
        sys.exit("FATAL: If '-B' or '--bind' is defined, we must set the servertype as 'xmlrpc'.\n")

    if configParams.remote_server and configParams.servertype != "xmlrpc":
        sys.exit("FATAL: If '--remote-server' is defined, we must set the servertype as 'xmlrpc'.\n")

    if configParams.observe_only and (not configParams.remote_server or configParams.bind):
        sys.exit("FATAL: '--observe-only' can only be used by UI clients connecting to a server.\n")

    if configParams.kill_server and not configParams.remote_server:
        sys.exit("FATAL: '--kill-server' can only be used to terminate a remote server")

    if "BBDEBUG" in os.environ:
        level = int(os.environ["BBDEBUG"])
        if level > configuration.debug:
            configuration.debug = level

    bb.msg.init_msgconfig(configParams.verbose, configuration.debug,
                          configuration.debug_domains)

    # Ensure logging messages get sent to the UI as events
    handler = bb.event.LogHandler()
    if not configParams.status_only:
        # In status only mode there are no logs and no UI
        logger.addHandler(handler)

    # Clear away any spurious environment variables while we stoke up the cooker
    cleanedvars = bb.utils.clean_environment()

    featureset = []
    if not configParams.server_only:
        # Collect the feature set for the UI
        featureset = getattr(ui_module, "featureSet", [])

    if not configParams.remote_server:
        # we start a server with a given configuration
        server = start_server(servermodule, configParams, configuration, featureset)
        bb.event.ui_queue = []
    else:
        # we start a stub server that is actually a XMLRPClient that connects to a real server
        server = servermodule.BitBakeXMLRPCClient(configParams.observe_only, configParams.xmlrpctoken)
        server.saveConnectionDetails(configParams.remote_server)


    if not configParams.server_only:
        try:
            server_connection = server.establishConnection(featureset)
        except Exception as e:
            if configParams.kill_server:
                sys.exit(0)
            bb.fatal("Could not connect to server %s: %s" % (configParams.remote_server, str(e)))

        # Restore the environment in case the UI needs it
        for k in cleanedvars:
            os.environ[k] = cleanedvars[k]

        logger.removeHandler(handler)


        if configParams.status_only:
            server_connection.terminate()
            sys.exit(0)

        if configParams.kill_server:
            server_connection.connection.terminateServer()
            bb.event.ui_queue = []
            sys.exit(0)

        try:
            return ui_module.main(server_connection.connection, server_connection.events, configParams)
        finally:
            bb.event.ui_queue = []
            server_connection.terminate()
    else:
        print("server address: %s, server port: %s" % (server.serverImpl.host, server.serverImpl.port))
        return 0

    return 1

if __name__ == "__main__":
    if __version__ != bb.__version__:
        sys.exit("Bitbake core version and program version mismatch!")
    try:
        sys.exit(bitbake_main(BitBakeConfigParameters(sys.argv),
                              cookerdata.CookerConfiguration()))
    except BBMainException as err:
        sys.exit(err)
        ret = main()
    except bb.BBHandledException:
        sys.exit(1)
        ret = 1
    except Exception:
        ret = 1
        import traceback
        traceback.print_exc()
        sys.exit(1)
    sys.exit(ret)

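The `--remote-server`/BBSERVER handling in parseCommandLine() above encodes a small convention: a port of -1 means "read the real host:port endpoint from the server's bitbake.lock file in the build directory". A minimal standalone sketch of that logic; the helper name is hypothetical and the real code uses `split(":", 2)` with extra error handling:

```python
# Sketch only: resolves "host:port" where port == -1 means "read the actual
# endpoint from the first line of the server's lock file".
def resolve_remote_server(remote_server, lock_location="./bitbake.lock"):
    host, port = remote_server.rsplit(":", 1)
    port = int(port)
    if port == -1:
        # The running server wrote "host:port" as the first line of its
        # lock file; re-read the real endpoint from there.
        with open(lock_location, "r") as lf:
            remotedef = lf.readline().strip()
        host, port = remotedef.rsplit(":", 1)
        port = int(port)
    return host, port

# Example: resolve_remote_server("localhost:-1") reads ./bitbake.lock,
# while resolve_remote_server("build01:8888") returns ("build01", 8888).
```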
bin/bitbake-diffsigs
@@ -46,12 +46,6 @@ logger = logger_create('bitbake-diffsigs')
def find_compare_task(bbhandler, pn, taskname):
    """ Find the most recent signature files for the specified PN/task and compare them """

    def get_hashval(siginfo):
        if siginfo.endswith('.siginfo'):
            return siginfo.rpartition(':')[2].partition('_')[0]
        else:
            return siginfo.rpartition('.')[2]

    if not hasattr(bb.siggen, 'find_siginfo'):
        logger.error('Metadata does not support finding signature data files')
        sys.exit(1)
@@ -60,7 +54,7 @@ def find_compare_task(bbhandler, pn, taskname):
        taskname = 'do_%s' % taskname

    filedates = bb.siggen.find_siginfo(pn, taskname, None, bbhandler.config_data)
    latestfiles = sorted(filedates.keys(), key=lambda f: filedates[f])[-3:]
    latestfiles = sorted(filedates.keys(), key=lambda f: filedates[f])[-2:]
    if not latestfiles:
        logger.error('No sigdata files found matching %s %s' % (pn, taskname))
        sys.exit(1)
@@ -68,16 +62,6 @@ def find_compare_task(bbhandler, pn, taskname):
        logger.error('Only one matching sigdata file found for the specified task (%s %s)' % (pn, taskname))
        sys.exit(1)
    else:
        # It's possible that latestfiles contain 3 elements and the first two have the same hash value.
        # In this case, we delete the second element.
        # The above case is actually the most common one. Because we may have sigdata file and siginfo
        # file having the same hash value. Comparing such two files makes no sense.
        if len(latestfiles) == 3:
            hash0 = get_hashval(latestfiles[0])
            hash1 = get_hashval(latestfiles[1])
            if hash0 == hash1:
                latestfiles.pop(1)

        # Define recursion callback
        def recursecb(key, hash1, hash2):
            hashes = [hash1, hash2]
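The dedup step in find_compare_task() is easy to miss: a sigdata file and a siginfo file carrying the same hash describe the same signature, so comparing them is pointless and one is dropped before diffing. A small runnable sketch of that rule; the parsing comes from the diff, the sample filenames are invented for illustration:

```python
# Sketch: extract the hash embedded in a signature filename, then drop the
# duplicate when the two newest files share a hash.
def get_hashval(siginfo):
    if siginfo.endswith('.siginfo'):
        # hypothetical ".../do_compile.sigdata:1111aaaa_456.siginfo" -> "1111aaaa"
        return siginfo.rpartition(':')[2].partition('_')[0]
    else:
        # hypothetical ".../do_compile.sigdata.1111aaaa" -> "1111aaaa"
        return siginfo.rpartition('.')[2]

files = [
    'stamps/foo/do_compile.sigdata.1111aaaa',               # plain sigdata
    'stamps/foo/do_compile.sigdata:1111aaaa_456.siginfo',   # same hash, siginfo
    'stamps/foo/do_compile.sigdata.2222bbbb',
]
# Same signature twice -> drop the duplicate so the remaining pair differs.
if len(files) == 3 and get_hashval(files[0]) == get_hashval(files[1]):
    files.pop(1)
print(files)  # two entries with distinct hashes remain
```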
bin/bitbake-layers
@@ -5,7 +5,7 @@
# See the help output for details on available commands.

# Copyright (C) 2011 Mentor Graphics Corporation
# Copyright (C) 2011-2015 Intel Corporation
# Copyright (C) 2012 Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -20,15 +20,13 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import cmd
import logging
import os
import sys
import fnmatch
from collections import defaultdict
import argparse
import re
import httplib, urlparse, json
import subprocess

bindir = os.path.dirname(__file__)
topdir = os.path.dirname(bindir)
@@ -41,24 +39,26 @@ import bb.utils
import bb.tinfoil


def logger_create(name, output=sys.stderr):
    logger = logging.getLogger(name)
    console = logging.StreamHandler(output)
    format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
    if output.isatty():
        format.enable_color()
    console.setFormatter(format)
    logger.addHandler(console)
    logger.setLevel(logging.INFO)
    return logger

logger = logger_create('bitbake-layers', sys.stdout)
logger = logging.getLogger('BitBake')


class Commands():
def main(args):
    cmds = Commands()
    if args:
        # Allow user to specify e.g. show-layers instead of show_layers
        args = [args[0].replace('-', '_')] + args[1:]
        cmds.onecmd(' '.join(args))
    else:
        cmds.do_help('')
    return cmds.returncode


class Commands(cmd.Cmd):
    def __init__(self):
        self.bbhandler = None
        self.returncode = 0
        self.bblayers = []
        cmd.Cmd.__init__(self)

    def init_bbhandler(self, config_only = False):
        if not self.bbhandler:
@@ -66,6 +66,27 @@ class Commands():
            self.bblayers = (self.bbhandler.config_data.getVar('BBLAYERS', True) or "").split()
            self.bbhandler.prepare(config_only)

    def default(self, line):
        """Handle unrecognised commands"""
        sys.stderr.write("Unrecognised command or option\n")
        self.do_help('')

    def do_help(self, topic):
        """display general help or help on a specified command"""
        if topic:
            sys.stdout.write('%s: ' % topic)
            cmd.Cmd.do_help(self, topic.replace('-', '_'))
        else:
            sys.stdout.write("usage: bitbake-layers <command> [arguments]\n\n")
            sys.stdout.write("Available commands:\n")
            procnames = list(set(self.get_names()))
            for procname in procnames:
                if procname[:3] == 'do_':
                    sys.stdout.write(" %s\n" % procname[3:].replace('_', '-'))
                    doc = getattr(self, procname).__doc__
                    if doc:
                        sys.stdout.write(" %s\n" % doc.splitlines()[0])

    def do_show_layers(self, args):
        """show current configured layers"""
        self.init_bbhandler(config_only = True)
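For reference, the cmd.Cmd dispatch the old tool relied on maps hyphenated command names onto do_* methods after a '-' to '_' rewrite, exactly as main(args) above does. A minimal sketch with an illustrative class and command:

```python
# Sketch of cmd.Cmd-based dispatch: "show-layers" on the command line is
# rewritten to "show_layers" and routed to the do_show_layers method.
import cmd

class Tool(cmd.Cmd):
    def do_show_layers(self, line):
        """show current configured layers"""
        print("layers: meta meta-yocto")

    def default(self, line):
        print("Unrecognised command: %s" % line)

args = ["show-layers"]
tool = Tool()
tool.onecmd(" ".join([args[0].replace("-", "_")] + args[1:]))
```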
@@ -82,293 +103,6 @@ class Commands():
            logger.plain("%s %s %d" % (layername.ljust(20), layerdir.ljust(40), layerpri))


    def do_add_layer(self, args):
        """Add a layer to bblayers.conf

Adds the specified layer to bblayers.conf
"""
        layerdir = os.path.abspath(args.layerdir)
        if not os.path.exists(layerdir):
            sys.stderr.write("Specified layer directory doesn't exist\n")
            return 1

        layer_conf = os.path.join(layerdir, 'conf', 'layer.conf')
        if not os.path.exists(layer_conf):
            sys.stderr.write("Specified layer directory doesn't contain a conf/layer.conf file\n")
            return 1

        bblayers_conf = os.path.join('conf', 'bblayers.conf')
        if not os.path.exists(bblayers_conf):
            sys.stderr.write("Unable to find bblayers.conf\n")
            return 1

        (notadded, _) = bb.utils.edit_bblayers_conf(bblayers_conf, layerdir, None)
        if notadded:
            for item in notadded:
                sys.stderr.write("Specified layer %s is already in BBLAYERS\n" % item)


    def do_remove_layer(self, args):
        """Remove a layer from bblayers.conf

Removes the specified layer from bblayers.conf
"""
        bblayers_conf = os.path.join('conf', 'bblayers.conf')
        if not os.path.exists(bblayers_conf):
            sys.stderr.write("Unable to find bblayers.conf\n")
            return 1

        if args.layerdir.startswith('*'):
            layerdir = args.layerdir
        else:
            layerdir = os.path.abspath(args.layerdir)
        (_, notremoved) = bb.utils.edit_bblayers_conf(bblayers_conf, None, layerdir)
        if notremoved:
            for item in notremoved:
                sys.stderr.write("No layers matching %s found in BBLAYERS\n" % item)
            return 1


    def get_json_data(self, apiurl):
        proxy_settings = os.environ.get("http_proxy", None)
        conn = None
        _parsedurl = urlparse.urlparse(apiurl)
        path = _parsedurl.path
        query = _parsedurl.query
        def parse_url(url):
            parsedurl = urlparse.urlparse(url)
            if parsedurl.netloc[0] == '[':
                host, port = parsedurl.netloc[1:].split(']', 1)
                if ':' in port:
                    port = port.rsplit(':', 1)[1]
                else:
                    port = None
            else:
                if parsedurl.netloc.count(':') == 1:
                    (host, port) = parsedurl.netloc.split(":")
                else:
                    host = parsedurl.netloc
                    port = None
            return (host, 80 if port is None else int(port))

        if proxy_settings is None:
            host, port = parse_url(apiurl)
            conn = httplib.HTTPConnection(host, port)
            conn.request("GET", path + "?" + query)
        else:
            host, port = parse_url(proxy_settings)
            conn = httplib.HTTPConnection(host, port)
            conn.request("GET", apiurl)

        r = conn.getresponse()
        if r.status != 200:
            raise Exception("Failed to read " + path + ": %d %s" % (r.status, r.reason))
        return json.loads(r.read())


    def get_layer_deps(self, layername, layeritems, layerbranches, layerdependencies, branchnum, selfname=False):
        def layeritems_info_id(items_name, layeritems):
            litems_id = None
            for li in layeritems:
                if li['name'] == items_name:
                    litems_id = li['id']
                    break
            return litems_id

        def layerbranches_info(items_id, layerbranches):
            lbranch = {}
            for lb in layerbranches:
                if lb['layer'] == items_id and lb['branch'] == branchnum:
                    lbranch['id'] = lb['id']
                    lbranch['vcs_subdir'] = lb['vcs_subdir']
                    break
            return lbranch

        def layerdependencies_info(lb_id, layerdependencies):
            ld_deps = []
            for ld in layerdependencies:
                if ld['layerbranch'] == lb_id and not ld['dependency'] in ld_deps:
                    ld_deps.append(ld['dependency'])
            if not ld_deps:
                logger.error("The dependency of layerDependencies is not found.")
            return ld_deps

        def layeritems_info_name_subdir(items_id, layeritems):
            litems = {}
            for li in layeritems:
                if li['id'] == items_id:
                    litems['vcs_url'] = li['vcs_url']
                    litems['name'] = li['name']
                    break
            return litems

        if selfname:
            selfid = layeritems_info_id(layername, layeritems)
            lbinfo = layerbranches_info(selfid, layerbranches)
            if lbinfo:
                selfsubdir = lbinfo['vcs_subdir']
            else:
                logger.error("%s is not found in the specified branch" % layername)
                return
            selfurl = layeritems_info_name_subdir(selfid, layeritems)['vcs_url']
            if selfurl:
                return selfurl, selfsubdir
            else:
                logger.error("Cannot get layer %s git repo and subdir" % layername)
                return
        ldict = {}
        itemsid = layeritems_info_id(layername, layeritems)
        if not itemsid:
            return layername, None
        lbid = layerbranches_info(itemsid, layerbranches)
        if lbid:
            lbid = layerbranches_info(itemsid, layerbranches)['id']
        else:
            logger.error("%s is not found in the specified branch" % layername)
            return None, None
        for dependency in layerdependencies_info(lbid, layerdependencies):
            lname = layeritems_info_name_subdir(dependency, layeritems)['name']
            lurl = layeritems_info_name_subdir(dependency, layeritems)['vcs_url']
            lsubdir = layerbranches_info(dependency, layerbranches)['vcs_subdir']
            ldict[lname] = lurl, lsubdir
        return None, ldict


    def get_fetch_layer(self, fetchdir, url, subdir, fetch_layer):
        layername = self.get_layer_name(url)
        if os.path.splitext(layername)[1] == '.git':
            layername = os.path.splitext(layername)[0]
        repodir = os.path.join(fetchdir, layername)
        layerdir = os.path.join(repodir, subdir)
        if not os.path.exists(repodir):
            if fetch_layer:
                result = subprocess.call('git clone %s %s' % (url, repodir), shell = True)
                if result:
                    logger.error("Failed to download %s" % url)
                    return None, None
                else:
                    return layername, layerdir
            else:
                logger.plain("Repository %s needs to be fetched" % url)
                return layername, layerdir
        elif os.path.exists(layerdir):
            return layername, layerdir
        else:
            logger.error("%s is not in %s" % (url, subdir))
            return None, None


    def do_layerindex_fetch(self, args):
        """Fetches a layer from a layer index along with its dependent layers, and adds them to conf/bblayers.conf.
"""
        self.init_bbhandler(config_only = True)
        apiurl = self.bbhandler.config_data.getVar('BBLAYERS_LAYERINDEX_URL', True)
        if not apiurl:
            logger.error("Cannot get BBLAYERS_LAYERINDEX_URL")
        else:
            if apiurl[-1] != '/':
                apiurl += '/'
            apiurl += "api/"
        apilinks = self.get_json_data(apiurl)
        branches = self.get_json_data(apilinks['branches'])

        branchnum = 0
        for branch in branches:
            if branch['name'] == args.branch:
                branchnum = branch['id']
                break
        if branchnum == 0:
            validbranches = ', '.join([branch['name'] for branch in branches])
            logger.error('Invalid layer branch name "%s". Valid branches: %s' % (args.branch, validbranches))
            return 1

        ignore_layers = []
        for collection in self.bbhandler.config_data.getVar('BBFILE_COLLECTIONS', True).split():
            lname = self.bbhandler.config_data.getVar('BBLAYERS_LAYERINDEX_NAME_%s' % collection, True)
            if lname:
                ignore_layers.append(lname)

        if args.ignore:
            ignore_layers.extend(args.ignore.split(','))

        layeritems = self.get_json_data(apilinks['layerItems'])
        layerbranches = self.get_json_data(apilinks['layerBranches'])
        layerdependencies = self.get_json_data(apilinks['layerDependencies'])
        invaluenames = []
        repourls = {}
        printlayers = []
        def query_dependencies(layers, layeritems, layerbranches, layerdependencies, branchnum):
            depslayer = []
            for layername in layers:
                invaluename, layerdict = self.get_layer_deps(layername, layeritems, layerbranches, layerdependencies, branchnum)
                if layerdict:
                    repourls[layername] = self.get_layer_deps(layername, layeritems, layerbranches, layerdependencies, branchnum, selfname=True)
                    for layer in layerdict:
                        if not layer in ignore_layers:
                            depslayer.append(layer)
                        printlayers.append((layername, layer, layerdict[layer][0], layerdict[layer][1]))
                        if not layer in ignore_layers and not layer in repourls:
                            repourls[layer] = (layerdict[layer][0], layerdict[layer][1])
                if invaluename and not invaluename in invaluenames:
                    invaluenames.append(invaluename)
            return depslayer

        depslayers = query_dependencies(args.layername, layeritems, layerbranches, layerdependencies, branchnum)
        while depslayers:
            depslayer = query_dependencies(depslayers, layeritems, layerbranches, layerdependencies, branchnum)
            depslayers = depslayer
        if invaluenames:
            for invaluename in invaluenames:
                logger.error('Layer "%s" not found in layer index' % invaluename)
            return 1
        logger.plain("%s %s %s %s" % ("Layer".ljust(19), "Required by".ljust(19), "Git repository".ljust(54), "Subdirectory"))
        logger.plain('=' * 115)
        for layername in args.layername:
            layerurl = repourls[layername]
            logger.plain("%s %s %s %s" % (layername.ljust(20), '-'.ljust(20), layerurl[0].ljust(55), layerurl[1]))
        printedlayers = []
        for layer, dependency, gitrepo, subdirectory in printlayers:
            if dependency in printedlayers:
                continue
            logger.plain("%s %s %s %s" % (dependency.ljust(20), layer.ljust(20), gitrepo.ljust(55), subdirectory))
            printedlayers.append(dependency)

        if repourls:
            fetchdir = self.bbhandler.config_data.getVar('BBLAYERS_FETCH_DIR', True)
            if not fetchdir:
                logger.error("Cannot get BBLAYERS_FETCH_DIR")
                return 1
            if not os.path.exists(fetchdir):
                os.makedirs(fetchdir)
            addlayers = []
            for repourl, subdir in repourls.values():
                name, layerdir = self.get_fetch_layer(fetchdir, repourl, subdir, not args.show_only)
                if not name:
                    # Error already shown
                    return 1
                addlayers.append((subdir, name, layerdir))
            if not args.show_only:
                for subdir, name, layerdir in set(addlayers):
                    if os.path.exists(layerdir):
                        if subdir:
                            logger.plain("Adding layer \"%s\" to conf/bblayers.conf" % subdir)
                        else:
                            logger.plain("Adding layer \"%s\" to conf/bblayers.conf" % name)
                        localargs = argparse.Namespace()
                        localargs.layerdir = layerdir
                        self.do_add_layer(localargs)
                    else:
                        break


    def do_layerindex_show_depends(self, args):
        """Find layer dependencies from layer index.
"""
        args.show_only = True
        args.ignore = []
        self.do_layerindex_fetch(args)


    def version_str(self, pe, pv, pr = None):
        verstr = "%s" % pv
        if pr:
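The parse_url() helper in get_json_data() above is self-contained enough to lift out: it extracts host and port from a URL (defaulting to 80) and handles bracketed IPv6 netlocs. A Python 3 sketch, with urllib.parse standing in for the Python 2 urlparse module used in the diff:

```python
from urllib.parse import urlparse

def parse_url(url):
    # Extract (host, port) from a URL; bracketed netlocs like
    # http://[::1]:8080/ are IPv6 literals and need special splitting.
    parsedurl = urlparse(url)
    if parsedurl.netloc[0] == '[':
        host, port = parsedurl.netloc[1:].split(']', 1)
        port = port.rsplit(':', 1)[1] if ':' in port else None
    else:
        if parsedurl.netloc.count(':') == 1:
            host, port = parsedurl.netloc.split(':')
        else:
            host, port = parsedurl.netloc, None
    return host, 80 if port is None else int(port)

print(parse_url("http://layers.openembedded.org/layerindex/api/"))  # ('layers.openembedded.org', 80)
print(parse_url("http://[::1]:8080/api/"))                          # ('::1', 8080)
```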
@@ -381,13 +115,32 @@ Removes the specified layer from bblayers.conf
    def do_show_overlayed(self, args):
        """list overlayed recipes (where the same recipe exists in another layer)

usage: show-overlayed [-f] [-s]

Lists the names of overlayed recipes and the available versions in each
layer, with the preferred version first. Note that skipped recipes that
are overlayed will also be listed, with a " (skipped)" suffix.

Options:
-f instead of the default formatting, list filenames of higher priority
recipes with the ones they overlay indented underneath
-s only list overlayed recipes where the version is the same
"""
        self.init_bbhandler()

        items_listed = self.list_recipes('Overlayed recipes', None, True, args.same_version, args.filenames, True)
        show_filenames = False
        show_same_ver_only = False
        for arg in args.split():
            if arg == '-f':
                show_filenames = True
            elif arg == '-s':
                show_same_ver_only = True
            else:
                sys.stderr.write("show-overlayed: invalid option %s\n" % arg)
                self.do_help('')
                return

        items_listed = self.list_recipes('Overlayed recipes', None, True, show_same_ver_only, show_filenames, True)

        # Check for overlayed .bbclass files
        classes = defaultdict(list)
@@ -414,7 +167,7 @@ are overlayed will also be listed, with a " (skipped)" suffix.
                overlayed_class_found = True

                mainfile = bb.utils.which(bbpath, os.path.join('classes', classfile))
                if args.filenames:
                if show_filenames:
                    logger.plain('%s' % mainfile)
                else:
                    # We effectively have to guess the layer here
@@ -428,7 +181,7 @@ are overlayed will also be listed, with a " (skipped)" suffix.
                for classdir in classdirs:
                    fullpath = os.path.join(classdir, classfile)
                    if fullpath != mainfile:
                        if args.filenames:
                        if show_filenames:
                            print(' %s' % fullpath)
                        else:
                            print(' %s' % self.get_layer_name(os.path.dirname(classdir)))
@@ -443,15 +196,38 @@ are overlayed will also be listed, with a " (skipped)" suffix.
    def do_show_recipes(self, args):
        """list available recipes, showing the layer they are provided by

Lists the names of recipes and the available versions in each
usage: show-recipes [-f] [-m] [pnspec]

Lists the names of overlayed recipes and the available versions in each
layer, with the preferred version first. Optionally you may specify
pnspec to match a specified recipe name (supports wildcards). Note that
skipped recipes will also be listed, with a " (skipped)" suffix.

Options:
-f instead of the default formatting, list filenames of higher priority
recipes with other available recipes indented underneath
-m only list where multiple recipes (in the same layer or different
layers) exist for the same recipe name
"""
        self.init_bbhandler()

        show_filenames = False
        show_multi_provider_only = False
        pnspec = None
        title = 'Available recipes:'
        self.list_recipes(title, args.pnspec, False, False, args.filenames, args.multiple)
        for arg in args.split():
            if arg == '-f':
                show_filenames = True
            elif arg == '-m':
                show_multi_provider_only = True
            elif not arg.startswith('-'):
                pnspec = arg
                title = 'Available recipes matching %s:' % pnspec
            else:
                sys.stderr.write("show-recipes: invalid option %s\n" % arg)
                self.do_help('')
                return
        self.list_recipes(title, pnspec, False, False, show_filenames, show_multi_provider_only)


    def list_recipes(self, title, pnspec, show_overlayed_only, show_same_ver_only, show_filenames, show_multi_provider_only):
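The pre-argparse commands above parse their flags by hand out of a raw argument string, as do_show_recipes() does. A standalone sketch of that loop, keeping the -f/-m flags from the diff and treating everything else as illustrative:

```python
import sys

def parse_show_recipes_args(args):
    # Manual flag parsing in the style of the old cmd.Cmd-based commands:
    # walk the whitespace-split arguments and set booleans as flags appear.
    show_filenames = False
    show_multi_provider_only = False
    pnspec = None
    for arg in args.split():
        if arg == '-f':
            show_filenames = True
        elif arg == '-m':
            show_multi_provider_only = True
        elif not arg.startswith('-'):
            pnspec = arg  # bare word = recipe name pattern
        else:
            sys.stderr.write("show-recipes: invalid option %s\n" % arg)
            return None
    return show_filenames, show_multi_provider_only, pnspec

print(parse_show_recipes_args("-f -m busybox*"))  # (True, True, 'busybox*')
```

The argparse conversion in this diff removes exactly this boilerplate: each flag becomes a declared argument, and invalid options are rejected by the parser instead of by hand-written error paths.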
@@ -501,13 +277,7 @@ skipped recipes will also be listed, with a " (skipped)" suffix.

            if len(allproviders[p]) > 1 or not show_multi_provider_only:
                pref = preferred_versions[p]
                realfn = bb.cache.Cache.virtualfn2realfn(pref[1])
                preffile = realfn[0]
                # We only display once per recipe, we should prefer non extended versions of the
                # recipe if present (so e.g. in OpenEmbedded, openssl rather than nativesdk-openssl
                # which would otherwise sort first).
                if realfn[1] and realfn[0] in self.bbhandler.cooker.recipecache.pkg_fn:
                    continue
                preffile = bb.cache.Cache.virtualfn2realfn(pref[1])[0]
                if preffile not in preffiles:
                    preflayer = self.get_file_layer(preffile)
                    multilayer = False
@@ -537,7 +307,9 @@ skipped recipes will also be listed, with a " (skipped)" suffix.


    def do_flatten(self, args):
        """flatten layer configuration into a separate output directory.
        """flattens layer configuration into a separate output directory.

usage: flatten [layer1 layer2 [layer3]...] <outputdir>

Takes the specified layers (or all layers in the current layer
configuration if none are specified) and builds a "flattened" directory
@@ -559,19 +331,26 @@ bbappends in the layers interact, and then attempt to use the new output
layer together with that other layer, you may no longer get the same
build results (as the layer priority order has effectively changed).
"""
        if len(args.layer) == 1:
            logger.error('If you specify layers to flatten you must specify at least two')
            return 1
        arglist = args.split()
        if len(arglist) < 1:
            logger.error('Please specify an output directory')
            self.do_help('flatten')
            return

        outputdir = args.outputdir
        if len(arglist) == 2:
            logger.error('If you specify layers to flatten you must specify at least two')
            self.do_help('flatten')
            return

        outputdir = arglist[-1]
        if os.path.exists(outputdir) and os.listdir(outputdir):
            logger.error('Directory %s exists and is non-empty, please clear it out first' % outputdir)
            return 1
            return

        self.init_bbhandler()
        layers = self.bblayers
        if len(args.layer) > 2:
            layernames = args.layer
        if len(arglist) > 2:
            layernames = arglist[:-1]
            found_layernames = []
            found_layerdirs = []
            for layerdir in layers:
@@ -720,12 +499,14 @@ build results (as the layer priority order has effectively changed).
def do_show_appends(self, args):
"""list bbappend files and recipe files they apply to

Lists recipes with the bbappends that apply to them as subitems.
usage: show-appends

Recipes are listed with the bbappends that apply to them as subitems.
"""
self.init_bbhandler()
if not self.bbhandler.cooker.collection.appendlist:
logger.plain('No append files found')
return 0
return

logger.plain('=== Appended recipes ===')

@@ -764,6 +545,7 @@ Lists recipes with the bbappends that apply to them as subitems.
if best_filename in missing:
logger.warn('%s: missing append for preferred version',
best_filename)
self.returncode |= 1


def get_appends_for_files(self, filenames):
@@ -782,13 +564,29 @@ Lists recipes with the bbappends that apply to them as subitems.
return appended, notappended

def do_show_cross_depends(self, args):
"""Show dependencies between recipes that cross layer boundaries.
"""figure out the dependency between recipes that crosses a layer boundary.

Figure out the dependencies between recipes that cross layer boundaries.
usage: show-cross-depends [-f] [-i layer1[,layer2[,layer3...]]]

NOTE: .bbappend files can impact the dependencies.
Figure out the dependency between recipes that crosses a layer boundary.

Options:
-f show full file path
-i ignore dependencies on items in the specified layer(s)

NOTE:
The .bbappend file can impact the dependency.
"""
ignore_layers = (args.ignore or '').split(',')
import optparse

parser = optparse.OptionParser(usage="show-cross-depends [-f] [-i layer1[,layer2[,layer3...]]]")
parser.add_option("-f", "",
action="store_true", dest="show_filenames")
parser.add_option("-i", "",
action="store", dest="ignore_layers", default="")

options, args = parser.parse_args(sys.argv)
ignore_layers = options.ignore_layers.split(',')

self.init_bbhandler()

@@ -814,7 +612,7 @@ NOTE: .bbappend files can impact the dependencies.
self.bbhandler.config_data,
self.bbhandler.cooker_data,
self.bbhandler.cooker_data.pkg_pn)
self.check_cross_depends("DEPENDS", layername, f, best[3], args.filenames, ignore_layers)
self.check_cross_depends("DEPENDS", layername, f, best[3], options.show_filenames, ignore_layers)

# The RDEPENDS
all_rdeps = self.bbhandler.cooker_data.rundeps[f].values()
@@ -834,7 +632,7 @@ NOTE: .bbappend files can impact the dependencies.
best = bb.providers.filterProvidersRunTime(all_p, rdep,
self.bbhandler.config_data,
self.bbhandler.cooker_data)[0][0]
self.check_cross_depends("RDEPENDS", layername, f, best, args.filenames, ignore_layers)
self.check_cross_depends("RDEPENDS", layername, f, best, options.show_filenames, ignore_layers)

# The RRECOMMENDS
all_rrecs = self.bbhandler.cooker_data.runrecs[f].values()
@@ -854,7 +652,7 @@ NOTE: .bbappend files can impact the dependencies.
best = bb.providers.filterProvidersRunTime(all_p, rrec,
self.bbhandler.config_data,
self.bbhandler.cooker_data)[0][0]
self.check_cross_depends("RRECOMMENDS", layername, f, best, args.filenames, ignore_layers)
self.check_cross_depends("RRECOMMENDS", layername, f, best, options.show_filenames, ignore_layers)

# The inherit class
cls_re = re.compile('classes/')
@@ -869,7 +667,7 @@ NOTE: .bbappend files can impact the dependencies.
continue
inherit_layername = self.get_file_layer(cls)
if inherit_layername != layername and not inherit_layername in ignore_layers:
if not args.filenames:
if not options.show_filenames:
f_short = self.remove_layer_prefix(f)
cls = self.remove_layer_prefix(cls)
else:
@@ -889,7 +687,7 @@ NOTE: .bbappend files can impact the dependencies.
if pv_re.search(needed_file) and f in self.bbhandler.cooker_data.pkg_pepvpr:
pv = self.bbhandler.cooker_data.pkg_pepvpr[f][1]
needed_file = re.sub(r"\${PV}", pv, needed_file)
self.print_cross_files(bbpath, keyword, layername, f, needed_file, args.filenames, ignore_layers)
self.print_cross_files(bbpath, keyword, layername, f, needed_file, options.show_filenames, ignore_layers)
line = fnfile.readline()
fnfile.close()

@@ -916,7 +714,7 @@ NOTE: .bbappend files can impact the dependencies.
bbclass=".bbclass"
# Find a 'require/include xxxx'
if m:
self.print_cross_files(bbpath, keyword, layername, f, m.group(1) + bbclass, args.filenames, ignore_layers)
self.print_cross_files(bbpath, keyword, layername, f, m.group(1) + bbclass, options.show_filenames, ignore_layers)
line = ffile.readline()
ffile.close()

@@ -956,84 +754,5 @@ NOTE: .bbappend files can impact the dependencies.

logger.plain("%s %s %s" % (f, keyword, best_realfn))

def main():

cmds = Commands()

def add_command(cmdname, function, *args, **kwargs):
# Convert docstring for function to help (one-liner shown in main --help) and description (shown in subcommand --help)
docsplit = function.__doc__.splitlines()
help = docsplit[0]
if len(docsplit) > 1:
desc = '\n'.join(docsplit[1:])
else:
desc = help
subparser = subparsers.add_parser(cmdname, *args, help=help, description=desc, formatter_class=argparse.RawTextHelpFormatter, **kwargs)
subparser.set_defaults(func=function)
return subparser

parser = argparse.ArgumentParser(description="BitBake layers utility",
epilog="Use %(prog)s <subcommand> --help to get help on a specific command")
parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
parser.add_argument('-q', '--quiet', help='Print only errors', action='store_true')
subparsers = parser.add_subparsers(title='subcommands', metavar='<subcommand>')

parser_show_layers = add_command('show-layers', cmds.do_show_layers)

parser_add_layer = add_command('add-layer', cmds.do_add_layer)
parser_add_layer.add_argument('layerdir', help='Layer directory to add')

parser_remove_layer = add_command('remove-layer', cmds.do_remove_layer)
parser_remove_layer.add_argument('layerdir', help='Layer directory to remove (wildcards allowed, enclose in quotes to avoid shell expansion)')
parser_remove_layer.set_defaults(func=cmds.do_remove_layer)

parser_show_overlayed = add_command('show-overlayed', cmds.do_show_overlayed)
parser_show_overlayed.add_argument('-f', '--filenames', help='instead of the default formatting, list filenames of higher priority recipes with the ones they overlay indented underneath', action='store_true')
parser_show_overlayed.add_argument('-s', '--same-version', help='only list overlayed recipes where the version is the same', action='store_true')

parser_show_recipes = add_command('show-recipes', cmds.do_show_recipes)
parser_show_recipes.add_argument('-f', '--filenames', help='instead of the default formatting, list filenames of higher priority recipes with the ones they overlay indented underneath', action='store_true')
parser_show_recipes.add_argument('-m', '--multiple', help='only list where multiple recipes (in the same layer or different layers) exist for the same recipe name', action='store_true')
parser_show_recipes.add_argument('pnspec', nargs='?', help='optional recipe name specification (wildcards allowed, enclose in quotes to avoid shell expansion)')

parser_show_appends = add_command('show-appends', cmds.do_show_appends)

parser_flatten = add_command('flatten', cmds.do_flatten)
parser_flatten.add_argument('layer', nargs='*', help='Optional layer(s) to flatten (otherwise all are flattened)')
parser_flatten.add_argument('outputdir', help='Output directory')

parser_show_cross_depends = add_command('show-cross-depends', cmds.do_show_cross_depends)
parser_show_cross_depends.add_argument('-f', '--filenames', help='show full file path', action='store_true')
parser_show_cross_depends.add_argument('-i', '--ignore', help='ignore dependencies on items in the specified layer(s) (split multiple layer names with commas, no spaces)', metavar='LAYERNAME')

parser_layerindex_fetch = add_command('layerindex-fetch', cmds.do_layerindex_fetch)
parser_layerindex_fetch.add_argument('-n', '--show-only', help='show dependencies and do nothing else', action='store_true')
parser_layerindex_fetch.add_argument('-b', '--branch', help='branch name to fetch (default %(default)s)', default='master')
parser_layerindex_fetch.add_argument('-i', '--ignore', help='assume the specified layers do not need to be fetched/added (separate multiple layers with commas, no spaces)', metavar='LAYER')
parser_layerindex_fetch.add_argument('layername', nargs='+', help='layer to fetch')

parser_layerindex_show_depends = add_command('layerindex-show-depends', cmds.do_layerindex_show_depends)
parser_layerindex_show_depends.add_argument('-b', '--branch', help='branch name to fetch (default %(default)s)', default='master')
parser_layerindex_show_depends.add_argument('layername', nargs='+', help='layer to query')

args = parser.parse_args()

if args.debug:
logger.setLevel(logging.DEBUG)
elif args.quiet:
logger.setLevel(logging.ERROR)

ret = args.func(args)

return ret


if __name__ == "__main__":
try:
ret = main()
except Exception:
ret = 1
import traceback
traceback.print_exc(5)
sys.exit(ret)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]) or 0)

@@ -26,30 +26,24 @@ except RuntimeError as exc:
sys.exit(str(exc))

def usage():
print('usage: [BB_SKIP_NETTESTS=yes] %s [-v] [testname1 [testname2]...]' % os.path.basename(sys.argv[0]))
print('usage: %s [testname1 [testname2]...]' % os.path.basename(sys.argv[0]))

verbosity = 1

tests = sys.argv[1:]
if '-v' in sys.argv:
tests.remove('-v')
verbosity = 2

if tests:
if len(sys.argv) > 1:
if '--help' in sys.argv[1:]:
usage()
sys.exit(0)

tests = sys.argv[1:]
else:
tests = ["bb.tests.codeparser",
"bb.tests.cow",
"bb.tests.data",
"bb.tests.fetch",
"bb.tests.parse",
"bb.tests.utils"]

for t in tests:
t = '.'.join(t.split('.')[:3])
__import__(t)

unittest.main(argv=["bitbake-selftest"] + tests, verbosity=verbosity)
unittest.main(argv=["bitbake-selftest"] + tests)

@@ -24,15 +24,6 @@ if sys.argv[1] == "decafbadbad":
except:
import profile

# Unbuffer stdout to avoid log truncation in the event
# of an unorderly exit as well as to provide timely
# updates to log files for use with tail
try:
if sys.stdout.name == '<stdout>':
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
except:
pass

logger = logging.getLogger("BitBake")

try:
@@ -34,7 +34,7 @@ from bb.ui.crumbs.hig.deployimagedialog import DeployImageDialog
from bb.ui.crumbs.hig.imageselectiondialog import ImageSelectionDialog

# I put all the fs bitbake supported here. Need more test.
DEPLOYABLE_IMAGE_TYPES = ["jffs2", "cramfs", "ext2", "ext3", "ext4", "btrfs", "squashfs", "ubi", "vmdk"]
DEPLOYABLE_IMAGE_TYPES = ["jffs2", "cramfs", "ext2", "ext3", "btrfs", "squashfs", "ubi", "vmdk"]
Title = "USB Image Writer"

class DeployWindow(gtk.Window):
@@ -1,4 +1,4 @@
#!/bin/sh
#!/bin/bash
# (c) 2013 Intel Corp.

# This program is free software; you can redistribute it and/or modify
@@ -28,87 +28,78 @@

# Helper function to kill a background toaster development server

webserverKillAll()
function webserverKillAll()
{
local pidfile
for pidfile in ${BUILDDIR}/.toastermain.pid; do
if [ -f ${pidfile} ]; then
pid=`cat ${pidfile}`
while kill -0 $pid 2>/dev/null; do
kill -SIGTERM -$pid 2>/dev/null
sleep 1
# Kill processes if they are still running - may happen in interactive shells
ps fux | grep "python.*manage.py runserver" | awk '{print $2}' | xargs kill
done
rm ${pidfile}
fi
done
local pidfile
for pidfile in ${BUILDDIR}/.toastermain.pid; do
if [ -f ${pidfile} ]; then
while kill -0 $(< ${pidfile}) 2>/dev/null; do
kill -SIGTERM -$(< ${pidfile}) 2>/dev/null
sleep 1;
# Kill processes if they are still running - may happen in interactive shells
ps fux | grep "python.*manage.py" | awk '{print $2}' | xargs kill
done;
rm ${pidfile}
fi
done
}

webserverStartAll()
function webserverStartAll()
{
# do not start if toastermain points to a valid process
if ! cat "${BUILDDIR}/.toastermain.pid" 2>/dev/null | xargs -I{} kill -0 {} ; then
retval=1
rm "${BUILDDIR}/.toastermain.pid"
fi

retval=0
if [ "$TOASTER_MANAGED" '=' '1' ]; then
python $BBBASEDIR/lib/toaster/manage.py syncdb || retval=1
else
python $BBBASEDIR/lib/toaster/manage.py syncdb --noinput || retval=1
fi
python $BBBASEDIR/lib/toaster/manage.py migrate orm || retval=2
if [ $retval -eq 1 ]; then
echo "Failed db sync, stopping system start" 1>&2
elif [ $retval -eq 2 ]; then
printf "\nError on migration, trying to recover... \n"
python $BBBASEDIR/lib/toaster/manage.py migrate orm 0001_initial --fake
retval=0
python $BBBASEDIR/lib/toaster/manage.py migrate orm || retval=1
fi
if [ "$TOASTER_MANAGED" = '1' ]; then
python $BBBASEDIR/lib/toaster/manage.py migrate bldcontrol || retval=1
python $BBBASEDIR/lib/toaster/manage.py checksettings --traceback || retval=1
fi
if [ $retval -eq 0 ]; then
echo "Starting webserver..."
python $BBBASEDIR/lib/toaster/manage.py runserver "0.0.0.0:$WEB_PORT" </dev/null >>${BUILDDIR}/toaster_web.log 2>&1 & echo $! >${BUILDDIR}/.toastermain.pid
sleep 1
if ! cat "${BUILDDIR}/.toastermain.pid" | xargs -I{} kill -0 {} ; then
# do not start if toastermain points to a valid process
if ! cat "${BUILDDIR}/.toastermain.pid" 2>/dev/null | xargs -I{} kill -0 {} ; then
retval=1
rm "${BUILDDIR}/.toastermain.pid"
else
echo "Webserver address: http://0.0.0.0:$WEB_PORT/"
fi
fi
return $retval

retval=0
python $BBBASEDIR/lib/toaster/manage.py syncdb || retval=1
python $BBBASEDIR/lib/toaster/manage.py migrate orm || retval=2
if [ $retval -eq 1 ]; then
echo "Failed db sync, stopping system start" 1>&2
elif [ $retval -eq 2 ]; then
echo -e "\nError on migration, trying to recover... \n"
python $BBBASEDIR/lib/toaster/manage.py migrate orm 0001_initial --fake
retval=0
python $BBBASEDIR/lib/toaster/manage.py migrate orm || retval=1
fi
if [ "x$TOASTER_MANAGED" == "x1" ]; then
python $BBBASEDIR/lib/toaster/manage.py migrate bldcontrol || retval=1
python $BBBASEDIR/lib/toaster/manage.py checksettings --traceback || retval=1
fi
if [ $retval -eq 0 ]; then
echo "Starting webserver"
python $BBBASEDIR/lib/toaster/manage.py runserver 0.0.0.0:8000 </dev/null >${BUILDDIR}/toaster_web.log 2>&1 & echo $! >${BUILDDIR}/.toastermain.pid
sleep 1
if ! cat "${BUILDDIR}/.toastermain.pid" | xargs -I{} kill -0 {} ; then
retval=1
rm "${BUILDDIR}/.toastermain.pid"
fi
fi
return $retval
}

# Helper functions to add a special configuration file

addtoConfiguration()
function addtoConfiguration()
{
file=$1
shift
echo "#Created by toaster start script" > ${BUILDDIR}/conf/$file
for var in "$@"; do echo $var >> ${BUILDDIR}/conf/$file; done
echo "#Created by toaster start script" > ${BUILDDIR}/conf/$2
echo $1 >> ${BUILDDIR}/conf/$2
}

INSTOPSYSTEM=0

# define the stop command
stop_system()
function stop_system()
{
# prevent reentry
if [ $INSTOPSYSTEM -eq 1 ]; then return; fi
if [ $INSTOPSYSTEM == 1 ]; then return; fi
INSTOPSYSTEM=1
if [ -f ${BUILDDIR}/.toasterui.pid ]; then
kill `cat ${BUILDDIR}/.toasterui.pid` 2>/dev/null
kill $(< ${BUILDDIR}/.toasterui.pid ) 2>/dev/null
rm ${BUILDDIR}/.toasterui.pid
fi
BBSERVER=0.0.0.0:-1 bitbake -m
BBSERVER=0.0.0.0:8200 bitbake -m
unset BBSERVER
webserverKillAll
# force stop any misbehaving bitbake server
@@ -118,54 +109,93 @@ stop_system()
INSTOPSYSTEM=0
}

check_pidbyfile() {
[ -e $1 ] && kill -0 `cat $1` 2>/dev/null
function check_pidbyfile() {
[ -e $1 ] && kill -0 $(< $1) 2>/dev/null
}


notify_chldexit() {
if [ $NOTOASTERUI -eq 0 ]; then
function notify_chldexit() {
if [ $NOTOASTERUI == 0 ]; then
check_pidbyfile ${BUILDDIR}/.toasterui.pid && return
stop_system
fi
}


verify_prereq() {
# Verify prerequisites
BBBASEDIR=`dirname ${BASH_SOURCE}`/..
RUNNING=0

if ! echo "import django; print (1,) == django.VERSION[0:1] and django.VERSION[1:2][0] in (6,)" | python 2>/dev/null | grep True >/dev/null; then
printf "This program needs Django 1.6. Please install with\n\npip install django==1.6\n"
return 2
if [ -z "$ZSH_NAME" ] && [ `basename \"$0\"` = `basename \"$BASH_SOURCE\"` ]; then
# We are called as standalone. We refuse to run in a build environment - we need the interactive mode for that.
# Start just the web server, point the web browser to the interface, and start any Django services.

if [ -n "$BUILDDIR" ]; then
echo -e "Error: build/ directory detected. Toaster will not start in managed mode if a build environment is detected.\nUse a clean terminal to start Toaster." 1>&2;
exit 1;
fi

if ! echo "import south; print reduce(lambda x, y: 2 if x==2 else 0 if x == 0 else y, map(lambda x: 1+cmp(x[1]-x[0],0), zip([0,8,4], map(int,south.__version__.split(\".\"))))) > 0" | python 2>/dev/null | grep True >/dev/null; then
printf "This program needs South 0.8.4. Please install with\n\npip install south==0.8.4\n"
return 2
# Define a fake builddir where only the pid files are actually created. No real builds will take place here.
BUILDDIR=/tmp
RUNNING=1
function trap_ctrlc() {
echo "** Stopping system"
webserverKillAll
RUNNING=0
}
TOASTER_MANAGED=1
export TOASTER_MANAGED=1
if ! webserverStartAll; then
echo "Failed to start the web server, stopping" 1>&2;
exit 1;
fi
return 0
}


# read command line parameters
if [ -n "$BASH_SOURCE" ] ; then
TOASTER=${BASH_SOURCE}
elif [ -n "$ZSH_NAME" ] ; then
TOASTER=${(%):-%x}
else
TOASTER=$0
xdg-open http://0.0.0.0:8000/ >/dev/null 2>&1 &
trap trap_ctrlc SIGINT
echo "Running. Stop with Ctrl-C"
while [ $RUNNING -gt 0 ]; do
python $BBBASEDIR/lib/toaster/manage.py runbuilds
sleep 1
done
echo "**** Exit"
exit 0
fi

BBBASEDIR=`dirname $TOASTER`/..
# We make sure we're running in the current shell and in a good environment
if [ -z "$BUILDDIR" ] || [ -z `which bitbake` ]; then
echo "Error: Build environment is not setup or bitbake is not in path." 1>&2;
return 2
fi

RUNNING=0


# Verify prerequisites

if ! echo "import django; print (1,) == django.VERSION[0:1] and django.VERSION[1:2][0] in (5,6)" | python 2>/dev/null | grep True >/dev/null; then
echo -e "This program needs Django 1.5 or 1.6. Please install with\n\npip install django==1.6"
return 2
fi

if ! echo "import south; print [0,8,4] == map(int,south.__version__.split(\".\"))" | python 2>/dev/null | grep True >/dev/null; then
echo -e "This program needs South 0.8.4. Please install with\n\npip install south==0.8.4"
return 2
fi


# Determine the action. If specified by arguments, fine, if not, toggle it
if [ "x$1" == "xstart" ] || [ "x$1" == "xstop" ]; then
CMD="$1"
else
if [ -z "$BBSERVER" ]; then
CMD="start"
else
CMD="stop"
fi;
fi

NOTOASTERUI=0
WEBSERVER=1
TOASTER_BRBE=""
WEB_PORT="8000"
NOBROWSER=0

for param in $*; do
case $param in
noui )
@@ -174,170 +204,54 @@ for param in $*; do
noweb )
WEBSERVER=0
;;
nobrowser )
NOBROWSER=1
;;
brbe=* )
TOASTER_BRBE=$'\n'"TOASTER_BRBE=\""${param#*=}"\""
;;
webport=*)
WEB_PORT="${param#*=}"
esac
done

[ -n "${BASH_SOURCE}" ] && SRCFILE=${BASH_SOURCE} || SRCFILE=$_

if [ `basename \"$0\"` = `basename \"${SRCFILE}\"` ]; then
# We are called as standalone. We refuse to run in a build environment - we need the interactive mode for that.
# Start just the web server, point the web browser to the interface, and start any Django services.

if ! verify_prereq; then
echo "Error: Could not verify that the needed dependencies are installed. Please use virtualenv and pip to install dependencies listed in toaster-requirements.txt" 1>&2
exit 1
fi

if [ -n "$BUILDDIR" ]; then
printf "Error: It looks like you sourced oe-init-build-env. Toaster cannot start in build mode from an oe-core build environment.\n You should be starting Toaster from a new terminal window." 1>&2
exit 1
fi

# Define a fake builddir where only the pid files are actually created. No real builds will take place here.
BUILDDIR=/tmp/toaster_$$
if [ -d "$BUILDDIR" ]; then
echo "Previous toaster run directory $BUILDDIR found, cowardly refusing to start. Please remove the directory when that toaster instance is over" 2>&1
exit 1
fi

mkdir -p "$BUILDDIR"

RUNNING=1
trap_ctrlc() {
echo "** Stopping system"
webserverKillAll
RUNNING=0
}

do_cleanup() {
find "$BUILDDIR" -type f | xargs rm
rmdir "$BUILDDIR"
}
cleanup() {
if grep -ir error "$BUILDDIR" >/dev/null; then
if grep -irn "That port is already in use" "$BUILDDIR"; then
echo "You can use the \"webport=PORTNUMBER\" parameter to start Toaster on a different port (port $WEB_PORT is already in use)"
do_cleanup
else
printf "\nErrors found in the Toaster log files present in '$BUILDDIR'. Directory will not be cleaned.\n Please review the errors and notify toaster@yoctoproject.org or submit a bug https://bugzilla.yoctoproject.org/enter_bug.cgi?product=Toaster"
fi
else
echo "No errors found, removing the run directory '$BUILDDIR'"
do_cleanup
fi
}
TOASTER_MANAGED=1
export TOASTER_MANAGED=1
if [ $WEBSERVER -gt 0 ] && ! webserverStartAll; then
echo "Failed to start the web server, stopping" 1>&2
cleanup
exit 1
fi
if [ $WEBSERVER -gt 0 ] && [ $NOBROWSER -eq 0 ] ; then
echo "Starting browser..."
xdg-open http://127.0.0.1:$WEB_PORT/ >/dev/null 2>&1 &
fi
trap trap_ctrlc 2
echo "Toaster is now running. You can stop it with Ctrl-C"
while [ $RUNNING -gt 0 ]; do
python $BBBASEDIR/lib/toaster/manage.py runbuilds 2>&1 | tee -a "$BUILDDIR/toaster.log"
sleep 1
done
cleanup
echo "**** Exit"
exit 0
fi


if ! verify_prereq; then
echo "Error: Could not verify that the needed dependencies are installed. Please use virtualenv and pip to install dependencies listed in toaster-requirements.txt" 1>&2
return 1
fi


# We make sure we're running in the current shell and in a good environment
if [ -z "$BUILDDIR" ] || ! which bitbake >/dev/null 2>&1 ; then
echo "Error: Build environment is not setup or bitbake is not in path." 1>&2
return 2
fi


# Determine the action. If specified by arguments, fine, if not, toggle it
if [ "$1" = 'start' ] || [ "$1" = 'stop' ]; then
CMD="$1"
else
if [ -z "$BBSERVER" ]; then
CMD="start"
else
CMD="stop"
fi
fi

echo "The system will $CMD."

# Make sure it's safe to run by checking bitbake lock

lock=1
if [ -e $BUILDDIR/bitbake.lock ]; then
python -c "import fcntl; fcntl.flock(open(\"$BUILDDIR/bitbake.lock\"), fcntl.LOCK_EX|fcntl.LOCK_NB)" 2>/dev/null || lock=0
(flock -n 200 ) 200<$BUILDDIR/bitbake.lock || lock=0
fi

if [ ${CMD} = 'start' ] && [ $lock -eq 0 ]; then
echo "Error: bitbake lock state error. File locks show that the system is on." 1>&2
echo "Please wait for the current build to finish, stop and then start the system again." 1>&2
if [ ${CMD} == "start" ] && ( [ $lock -eq 0 ] || [ -e $BUILDDIR/.toastermain.pid ] ); then
echo "Error: bitbake lock state error. File locks show that the system is on." 2>&1
echo "If you see problems, stop and then start the system again." 2>&1
return 3
fi

if [ ${CMD} = 'start' ] && [ -e $BUILDDIR/.toastermain.pid ] && kill -0 `cat $BUILDDIR/.toastermain.pid`; then
echo "Warning: bitbake appears to be dead, but the Toaster web server is running. Something fishy is going on." 1>&2
echo "Cleaning up the web server to start from a clean slate."
webserverKillAll
fi


# Execute the commands

case $CMD in
start )
start_success=1
addtoConfiguration toaster.conf "INHERIT+=\"toaster buildhistory\"" $TOASTER_BRBE
addtoConfiguration "INHERIT+=\"toaster buildhistory\"" toaster.conf
if [ $WEBSERVER -gt 0 ] && ! webserverStartAll; then
echo "Failed ${CMD}."
return 4
fi
unset BBSERVER
PREREAD=""
if [ -e ${BUILDDIR}/conf/toaster-pre.conf ]; then
rm ${BUILDDIR}/conf/toaster-pre.conf
fi
bitbake $PREREAD --postread conf/toaster.conf --server-only -t xmlrpc -B 0.0.0.0:0
bitbake --postread conf/toaster.conf --server-only -t xmlrpc -B 0.0.0.0:8200
if [ $? -ne 0 ]; then
start_success=0
echo "Bitbake server start failed"
else
export BBSERVER=0.0.0.0:-1
if [ $NOTOASTERUI -eq 0 ]; then # we start the TOASTERUI only if not inhibited
bitbake --observe-only -u toasterui >>${BUILDDIR}/toaster_ui.log 2>&1 & echo $! >${BUILDDIR}/.toasterui.pid
export BBSERVER=0.0.0.0:8200
if [ $NOTOASTERUI == 0 ]; then # we start the TOASTERUI only if not inhibited
bitbake --observe-only -u toasterui >${BUILDDIR}/toaster_ui.log 2>&1 & echo $! >${BUILDDIR}/.toasterui.pid
fi
fi
if [ $start_success -eq 1 ]; then
# set fail safe stop system on terminal exit
trap stop_system SIGHUP
echo "Successful ${CMD}."
return 0
else
# failed start, do stop
stop_system
echo "Failed ${CMD}."
return 1
fi
# stop system on terminal exit
set -o monitor
@@ -350,3 +264,4 @@ case $CMD in
;;
esac


@@ -1,174 +0,0 @@
#!/usr/bin/env python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (C) 2014 Alex Damian
#
# This file re-uses code spread throughout other Bitbake source files.
# As such, all other copyrights belong to their own right holders.
#
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.


# This command takes a filename as a single parameter. The filename is read
# as a build eventlog, and the ToasterUI is used to process events in the file
# and log data in the database

from __future__ import print_function
import os
import sys, logging

# mangle syspath to allow easy import of modules
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
'lib'))


import bb.cooker
from bb.ui import toasterui
import sys
import logging

import json, pickle


class FileReadEventsServerConnection():
""" Emulates a connection to a bitbake server that feeds
events coming actually read from a saved log file.
"""

class MockConnection():
""" fill-in for the proxy to the server. we just return generic data
"""
def __init__(self, sc):
self._sc = sc

def runCommand(self, commandArray):
""" emulates running a command on the server; only read-only commands are accepted """
command_name = commandArray[0]

if command_name == "getVariable":
if commandArray[1] in self._sc._variables:
return (self._sc._variables[commandArray[1]]['v'], None)
return (None, "Missing variable")

elif command_name == "getAllKeysWithFlags":
dump = {}
flaglist = commandArray[1]
for k in self._sc._variables.keys():
try:
if not k.startswith("__"):
v = self._sc._variables[k]['v']
dump[k] = {
'v' : v ,
'history' : self._sc._variables[k]['history'],
}
for d in flaglist:
dump[k][d] = self._sc._variables[k][d]
except Exception as e:
print(e)
return (dump, None)
else:
raise Exception("Command %s not implemented" % commandArray[0])

def terminateServer(self):
""" do not do anything """
pass



class EventReader():
def __init__(self, sc):
self._sc = sc
self.firstraise = 0

def _create_event(self, line):
def _import_class(name):
assert len(name) > 0
assert "." in name, name

components = name.strip().split(".")
modulename = ".".join(components[:-1])
moduleklass = components[-1]

module = __import__(modulename, fromlist=[str(moduleklass)])
return getattr(module, moduleklass)

# we build a toaster event out of current event log line
try:
event_data = json.loads(line.strip())
event_class = _import_class(event_data['class'])
event_object = pickle.loads(json.loads(event_data['vars']))
except ValueError as e:
print("Failed loading ", line)
raise e

if not isinstance(event_object, event_class):
raise Exception("Error loading objects %s class %s ", event_object, event_class)

return event_object

def waitEvent(self, timeout):

nextline = self._sc._eventfile.readline()
if len(nextline) == 0:
# the build data ended, while toasterui still waits for events.
# this happens when the server was abruptly stopped, so we simulate this
self.firstraise += 1
if self.firstraise == 1:
raise KeyboardInterrupt()
else:
return None
else:
self._sc.lineno += 1
return self._create_event(nextline)


def _readVariables(self, variableline):
self._variables = json.loads(variableline.strip())['allvariables']


def __init__(self, file_name):
self.connection = FileReadEventsServerConnection.MockConnection(self)
self._eventfile = open(file_name, "r")

# we expect to have the variable dump at the start of the file
self.lineno = 1
self._readVariables(self._eventfile.readline())

self.events = FileReadEventsServerConnection.EventReader(self)




class MockConfigParameters():
""" stand-in for cookerdata.ConfigParameters; as we don't really config a cooker, this
serves just to supply needed interfaces for the toaster ui to work """
def __init__(self):
self.observe_only = True # we can only read files


# run toaster ui on our mock bitbake class
if __name__ == "__main__":
if len(sys.argv) < 2:
print("Usage: %s event.log " % sys.argv[0])
sys.exit(1)

file_name = sys.argv[-1]
mock_connection = FileReadEventsServerConnection(file_name)
configParams = MockConfigParameters()

# run the main program and set exit code to the returned value
sys.exit(toasterui.main(mock_connection.connection, mock_connection.events, configParams))
@@ -11,7 +11,7 @@
# validate: validates
# clean: removes files
#
# The Makefile generates an HTML version of every document. The
# The Makefile generates an HTML and PDF version of every document. The
# variable DOC indicates the folder name for a given manual.
#
# To build a manual, you must invoke 'make' with the DOC argument.
@@ -21,8 +21,8 @@
# make DOC=bitbake-user-manual
# make pdf DOC=bitbake-user-manual
#
# The first example generates the HTML version of the User Manual.
# The second example generates the PDF version of the User Manual.
# The first example generates the HTML and PDF versions of the User Manual.
# The second example generates the HTML version only of the User Manual.
#

ifeq ($(DOC),bitbake-user-manual)
@@ -31,9 +31,9 @@ XSLTOPTS = --stringparam html.stylesheet bitbake-user-manual-style.css \
--stringparam section.autolabel 1 \
--stringparam section.label.includes.component.label 1 \
--xinclude
ALLPREQ = html tarball
TARFILES = bitbake-user-manual-style.css bitbake-user-manual.html figures/bitbake-title.png
MANUALS = $(DOC)/$(DOC).html
ALLPREQ = html pdf tarball
TARFILES = bitbake-user-manual-style.css bitbake-user-manual.html bitbake-user-manual.pdf figures/bitbake-title.png
MANUALS = $(DOC)/$(DOC).html $(DOC)/$(DOC).pdf
FIGURES = figures
STYLESHEET = $(DOC)/*.css


@@ -1,15 +1,7 @@
<?xml version='1.0'?>
<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns="http://www.w3.org/1999/xhtml" xmlns:fo="http://www.w3.org/1999/XSL/Format" version="1.0">

<xsl:import href="http://downloads.yoctoproject.org/mirror/docbook-mirror/docbook-xsl-1.76.1/xhtml/docbook.xsl" />

<!--

<xsl:import href="../template/1.76.1/docbook-xsl-1.76.1/xhtml/docbook.xsl" />

<xsl:import href="http://docbook.sourceforge.net/release/xsl/1.76.1/xhtml/docbook.xsl" />

-->
<xsl:import href="http://docbook.sourceforge.net/release/xsl/current/xhtml/docbook.xsl" />

<xsl:include href="../template/permalinks.xsl"/>
<xsl:include href="../template/section.title.xsl"/>

@@ -21,7 +21,7 @@
The execution process is launched using the following command
form:
<literallayout class='monospaced'>
$ bitbake <replaceable>target</replaceable>
$ bitbake <target>
</literallayout>
For information on the BitBake command and its options,
see
@@ -37,16 +37,14 @@
</para>

<para>
A common method to determine this value for your build host is to run
the following:
A common way to determine this value for your build host is to run:
<literallayout class='monospaced'>
$ grep processor /proc/cpuinfo
</literallayout>
This command returns the number of processors, which takes into
account hyper-threading.
Thus, a quad-core build host with hyper-threading most likely
shows eight processors, which is the value you would then assign to
<filename>BB_NUMBER_THREADS</filename>.
and count the number of processors displayed. Note that the number of
processors will take into account hyper-threading, so that a quad-core
build host with hyper-threading will most likely show eight processors,
which is the value you would then assign to that variable.
</para>
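
[Editor's note: as a cross-check of the grep approach above, here is a minimal Python sketch that reports the same logical-processor count, hyper-threads included. It is an illustration added in this edit, not part of the manual text.]

    # counts logical processors, the same figure that
    # "grep processor /proc/cpuinfo" yields on Linux;
    # hyper-threaded cores are counted individually
    import multiprocessing
    print(multiprocessing.cpu_count())   # e.g. 8 on a quad-core host with HT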

<para>
@@ -287,8 +285,8 @@
<link linkend='var-PN'><filename>PN</filename></link> and
<link linkend='var-PV'><filename>PV</filename></link>:
<literallayout class='monospaced'>
PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}"
PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[1] or '1.0'}"
PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[0] or 'defaultpkgname'}"
PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[1] or '1.0'}"
</literallayout>
In this example, a recipe called "something_1.2.3.bb" would set
<filename>PN</filename> to "something" and
@@ -784,13 +782,13 @@
make some dependency and hash information available to the build.
This information includes:
<itemizedlist>
<listitem><para><filename>BB_BASEHASH_task-</filename><replaceable>taskname</replaceable>:
<listitem><para><filename>BB_BASEHASH_task-<taskname></filename>:
The base hashes for each task in the recipe.
</para></listitem>
<listitem><para><filename>BB_BASEHASH_</filename><replaceable>filename</replaceable><filename>:</filename><replaceable>taskname</replaceable>:
<listitem><para><filename>BB_BASEHASH_<filename:taskname></filename>:
The base hashes for each dependent task.
</para></listitem>
<listitem><para><filename>BBHASHDEPS_</filename><replaceable>filename</replaceable><filename>:</filename><replaceable>taskname</replaceable>:
<listitem><para><filename>BBHASHDEPS_<filename:taskname></filename>:
The task dependencies for each task.
</para></listitem>
<listitem><para><filename>BB_TASKHASH</filename>:

@@ -157,8 +157,8 @@
<filename>SRC_URI</filename> variable with the appropriate
varflags as follows:
<literallayout class='monospaced'>
SRC_URI[md5sum] = "<replaceable>value</replaceable>"
SRC_URI[sha256sum] = "<replaceable>value</replaceable>"
SRC_URI[md5sum] = "value"
SRC_URI[sha256sum] = "value"
</literallayout>
You can also specify the checksums as parameters on the
<filename>SRC_URI</filename> as shown below:
@@ -325,25 +325,6 @@
SRC_URI = "ftp://you@oe.handhelds.org/home/you/secret.plan"
</literallayout>
</para>
<note>
Because URL parameters are delimited by semi-colons, this can
introduce ambiguity when parsing URLs that also contain semi-colons,
for example:
<literallayout class='monospaced'>
SRC_URI = "http://abc123.org/git/?p=gcc/gcc.git;a=snapshot;h=a5dd47"
</literallayout>
Such URLs should be modified by replacing semi-colons with '&' characters:
<literallayout class='monospaced'>
SRC_URI = "http://abc123.org/git/?p=gcc/gcc.git&a=snapshot&h=a5dd47"
</literallayout>
In most cases this should work. Treating semi-colons and '&' in queries
identically is recommended by the World Wide Web Consortium (W3C).
Note that due to the nature of the URL, you may have to specify the name
of the downloaded file as well:
<literallayout class='monospaced'>
SRC_URI = "http://abc123.org/git/?p=gcc/gcc.git&a=snapshot&h=a5dd47;downloadfilename=myfile.bz2"
</literallayout>
</note>
</section>

<section id='cvs-fetcher'>
@@ -364,7 +345,7 @@
A special value of "now" causes the checkout to
be updated on every build.
</para></listitem>
<listitem><para><emphasis><link linkend='var-CVSDIR'><filename>CVSDIR</filename></link>:</emphasis>
<listitem><para><emphasis><filename>CVSDIR</filename>:</emphasis>
Specifies where a temporary checkout is saved.
The location is often <filename>DL_DIR/cvs</filename>.
</para></listitem>
@@ -413,8 +394,7 @@
Effectively, you are renaming the output directory
to which the module is unpacked.
You are forcing the module into a special
directory relative to
<link linkend='var-CVSDIR'><filename>CVSDIR</filename></link>.
directory relative to <filename>CVSDIR</filename>.
</para></listitem>
<listitem><para><emphasis>"rsh"</emphasis>
Used in conjunction with the "method" parameter.
@@ -455,9 +435,9 @@
The executable used is specified by
<filename>FETCHCMD_svn</filename>, which defaults
to "svn".
The fetcher's temporary working directory is set by
<link linkend='var-SVNDIR'><filename>SVNDIR</filename></link>,
which is usually <filename>DL_DIR/svn</filename>.
The fetcher's temporary working directory is set
by <filename>SVNDIR</filename>, which is usually
<filename>DL_DIR/svn</filename>.
</para>

<para>
@@ -511,9 +491,8 @@
This fetcher submodule fetches code from the Git
source control system.
The fetcher works by creating a bare clone of the
remote into
<link linkend='var-GITDIR'><filename>GITDIR</filename></link>,
which is usually <filename>DL_DIR/git2</filename>.
remote into <filename>GITDIR</filename>, which is
usually <filename>DL_DIR/git2</filename>.
This bare clone is then cloned into the work directory during the
unpack stage when a specific tree is checked out.
This is done using alternates and by reference to
@@ -647,7 +626,7 @@
<literallayout class='monospaced'>
SRC_URI = "ccrc://cc.example.org/ccrc;vob=/example_vob;module=/example_module"
SRCREV = "EXAMPLE_CLEARCASE_TAG"
PV = "${@d.getVar("SRCREV", False).replace("/", "+")}"
PV = "${@d.getVar("SRCREV").replace("/", "+")}"
</literallayout>
The fetcher uses the <filename>rcleartool</filename> or
<filename>cleartool</filename> remote client, depending on
@@ -665,19 +644,13 @@
</para></listitem>
<listitem><para><emphasis><filename>module</filename></emphasis>:
The module, which must include the
prepending "/" character, in the selected VOB.
<note>
The <filename>module</filename> and <filename>vob</filename>
options are combined to create the <filename>load</filename> rule in
the view config spec.
As an example, consider the <filename>vob</filename> and
<filename>module</filename> values from the
<filename>SRC_URI</filename> statement at the start of this section.
Combining those values results in the following:
<literallayout class='monospaced'>
load /example_vob/example_module
</literallayout>
</note>
prepending "/" character, in the selected VOB
The <filename>module</filename> and <filename>vob</filename>
options are combined to create the following load rule in
the view config spec:
<literallayout class='monospaced'>
load <vob><module>
</literallayout>
</para></listitem>
<listitem><para><emphasis><filename>proto</filename></emphasis>:
The protocol, which can be either <filename>http</filename> or

@@ -135,7 +135,7 @@
<ulink url="http://www.mail-archive.com/yocto@yoctoproject.org/msg09379.html">Mailing List post - The BitBake equivalent of "Hello, World!"</ulink>
</para></listitem>
<listitem><para>
<ulink url="http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/">Hambedded Linux blog post - From Bitbake Hello World to an Image</ulink>
<ulink url="https://web.archive.org/web/20150325165911/http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/">Hambedded Linux blog post - From Bitbake Hello World to an Image</ulink>
</para></listitem>
</itemizedlist>
</note>
@@ -221,7 +221,7 @@
<para>From your shell, enter the following commands to set and
export the <filename>BBPATH</filename> variable:
<literallayout class='monospaced'>
$ BBPATH="<replaceable>projectdirectory</replaceable>"
$ BBPATH="<projectdirectory>"
$ export BBPATH
</literallayout>
Use your actual project directory in the command.
@@ -270,7 +270,7 @@
and define some key BitBake variables.
For more information on the <filename>bitbake.conf</filename>,
see
<ulink url='http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/#an-overview-of-bitbakeconf'></ulink>
<ulink url='https://web.archive.org/web/20150325165911/http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/#an-overview-of-bitbakeconf'></ulink>
</para>
<para>Use the following commands to create the <filename>conf</filename>
directory in the project directory:
@@ -355,7 +355,7 @@ ERROR: Unable to parse base: ParseError in configuration INHERITs: Could not inh
supporting.
For more information on the <filename>base.bbclass</filename> file,
you can look at
<ulink url='http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/#tasks'></ulink>.
<ulink url='https://web.archive.org/web/20150325165911/http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/#tasks'></ulink>.
</para></listitem>
<listitem><para><emphasis>Run Bitbake:</emphasis>
After making sure that the <filename>classes/base.bbclass</filename>
@@ -377,7 +377,7 @@ ERROR: Unable to parse base: ParseError in configuration INHERITs: Could not inh
Thus, this example creates and uses a layer called "mylayer".
<note>
You can find additional information on adding a layer at
<ulink url='http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/#adding-an-example-layer'></ulink>.
<ulink url='https://web.archive.org/web/20150325165911/http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/#adding-an-example-layer'></ulink>.
</note>
</para>
<para>Minimally, you need a recipe file and a layer configuration
@@ -471,7 +471,7 @@ ERROR: Unable to parse base: ParseError in configuration INHERITs: Could not inh
Time: 00:00:00
Parsing of 1 .bb files complete (0 cached, 1 parsed). 1 targets, 0 skipped, 0 masked, 0 errors.
NOTE: Resolving any missing task queue dependencies
NOTE: Preparing RunQueue
NOTE: Preparing runqueue
NOTE: Executing RunQueue Tasks
********************
* *

@@ -327,8 +327,8 @@
The following lines select the values of a package name
and its version number, respectively:
<literallayout class='monospaced'>
PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}"
PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[1] or '1.0'}"
PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[0] or 'defaultpkgname'}"
PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[1] or '1.0'}"
</literallayout>
</para>
</section>
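
[Editor's note: to make the PN/PV derivation above concrete, here is a small Python sketch. vars_from_filename is a hypothetical stand-in that mimics what the manual describes bb.parse.BBHandler.vars_from_file doing for a simple "name_version.bb" path; it is not BitBake's actual implementation.]

    import os

    def vars_from_filename(path):
        # hypothetical helper: split "something_1.2.3.bb" into name and version
        base = os.path.basename(path)
        stem = base.rsplit(".", 1)[0]          # drop the .bb suffix
        parts = stem.split("_", 1)
        pn = parts[0]
        pv = parts[1] if len(parts) > 1 else None
        return pn, pv

    print(vars_from_filename("something_1.2.3.bb"))   # ('something', '1.2.3')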
@@ -952,7 +952,7 @@
<listitem><para>
The class needs to define the function as follows:
<literallayout class='monospaced'>
<replaceable>classname</replaceable><filename>_</filename><replaceable>functionname</replaceable>
<classname>_<functionname>
</literallayout>
For example, if you have a class file
<filename>bar.bbclass</filename> and a function named
@@ -966,7 +966,7 @@
The class needs to contain the <filename>EXPORT_FUNCTIONS</filename>
statement as follows:
<literallayout class='monospaced'>
EXPORT_FUNCTIONS <replaceable>functionname</replaceable>
EXPORT_FUNCTIONS <functionname>
</literallayout>
For example, continuing with the same example, the
statement in the <filename>bar.bbclass</filename> would be
@@ -1065,41 +1065,13 @@
<title>Deleting a Task</title>

<para>
As well as being able to add tasks, you can delete them.
Simply use the <filename>deltask</filename> command to
delete a task.
As well as being able to add tasks, tasks can also be deleted.
This is done simply with the <filename>deltask</filename> command.
For example, to delete the example task used in the previous
sections, you would use:
<literallayout class='monospaced'>
deltask printdate
</literallayout>
If you delete a task using the <filename>deltask</filename>
command and the task has dependencies, the dependencies are
not reconnected.
For example, suppose you have three tasks named
<filename>do_a</filename>, <filename>do_b</filename>, and
<filename>do_c</filename>.
Furthermore, <filename>do_c</filename> is dependent on
<filename>do_b</filename>, which in turn is dependent on
<filename>do_a</filename>.
Given this scenario, if you use <filename>deltask</filename>
to delete <filename>do_b</filename>, the implicit dependency
relationship between <filename>do_c</filename> and
<filename>do_a</filename> through <filename>do_b</filename>
no longer exists, and <filename>do_c</filename> dependencies
are not updated to include <filename>do_a</filename>.
Thus, <filename>do_c</filename> is free to run before
<filename>do_a</filename>.
</para>

<para>
If you want dependencies such as these to remain intact, use
the <filename>noexec</filename> varflag to disable the task
instead of using the <filename>deltask</filename> command to
delete it:
<literallayout class='monospaced'>
do_b[noexec] = "1"
</literallayout>
</para>
</section>
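
[Editor's note: the broken-chain behaviour described above can be modelled in a few lines of plain Python. This is a sketch of the concept only, not BitBake's scheduler: deleting the middle task drops its edges without rerouting them, so do_c stops depending on do_a entirely.]

    deps = {"do_a": set(), "do_b": {"do_a"}, "do_c": {"do_b"}}

    def deltask(name):
        # remove the task and every edge that points at it;
        # nothing is reconnected, mirroring deltask's documented behaviour
        deps.pop(name, None)
        for d in deps.values():
            d.discard(name)

    deltask("do_b")
    print(deps)   # {'do_a': set(), 'do_c': set()} - do_c may now run before do_a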

@@ -1163,11 +1135,13 @@
<para>
The <filename>BB_ORIGENV</filename> variable returns a datastore
object that can be queried using the standard datastore operators
such as <filename>getVar(, False)</filename>.
such as <filename>getVar()</filename>.
The datastore object is useful, for example, to find the original
<filename>DISPLAY</filename> variable.
Here is an example:
<literallayout class='monospaced'>
BB_ORIGENV - add example?

origenv = d.getVar("BB_ORIGENV", False)
bar = origenv.getVar("BAR", False)
</literallayout>
@@ -1192,7 +1166,7 @@
BitBake reads and writes varflags to the datastore using the following
command forms:
<literallayout class='monospaced'>
<replaceable>variable</replaceable> = d.getVarFlags("<replaceable>variable</replaceable>")
<variable> = d.getVarFlags("<variable>")
self.d.setVarFlags("FOO", {"func": True})
</literallayout>
</para>
@@ -1213,36 +1187,11 @@
Tasks support a number of these flags which control various
functionality of the task:
<itemizedlist>
<listitem><para><emphasis>cleandirs:</emphasis>
Empty directories that should be created before the task runs.
</para></listitem>
<listitem><para><emphasis>depends:</emphasis>
Controls inter-task dependencies.
See the
<link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
variable and the
"<link linkend='inter-task-dependencies'>Inter-Task Dependencies</link>"
section for more information.
</para></listitem>
<listitem><para><emphasis>deptask:</emphasis>
Controls task build-time dependencies.
See the
<link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
variable and the
"<link linkend='build-dependencies'>Build Dependencies</link>"
section for more information.
</para></listitem>
<listitem><para><emphasis>dirs:</emphasis>
Directories that should be created before the task runs.
</para></listitem>
<listitem><para><emphasis>lockfiles:</emphasis>
Specifies one or more lockfiles to lock while the task
executes.
Only one task may hold a lockfile, and any task that
attempts to lock an already locked file will block until
the lock is released.
You can use this variable flag to accomplish mutual
exclusion.
<listitem><para><emphasis>cleandirs:</emphasis>
Empty directories that should be created before the task runs.
</para></listitem>
<listitem><para><emphasis>noexec:</emphasis>
Marks the tasks as being empty and no execution required.
@@ -1254,20 +1203,15 @@
Tells BitBake to not generate a stamp file for a task,
which implies the task should always be executed.
</para></listitem>
<listitem><para><emphasis>postfuncs:</emphasis>
List of functions to call after the completion of the task.
<listitem><para><emphasis>umask:</emphasis>
The umask to run the task under.
</para></listitem>
<listitem><para><emphasis>prefuncs:</emphasis>
List of functions to call before the task executes.
</para></listitem>
<listitem><para><emphasis>rdepends:</emphasis>
Controls inter-task runtime dependencies.
<listitem><para><emphasis>deptask:</emphasis>
Controls task build-time dependencies.
See the
<link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link>
variable, the
<link linkend='var-RRECOMMENDS'><filename>RRECOMMENDS</filename></link>
variable, and the
"<link linkend='inter-task-dependencies'>Inter-Task Dependencies</link>"
<link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
variable and the
"<link linkend='build-dependencies'>Build Dependencies</link>"
section for more information.
</para></listitem>
<listitem><para><emphasis>rdeptask:</emphasis>
@@ -1280,11 +1224,6 @@
"<link linkend='runtime-dependencies'>Runtime Dependencies</link>"
section for more information.
</para></listitem>
<listitem><para><emphasis>recideptask:</emphasis>
When set in conjunction with
<filename>recrdeptask</filename>, specifies a task that
should be inspected for additional dependencies.
</para></listitem>
<listitem><para><emphasis>recrdeptask:</emphasis>
Controls task recursive runtime dependencies.
See the
@@ -1295,14 +1234,35 @@
"<link linkend='recursive-dependencies'>Recursive Dependencies</link>"
section for more information.
</para></listitem>
<listitem><para><emphasis>depends:</emphasis>
Controls inter-task dependencies.
See the
<link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>
variable and the
"<link linkend='inter-task-dependencies'>Inter-Task Dependencies</link>"
section for more information.
</para></listitem>
<listitem><para><emphasis>rdepends:</emphasis>
Controls inter-task runtime dependencies.
See the
<link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link>
variable, the
<link linkend='var-RRECOMMENDS'><filename>RRECOMMENDS</filename></link>
variable, and the
"<link linkend='inter-task-dependencies'>Inter-Task Dependencies</link>"
section for more information.
</para></listitem>
<listitem><para><emphasis>postfuncs:</emphasis>
List of functions to call after the completion of the task.
</para></listitem>
<listitem><para><emphasis>prefuncs:</emphasis>
List of functions to call before the task executes.
</para></listitem>
<listitem><para><emphasis>stamp-extra-info:</emphasis>
Extra stamp information to append to the task's stamp.
As an example, OpenEmbedded uses this flag to allow
machine-specific tasks.
</para></listitem>
<listitem><para><emphasis>umask:</emphasis>
The umask to run the task under.
</para></listitem>
</itemizedlist>
</para>
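
[Editor's note: using the datastore calls already shown in the varflags section above, a task flag such as lockfiles or umask can be set from Python metadata. This is a hedged sketch assuming it runs where the datastore d is in scope, e.g. inside an anonymous Python function; it is not quoted from the manual.]

    # set two of the task flags listed above on do_compile, then read them back;
    # setVarFlags/getVarFlags are the datastore calls shown earlier in this manual
    d.setVarFlags("do_compile", {"lockfiles": "${TMPDIR}/compile.lock",
                                 "umask": "022"})
    print(d.getVarFlags("do_compile"))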
|
||||
|
||||
@@ -1322,16 +1282,16 @@
does not allow BitBake to automatically determine
that the variable is referred to.
</para></listitem>
<listitem><para><emphasis>vardepsexclude:</emphasis>
Specifies a space-separated list of variables
that should be excluded from a variable's dependencies
for the purposes of calculating its signature.
</para></listitem>
<listitem><para><emphasis>vardepvalue:</emphasis>
If set, instructs BitBake to ignore the actual
value of the variable and instead use the specified
value when calculating the variable's signature.
</para></listitem>
<listitem><para><emphasis>vardepsexclude:</emphasis>
Specifies a space-separated list of variables
that should be excluded from a variable's dependencies
for the purposes of calculating its signature.
</para></listitem>
<listitem><para><emphasis>vardepvalueexclude:</emphasis>
Specifies a pipe-separated list of strings to exclude
from the variable's value when calculating the

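A minimal sketch of the signature-related flags above, again assuming a datastore d inside recipe Python; the variable names are placeholders:

    # Exclude DATE from BUILD_ID's signature, and pin SETUP_SCRIPT's
    # signature to a fixed string regardless of its actual value.
    d.setVarFlag('BUILD_ID', 'vardepsexclude', 'DATE')
    d.setVarFlag('SETUP_SCRIPT', 'vardepvalue', 'setup-v1')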
@@ -43,8 +43,8 @@
<link linkend='var-DEFAULT_PREFERENCE'>D</link>
<link linkend='var-EXCLUDE_FROM_WORLD'>E</link>
<link linkend='var-FAKEROOT'>F</link>
<link linkend='var-GITDIR'>G</link>
<link linkend='var-HGDIR'>H</link>
<!-- <link linkend='var-GROUPADD_PARAM'>G</link> -->
<link linkend='var-HOMEPAGE'>H</link>
<!-- <link linkend='var-ICECC_DISABLED'>I</link> -->
<!-- <link linkend='var-glossary-j'>J</link> -->
<!-- <link linkend='var-KARCH'>K</link> -->
@@ -102,56 +102,6 @@
</glossdef>
</glossentry>

<glossentry id='var-BB_ALLOWED_NETWORKS'><glossterm>BB_ALLOWED_NETWORKS</glossterm>
<glossdef>
<para>
Specifies a space-delimited list of hosts that the fetcher
is allowed to use to obtain the required source code.
Following are considerations surrounding this variable:
<itemizedlist>
<listitem><para>
This host list is only used if
<link linkend='var-BB_NO_NETWORK'><filename>BB_NO_NETWORK</filename></link>
is either not set or set to "0".
</para></listitem>
<listitem><para>
Limited support for wildcard matching against the
beginning of host names exists.
For example, the following setting matches
<filename>git.gnu.org</filename>,
<filename>ftp.gnu.org</filename>, and
<filename>foo.git.gnu.org</filename>.
<literallayout class='monospaced'>
BB_ALLOWED_NETWORKS = "*.gnu.org"
</literallayout>
</para></listitem>
<listitem><para>
Mirrors not in the host list are skipped and
logged in debug.
</para></listitem>
<listitem><para>
Attempts to access networks not in the host list
cause a failure.
</para></listitem>
</itemizedlist>
Using <filename>BB_ALLOWED_NETWORKS</filename> in
conjunction with
<link linkend='var-PREMIRRORS'><filename>PREMIRRORS</filename></link>
is very useful.
Adding the host you want to use to
<filename>PREMIRRORS</filename> results in the source code
being fetched from an allowed location and avoids raising
an error when a host that is not allowed is in a
<link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>
statement.
This is because the fetcher does not attempt to use the
host listed in <filename>SRC_URI</filename> after a
successful fetch from the
<filename>PREMIRRORS</filename> occurs.
</para>
</glossdef>
</glossentry>

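The wildcard behaviour described in the entry above can be approximated in plain Python with fnmatch, since * also matches dots; this is an illustrative sketch, not the fetcher's actual matching code:

    import fnmatch

    def host_allowed(host, allowed_networks):
        # "*.gnu.org" accepts git.gnu.org, ftp.gnu.org and foo.git.gnu.org
        return any(fnmatch.fnmatch(host, pat) for pat in allowed_networks.split())

    print(host_allowed("git.gnu.org", "*.gnu.org"))   # True
    print(host_allowed("example.com", "*.gnu.org"))   # False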
<glossentry id='var-BB_CONSOLELOG'><glossterm>BB_CONSOLELOG</glossterm>
<glossdef>
<para>
@@ -922,7 +872,7 @@
that run on the target <filename>MACHINE</filename>;
"nativesdk", which targets the SDK machine instead of
<filename>MACHINE</filename>; and "mulitlibs" in the form
"<filename>multilib:</filename><replaceable>multilib_name</replaceable>".
"<filename>multilib:<multilib_name></filename>".
</para>

<para>
@@ -934,7 +884,7 @@
metadata:
<literallayout class='monospaced'>
BBCLASSEXTEND =+ "native nativesdk"
BBCLASSEXTEND =+ "multilib:<replaceable>multilib_name</replaceable>"
BBCLASSEXTEND =+ "multilib:<multilib_name>"
</literallayout>
</para>
</glossdef>
@@ -1066,20 +1016,6 @@
</glossdef>
</glossentry>

<glossentry id='var-BBLAYERS_FETCH_DIR'><glossterm>BBLAYERS_FETCH_DIR</glossterm>
<glossdef>
<para>
Sets the base location where layers are stored.
By default, this location is set to
<filename>${COREBASE}</filename>.
This setting is used in conjunction with
<filename>bitbake-layers layerindex-fetch</filename> and
tells <filename>bitbake-layers</filename> where to place
the fetched layers.
</para>
</glossdef>
</glossentry>

<glossentry id='var-BBMASK'><glossterm>BBMASK</glossterm>
<glossdef>
<para>
@@ -1155,9 +1091,9 @@
Set the variable as you would any environment variable
and then run BitBake:
<literallayout class='monospaced'>
$ BBPATH="<replaceable>build_directory</replaceable>"
$ BBPATH="<build_directory>"
$ export BBPATH
$ bitbake <replaceable>target</replaceable>
$ bitbake <target>
</literallayout>
</para>
</glossdef>
@@ -1218,15 +1154,6 @@
</glossdef>
</glossentry>

<glossentry id='var-BZRDIR'><glossterm>BZRDIR</glossterm>
<glossdef>
<para>
The directory in which files checked out of a Bazaar
system are stored.
</para>
</glossdef>
</glossentry>

</glossdiv>

<glossdiv id='var-glossary-c'><title>C</title>
@@ -1241,15 +1168,6 @@
</glossdef>
</glossentry>

<glossentry id='var-CVSDIR'><glossterm>CVSDIR</glossterm>
<glossdef>
<para>
The directory in which files checked out under the
CVS system are stored.
</para>
</glossdef>
</glossentry>

</glossdiv>

<glossdiv id='var-glossary-d'><title>D</title>
@@ -1494,32 +1412,13 @@

</glossdiv>


<!--
<glossdiv id='var-glossary-g'><title>G</title>

<glossentry id='var-GITDIR'><glossterm>GITDIR</glossterm>
<glossdef>
<para>
The directory in which a local copy of a Git repository
is stored when it is cloned.
</para>
</glossdef>
</glossentry>

</glossdiv>

-->

<glossdiv id='var-glossary-h'><title>H</title>

<glossentry id='var-HGDIR'><glossterm>HGDIR</glossterm>
<glossdef>
<para>
The directory in which files checked out of a Mercurial
system are stored.
</para>
</glossdef>
</glossentry>

<glossentry id='var-HOMEPAGE'><glossterm>HOMEPAGE</glossterm>
<glossdef>
<para>Website where more information about the software the recipe is building
@@ -1952,7 +1851,7 @@
Here is the general syntax to specify versions with
the <filename>RDEPENDS</filename> variable:
<literallayout class='monospaced'>
RDEPENDS_${PN} = "<replaceable>package</replaceable> (<replaceable>operator</replaceable> <replaceable>version</replaceable>)"
RDEPENDS_${PN} = "<package> (<operator> <version>)"
</literallayout>
For <filename>operator</filename>, you can specify the
following:
@@ -2018,7 +1917,7 @@
Here is the general syntax to specify versions with
the <filename>RRECOMMENDS</filename> variable:
<literallayout class='monospaced'>
RRECOMMENDS_${PN} = "<replaceable>package</replaceable> (<replaceable>operator</replaceable> <replaceable>version</replaceable>)"
RRECOMMENDS_${PN} = "<package> (<operator> <version>)"
</literallayout>
For <filename>operator</filename>, you can specify the
following:
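Dependency strings in the "package (operator version)" form shown above can be parsed with BitBake's own helper; a small sketch assuming an environment where bb.utils is importable, with placeholder package names:

    import bb.utils

    deps = bb.utils.explode_dep_versions2("foo (>= 1.2) bar")
    # deps maps each package to its version constraints:
    # {'foo': ['>= 1.2'], 'bar': []}
    for pkg, constraints in deps.items():
        print(pkg, constraints)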
@@ -2201,15 +2100,6 @@
</glossdef>
</glossentry>

<glossentry id='var-SVNDIR'><glossterm>SVNDIR</glossterm>
<glossdef>
<para>
The directory in which files checked out of a Subversion
system are stored.
</para>
</glossdef>
</glossentry>

</glossdiv>

<glossdiv id='var-glossary-t'><title>T</title>

@@ -21,7 +21,7 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

__version__ = "1.27.1"
__version__ = "1.24.0"

import sys
if sys.version_info < (2, 7, 3):
@@ -94,11 +94,11 @@ def note(*args):
def warn(*args):
logger.warn(''.join(args))

def error(*args, **kwargs):
logger.error(''.join(args), extra=kwargs)
def error(*args):
logger.error(''.join(args))

def fatal(*args, **kwargs):
logger.critical(''.join(args), extra=kwargs)
def fatal(*args):
logger.critical(''.join(args))
raise BBHandledException()

def deprecated(func, name=None, advice=""):

@@ -31,7 +31,6 @@ import logging
import shlex
import glob
import time
import stat
import bb
import bb.msg
import bb.process
@@ -43,20 +42,6 @@ logger = logging.getLogger('BitBake.Build')

NULL = open(os.devnull, 'r+')

__mtime_cache = {}

def cached_mtime_noerror(f):
if f not in __mtime_cache:
try:
__mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
except OSError:
return 0
return __mtime_cache[f]

def reset_cache():
global __mtime_cache
__mtime_cache = {}

# When we execute a Python function, we'd like certain things
# in all namespaces, hence we add them to __builtins__.
# If we do not do this and use the exec globals, they will
@@ -159,7 +144,7 @@ class LogTee(object):
def exec_func(func, d, dirs = None):
"""Execute a BB 'function'"""

body = d.getVar(func, False)
body = d.getVar(func)
if not body:
if body is None:
logger.warn("Function %s doesn't exist", func)
@@ -313,7 +298,7 @@ def exec_func_shell(func, d, runfile, cwd=None):
# cleanup
ret=$?
trap '' 0
exit $ret
exit $?
''')

os.chmod(runfile, 0775)
@@ -329,52 +314,14 @@ exit $ret
else:
logfile = sys.stdout

def readfifo(data):
lines = data.split('\0')
for line in lines:
splitval = line.split(' ', 1)
cmd = splitval[0]
if len(splitval) > 1:
value = splitval[1]
else:
value = ''
if cmd == 'bbplain':
bb.plain(value)
elif cmd == 'bbnote':
bb.note(value)
elif cmd == 'bbwarn':
bb.warn(value)
elif cmd == 'bberror':
bb.error(value)
elif cmd == 'bbfatal':
# The caller will call exit themselves, so bb.error() is
# what we want here rather than bb.fatal()
bb.error(value)
elif cmd == 'bbfatal_log':
bb.error(value, forcelog=True)
elif cmd == 'bbdebug':
splitval = value.split(' ', 1)
level = int(splitval[0])
value = splitval[1]
bb.debug(level, value)
bb.debug(2, "Executing shell function %s" % func)

tempdir = d.getVar('T', True)
fifopath = os.path.join(tempdir, 'fifo.%s' % os.getpid())
if os.path.exists(fifopath):
os.unlink(fifopath)
os.mkfifo(fifopath)
with open(fifopath, 'r+') as fifo:
try:
bb.debug(2, "Executing shell function %s" % func)

try:
with open(os.devnull, 'r+') as stdin:
bb.process.run(cmd, shell=False, stdin=stdin, log=logfile, extrafiles=[(fifo,readfifo)])
except bb.process.CmdError:
logfn = d.getVar('BB_LOGFILE', True)
raise FuncFailed(func, logfn)
finally:
os.unlink(fifopath)
try:
with open(os.devnull, 'r+') as stdin:
bb.process.run(cmd, shell=False, stdin=stdin, log=logfile)
except bb.process.CmdError:
logfn = d.getVar('BB_LOGFILE', True)
raise FuncFailed(func, logfn)

bb.debug(2, "Shell function %s finished" % func)

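The readfifo() handler above parses a simple null-delimited "command value" framing written by shell tasks into the FIFO. A self-contained sketch of the same framing, using made-up message text:

    sample = 'bbnote compiling stage 1\0bbwarn low disk space\0'
    for line in sample.split('\0'):
        if not line:
            continue
        cmd, _, value = line.partition(' ')   # e.g. 'bbnote' -> 'compiling stage 1'
        print(cmd, '->', value)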
@@ -448,10 +395,7 @@ def _exec_task(fn, task, d, quieterr):
self.triggered = False
logging.Handler.__init__(self, logging.ERROR)
def emit(self, record):
if getattr(record, 'forcelog', False):
self.triggered = False
else:
self.triggered = True
self.triggered = True

# Handle logfiles
si = open('/dev/null', 'r')
@@ -591,7 +535,7 @@ def stamp_internal(taskname, d, file_name, baseonly=False):
stamp = bb.parse.siggen.stampfile(stamp, file_name, taskname, extrainfo)

stampdir = os.path.dirname(stamp)
if cached_mtime_noerror(stampdir) == 0:
if bb.parse.cached_mtime_noerror(stampdir) == 0:
bb.utils.mkdirhier(stampdir)

return stamp
@@ -687,7 +631,7 @@ def stampfile(taskname, d, file_name = None):
return stamp_internal(taskname, d, file_name)

def add_tasks(tasklist, deltasklist, d):
task_deps = d.getVar('_task_deps', False)
task_deps = d.getVar('_task_deps')
if not task_deps:
task_deps = {}
if not 'tasks' in task_deps:
@@ -737,7 +681,7 @@ def addtask(task, before, after, d):
task = "do_" + task

d.setVarFlag(task, "task", 1)
bbtasks = d.getVar('__BBTASKS', False) or []
bbtasks = d.getVar('__BBTASKS') or []
if not task in bbtasks:
bbtasks.append(task)
d.setVar('__BBTASKS', bbtasks)
@@ -760,7 +704,7 @@ def deltask(task, d):
if task[:3] != "do_":
task = "do_" + task

bbtasks = d.getVar('__BBDELTASKS', False) or []
bbtasks = d.getVar('__BBDELTASKS') or []
if not task in bbtasks:
bbtasks.append(task)
d.setVar('__BBDELTASKS', bbtasks)

@@ -659,25 +659,25 @@ class Cache(object):
"""
chdir_back = False

from bb import parse
from bb import data, parse

# expand tmpdir to include this topdir
config.setVar('TMPDIR', config.getVar('TMPDIR', True) or "")
data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config)
bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
oldpath = os.path.abspath(os.getcwd())
parse.cached_mtime_noerror(bbfile_loc)
bb_data = config.createCopy()
bb_data = data.init_db(config)
# The ConfHandler first looks if there is a TOPDIR and if not
# then it would call getcwd().
# Previously, we chdir()ed to bbfile_loc, called the handler
# and finally chdir()ed back, a couple of thousand times. We now
# just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
if not bb_data.getVar('TOPDIR', False):
if not data.getVar('TOPDIR', bb_data):
chdir_back = True
bb_data.setVar('TOPDIR', bbfile_loc)
data.setVar('TOPDIR', bbfile_loc, bb_data)
try:
if appends:
bb_data.setVar('__BBAPPEND', " ".join(appends))
data.setVar('__BBAPPEND', " ".join(appends), bb_data)
bb_data = parse.handle(bbfile, bb_data)
if chdir_back:
os.chdir(oldpath)

@@ -92,9 +92,6 @@ class pythonCacheLine(object):
for c in sorted(self.contains.keys()):
l = l + (c, hash(self.contains[c]))
return hash(l)
def __repr__(self):
return " ".join([str(self.refs), str(self.execs), str(self.contains)])


class shellCacheLine(object):
def __init__(self, execs):
@@ -108,8 +105,6 @@ class shellCacheLine(object):
self.__init__(execs)
def __hash__(self):
return hash(self.execs)
def __repr__(self):
return str(self.execs)

class CodeParserCache(MultiProcessCache):
cache_file_name = "bb_codeparser.dat"
@@ -183,7 +178,7 @@ class BufferedLogger(Logger):

class PythonParser():
getvars = (".getVar", ".appendVar", ".prependVar")
containsfuncs = ("bb.utils.contains", "base_contains", "bb.utils.contains_any")
containsfuncs = ("bb.utils.contains", "base_contains", "oe.utils.contains", "bb.utils.contains_any")
execfuncs = ("bb.build.exec_func", "bb.build.exec_task")

def warn(self, func, arg):
@@ -245,9 +240,6 @@ class PythonParser():
self.unhandled_message = "while parsing %s, %s" % (name, self.unhandled_message)

def parse_python(self, node):
if not node or not node.strip():
return

h = hash(str(node))

if h in codeparsercache.pythoncache:

@@ -123,11 +123,11 @@ class Command:

def finishAsyncCommand(self, msg=None, code=None):
if msg or msg == "":
bb.event.fire(CommandFailed(msg), self.cooker.expanded_data)
bb.event.fire(CommandFailed(msg), self.cooker.event_data)
elif code:
bb.event.fire(CommandExit(code), self.cooker.expanded_data)
bb.event.fire(CommandExit(code), self.cooker.event_data)
else:
bb.event.fire(CommandCompleted(), self.cooker.expanded_data)
bb.event.fire(CommandCompleted(), self.cooker.event_data)
self.currentAsyncCommand = None
self.cooker.finishcommand()

@@ -273,8 +273,7 @@ class CommandsSync:

def updateConfig(self, command, params):
options = params[0]
environment = params[1]
command.cooker.updateConfigOpts(options, environment)
command.cooker.updateConfigOpts(options)

class CommandsAsync:
"""

@@ -35,7 +35,7 @@ from contextlib import closing
from functools import wraps
from collections import defaultdict
import bb, bb.exceptions, bb.command
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue
import Queue
import signal
import subprocess
@@ -153,7 +153,9 @@ class BBCooker:

# Take a lock so only one copy of bitbake can run against a given build
# directory at a time
if not self.lockBitbake():
lockfile = self.data.expand("${TOPDIR}/bitbake.lock")
self.lock = bb.utils.lockfile(lockfile, False, False)
if not self.lock:
bb.fatal("Only one copy of bitbake should be run against a build directory")
try:
self.lock.seek(0)
@@ -197,7 +199,7 @@ class BBCooker:
if not watcher:
watcher = self.watcher
for i in deps:
f = os.path.dirname(i[0])
f = i[0]
if f in watcher.bbseen:
continue
watcher.bbseen.append(f)
@@ -213,11 +215,6 @@ class BBCooker:
f = os.path.dirname(f)
watcher.bbseen.append(f)
continue
if 'ENOSPC' in str(e):
providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
providerlog.error("Root privilege is required to modify max_user_watches.")
raise

def sigterm_exception(self, signum, stackframe):
@@ -229,13 +226,13 @@ class BBCooker:

def setFeatures(self, features):
# we only accept a new feature set if we're in state initial, so we can reset without problems
if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
if self.state != state.initial:
raise Exception("Illegal state for feature set change")
original_featureset = list(self.featureset)
for feature in features:
self.featureset.setFeature(feature)
bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
if (original_featureset != list(self.featureset)) and self.state != state.error:
if (original_featureset != list(self.featureset)):
self.reset()

def initConfigurationData(self):
@@ -269,90 +266,16 @@ class BBCooker:
self.data = self.databuilder.data
self.data_hash = self.databuilder.data_hash


# we log all events to a file if so directed
if self.configuration.writeeventlog:
import json, pickle
DEFAULT_EVENTFILE = self.configuration.writeeventlog
class EventLogWriteHandler():

class EventWriter():
def __init__(self, cooker):
self.file_inited = None
self.cooker = cooker
self.event_queue = []

def init_file(self):
try:
# delete the old log
os.remove(DEFAULT_EVENTFILE)
except:
pass

# write current configuration data
with open(DEFAULT_EVENTFILE, "w") as f:
f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))

def write_event(self, event):
with open(DEFAULT_EVENTFILE, "a") as f:
try:
f.write("%s\n" % json.dumps({"class":event.__module__ + "." + event.__class__.__name__, "vars":json.dumps(pickle.dumps(event)) }))
except Exception as e:
import traceback
print(e, traceback.format_exc(e))


def send(self, event):
event_class = event.__module__ + "." + event.__class__.__name__

# init on bb.event.BuildStarted
if self.file_inited is None:
if event_class == "bb.event.BuildStarted":
self.init_file()
self.file_inited = True

# write pending events
for e in self.event_queue:
self.write_event(e)

# also write the current event
self.write_event(event)

else:
# queue all events until the file is inited
self.event_queue.append(event)

else:
# we have the file, just write the event
self.write_event(event)

# set our handler's event processor
event = EventWriter(self) # self is the cooker here


# set up cooker features for this mock UI handler

# we need to write the dependency tree in the log
self.featureset.setFeature(CookerFeatures.SEND_DEPENDS_TREE)
# register the log file writer as UI Handler
bb.event.register_UIHhandler(EventLogWriteHandler())

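The event log written by the EventWriter above is one JSON object per line, with the pickled event stored under "vars". A hypothetical reader, with a placeholder file name:

    import json

    with open('bitbake_eventlog.json') as f:    # placeholder path
        for line in f:
            entry = json.loads(line)
            # the first line carries "allvariables"; later lines carry "class"/"vars"
            print(entry.get('class', 'configuration header'))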
#
# Copy of the data store which has been expanded.
# Used for firing events and accessing variables where expansion needs to be accounted for
# Special updated configuration we use for firing events
#
self.expanded_data = bb.data.createCopy(self.data)
bb.data.update_data(self.expanded_data)
bb.parse.init_parser(self.expanded_data)
self.event_data = bb.data.createCopy(self.data)
bb.data.update_data(self.event_data)
bb.parse.init_parser(self.event_data)

if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
self.disableDataTracking()

self.data.renameVar("__depends", "__base_depends")
self.add_filewatch(self.data.getVar("__base_depends", False), self.configwatcher)


def enableDataTracking(self):
self.configuration.tracking = True
if hasattr(self, "data"):
@@ -383,14 +306,14 @@ class BBCooker:
f.write(total)

#add to history
loginfo = {"op":"append", "file":default_file, "line":total.count("\n")}
loginfo = {"op":append, "file":default_file, "line":total.count("\n")}
self.data.appendVar(var, val, **loginfo)

def saveConfigurationVar(self, var, val, default_file, op):

replaced = False
#do not save if nothing changed
if str(val) == self.data.getVar(var, False):
if str(val) == self.data.getVar(var):
return

conf_files = self.data.varhistory.get_variable_files(var)
@@ -402,7 +325,7 @@ class BBCooker:
listval += "%s " % value
val = listval

topdir = self.data.getVar("TOPDIR", False)
topdir = self.data.getVar("TOPDIR")

#comment or replace operations made on var
for conf_file in conf_files:
@@ -452,12 +375,12 @@ class BBCooker:
f.write(total)

#add to history
loginfo = {"op":"set", "file":default_file, "line":total.count("\n")}
loginfo = {"op":set, "file":default_file, "line":total.count("\n")}
self.data.setVar(var, val, **loginfo)

def removeConfigurationVar(self, var):
conf_files = self.data.varhistory.get_variable_files(var)
topdir = self.data.getVar("TOPDIR", False)
topdir = self.data.getVar("TOPDIR")

for conf_file in conf_files:
if topdir in conf_file:
@@ -497,7 +420,7 @@ class BBCooker:

def parseConfiguration(self):
# Set log file verbosity
verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", False))
verboselogs = bb.utils.to_boolean(self.data.getVar("BB_VERBOSE_LOGS", "0"))
if verboselogs:
bb.msg.loggerVerboseLogs = True

@@ -514,29 +437,9 @@ class BBCooker:

self.handleCollections( self.data.getVar("BBFILE_COLLECTIONS", True) )

def updateConfigOpts(self, options, environment):
def updateConfigOpts(self,options):
for o in options:
setattr(self.configuration, o, options[o])
clean = True
for k in bb.utils.approved_variables():
if k in environment and k not in self.configuration.env:
logger.debug(1, "Updating environment variable %s to %s" % (k, environment[k]))
self.configuration.env[k] = environment[k]
clean = False
if k in self.configuration.env and k not in environment:
logger.debug(1, "Updating environment variable %s (deleted)" % (k))
del self.configuration.env[k]
clean = False
if k not in self.configuration.env and k not in environment:
continue
if environment[k] != self.configuration.env[k]:
logger.debug(1, "Updating environment variable %s to %s" % (k, environment[k]))
self.configuration.env[k] = environment[k]
clean = False
if not clean:
logger.debug(1, "Base environment change, triggering reparse")
self.baseconfig_valid = False
self.reset()

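The environment handling added to updateConfigOpts() above covers three cases: a key newly present in the incoming environment, a key that disappeared, and a changed value. A toy model of that logic, independent of the cooker:

    def sync_env(current, incoming, approved):
        clean = True
        for k in approved:
            if k in incoming and k not in current:
                current[k] = incoming[k]; clean = False       # newly set
            elif k in current and k not in incoming:
                del current[k]; clean = False                 # deleted
            elif k in current and current[k] != incoming[k]:
                current[k] = incoming[k]; clean = False       # changed
        return clean   # False means a reparse would be triggered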
def runCommands(self, server, data, abort):
"""
@@ -583,11 +486,11 @@ class BBCooker:
fn = self.matchFile(fn)
fn = bb.cache.Cache.realfn2virtual(fn, cls)
elif len(pkgs_to_build) == 1:
ignore = self.expanded_data.getVar("ASSUME_PROVIDED", True) or ""
ignore = self.data.getVar("ASSUME_PROVIDED", True) or ""
if pkgs_to_build[0] in set(ignore.split()):
bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)
taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, None, self.configuration.abort)

targetid = taskdata.getbuild_id(pkgs_to_build[0])
fnid = taskdata.build_targets[targetid][0]
@@ -617,10 +520,10 @@ class BBCooker:
data.expandKeys(envdata)
for e in envdata.keys():
if data.getVarFlag( e, 'python', envdata ):
logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, True))
logger.plain("\npython %s () {\n%s}\n", e, data.getVar(e, envdata, 1))


def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False):
def buildTaskData(self, pkgs_to_build, task, abort):
"""
Prepare a runqueue and taskdata object for iteration over pkgs_to_build
"""
@@ -635,7 +538,7 @@ class BBCooker:
localdata = data.createCopy(self.data)
bb.data.update_data(localdata)
bb.data.expandKeys(localdata)
taskdata = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete)
taskdata = bb.taskdata.TaskData(abort, skiplist=self.skiplist)

current = 0
runlist = []
@@ -660,10 +563,10 @@ class BBCooker:

# We set abort to False here to prevent unbuildable targets raising
# an exception when we're just generating data
taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)
taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, task, False)

return runlist, taskdata

######## WARNING : this function requires cache_extra to be enabled ########

def generateTaskDepTreeData(self, pkgs_to_build, task):
@@ -912,8 +815,8 @@ class BBCooker:
for appends in appends_without_recipes
for append in appends)
msg = 'No recipes available for:\n%s' % '\n'.join(appendlines)
warn_only = self.data.getVar("BB_DANGLINGAPPENDS_WARNONLY", \
False) or "no"
warn_only = data.getVar("BB_DANGLINGAPPENDS_WARNONLY", \
self.data, False) or "no"
if warn_only.lower() in ("1", "yes", "true"):
bb.warn(msg)
else:
@@ -960,8 +863,8 @@ class BBCooker:
# Generate a list of parsed configuration files by searching the files
# listed in the __depends and __base_depends variables with a .conf suffix.
conffiles = []
dep_files = self.data.getVar('__base_depends', False) or []
dep_files = dep_files + (self.data.getVar('__depends', False) or [])
dep_files = self.data.getVar('__base_depends') or []
dep_files = dep_files + (self.data.getVar('__depends') or [])

for f in dep_files:
if f[0].endswith(".conf"):
@@ -1115,30 +1018,42 @@ class BBCooker:
# Check dependencies and store information for priority calculation
deps = self.data.getVar("LAYERDEPENDS_%s" % c, True)
if deps:
try:
deplist = bb.utils.explode_dep_versions2(deps)
except bb.utils.VersionStringException as vse:
bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
for dep, oplist in deplist.iteritems():
depnamelist = []
deplist = deps.split()
for dep in deplist:
depsplit = dep.split(':')
if len(depsplit) > 1:
try:
depver = int(depsplit[1])
except ValueError:
parselog.error("invalid version value in LAYERDEPENDS_%s: \"%s\"", c, dep)
errors = True
continue
else:
depver = None
dep = depsplit[0]
depnamelist.append(dep)

if dep in collection_list:
for opstr in oplist:
if depver:
layerver = self.data.getVar("LAYERVERSION_%s" % dep, True)
(op, depver) = opstr.split()
if layerver:
try:
res = bb.utils.vercmp_string_op(layerver, depver, op)
except bb.utils.VersionStringException as vse:
bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
if not res:
parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
lver = int(layerver)
except ValueError:
parselog.error("invalid value for LAYERVERSION_%s: \"%s\"", c, layerver)
errors = True
continue
if lver != depver:
parselog.error("Layer '%s' depends on version %d of layer '%s', but version %d is enabled in your configuration", c, depver, dep, lver)
errors = True
else:
parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
parselog.error("Layer '%s' depends on version %d of layer '%s', which exists in your configuration but does not specify a version", c, depver, dep)
errors = True
else:
parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
errors = True
collection_depends[c] = deplist.keys()
collection_depends[c] = depnamelist
else:
collection_depends[c] = []

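The LAYERDEPENDS handling above moves from integer-only versions to operator constraints via explode_dep_versions2() and vercmp_string_op(). A short sketch of those two helpers, assuming a BitBake environment and a placeholder layer name:

    import bb.utils

    deps = bb.utils.explode_dep_versions2("mylayer (>= 3)")
    for dep, constraints in deps.items():
        for opstr in constraints:
            op, wanted = opstr.split()
            # True here, since version "4" satisfies ">= 3"
            print(dep, bb.utils.vercmp_string_op("4", wanted, op))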
|
||||
@@ -1178,12 +1093,9 @@ class BBCooker:
|
||||
"""
|
||||
Setup any variables needed before starting a build
|
||||
"""
|
||||
t = time.gmtime()
|
||||
if not self.data.getVar("BUILDNAME", False):
|
||||
self.data.setVar("BUILDNAME", "${DATE}${TIME}")
|
||||
self.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', t))
|
||||
self.data.setVar("DATE", time.strftime('%Y%m%d', t))
|
||||
self.data.setVar("TIME", time.strftime('%H%M%S', t))
|
||||
if not self.data.getVar("BUILDNAME"):
|
||||
self.data.setVar("BUILDNAME", time.strftime('%Y%m%d%H%M'))
|
||||
self.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', time.gmtime()))
|
||||
|
||||
def matchFiles(self, bf):
|
||||
"""
|
||||
@@ -1193,7 +1105,7 @@ class BBCooker:
|
||||
bf = os.path.abspath(bf)
|
||||
|
||||
self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities)
|
||||
filelist, masked = self.collection.collect_bbfiles(self.data, self.expanded_data)
|
||||
filelist, masked = self.collection.collect_bbfiles(self.data, self.event_data)
|
||||
try:
|
||||
os.stat(bf)
|
||||
bf = os.path.abspath(bf)
|
||||
@@ -1282,8 +1194,8 @@ class BBCooker:
|
||||
taskdata = bb.taskdata.TaskData(self.configuration.abort)
|
||||
taskdata.add_provider(self.data, self.recipecache, item)
|
||||
|
||||
buildname = self.data.getVar("BUILDNAME", True)
|
||||
bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.expanded_data)
|
||||
buildname = self.data.getVar("BUILDNAME")
|
||||
bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.event_data)
|
||||
|
||||
# Execute the runqueue
|
||||
runlist = [[item, "do_%s" % task]]
|
||||
@@ -1310,7 +1222,7 @@ class BBCooker:
|
||||
return False
|
||||
|
||||
if not retval:
|
||||
bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, item, failures), self.expanded_data)
|
||||
bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, item, failures), self.event_data)
|
||||
self.command.finishAsyncCommand(msg)
|
||||
return False
|
||||
if retval is True:
|
||||
@@ -1350,12 +1262,11 @@ class BBCooker:
|
||||
return True
|
||||
return retval
|
||||
|
||||
build.reset_cache()
|
||||
self.buildSetVars()
|
||||
|
||||
taskdata, runlist, fulltargetlist = self.buildTaskData(targets, task, self.configuration.abort)
|
||||
|
||||
buildname = self.data.getVar("BUILDNAME", False)
|
||||
buildname = self.data.getVar("BUILDNAME")
|
||||
bb.event.fire(bb.event.BuildStarted(buildname, fulltargetlist), self.data)
|
||||
|
||||
rq = bb.runqueue.RunQueue(self, self.data, self.recipecache, taskdata, runlist)
|
||||
@@ -1409,7 +1320,7 @@ class BBCooker:
|
||||
if base_image is None:
|
||||
imagefile.write("inherit core-image\n")
|
||||
else:
|
||||
topdir = self.data.getVar("TOPDIR", False)
|
||||
topdir = self.data.getVar("TOPDIR")
|
||||
if topdir in base_image:
|
||||
base_image = require_line.split()[1]
|
||||
imagefile.write("require " + base_image + "\n")
|
||||
@@ -1459,14 +1370,17 @@ class BBCooker:
|
||||
if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
|
||||
bb.event.fire(bb.event.SanityCheck(False), self.data)
|
||||
|
||||
ignore = self.expanded_data.getVar("ASSUME_PROVIDED", True) or ""
|
||||
ignore = self.data.getVar("ASSUME_PROVIDED", True) or ""
|
||||
self.recipecache.ignored_dependencies = set(ignore.split())
|
||||
|
||||
for dep in self.configuration.extra_assume_provided:
|
||||
self.recipecache.ignored_dependencies.add(dep)
|
||||
|
||||
self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities)
|
||||
(filelist, masked) = self.collection.collect_bbfiles(self.data, self.expanded_data)
|
||||
(filelist, masked) = self.collection.collect_bbfiles(self.data, self.event_data)
|
||||
|
||||
self.data.renameVar("__depends", "__base_depends")
|
||||
self.add_filewatch(self.data.getVar("__base_depends"), self.configwatcher)
|
||||
|
||||
self.parser = CookerParser(self, filelist, masked)
|
||||
self.parsecache_valid = True
|
||||
@@ -1479,13 +1393,8 @@ class BBCooker:
|
||||
raise bb.BBHandledException()
|
||||
self.show_appends_with_no_recipes()
|
||||
self.handlePrefProviders()
|
||||
self.recipecache.bbfile_priority = self.collection.collection_priorities(self.recipecache.pkg_fn, self.data)
|
||||
self.recipecache.bbfile_priority = self.collection.collection_priorities(self.recipecache.pkg_fn)
|
||||
self.state = state.running
|
||||
|
||||
# Send an event listing all stamps reachable after parsing
|
||||
# which the metadata may use to clean up stale data
|
||||
event = bb.event.ReachableStamps(self.recipecache.stamp)
|
||||
bb.event.fire(event, self.expanded_data)
|
||||
return None
|
||||
|
||||
return True
|
||||
@@ -1498,7 +1407,7 @@ class BBCooker:
|
||||
if len(pkgs_to_build) == 0:
|
||||
raise NothingToBuild
|
||||
|
||||
ignore = (self.expanded_data.getVar("ASSUME_PROVIDED", True) or "").split()
|
||||
ignore = (self.data.getVar("ASSUME_PROVIDED", True) or "").split()
|
||||
for pkg in pkgs_to_build:
|
||||
if pkg in ignore:
|
||||
parselog.warn("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
|
||||
@@ -1528,13 +1437,13 @@ class BBCooker:
|
||||
try:
|
||||
self.prhost = prserv.serv.auto_start(self.data)
|
||||
except prserv.serv.PRServiceConfigError:
|
||||
bb.event.fire(CookerExit(), self.expanded_data)
|
||||
bb.event.fire(CookerExit(), self.event_data)
|
||||
self.state = state.error
|
||||
return
|
||||
|
||||
def post_serve(self):
|
||||
prserv.serv.auto_shutdown(self.data)
|
||||
bb.event.fire(CookerExit(), self.expanded_data)
|
||||
bb.event.fire(CookerExit(), self.event_data)
|
||||
lockfile = self.lock.name
|
||||
self.lock.close()
|
||||
self.lock = None
|
||||
@@ -1563,7 +1472,6 @@ class BBCooker:
|
||||
msg += ":\n%s" % str(procs)
|
||||
print(msg)
|
||||
|
||||
|
||||
def shutdown(self, force = False):
|
||||
if force:
|
||||
self.state = state.forceshutdown
|
||||
@@ -1576,19 +1484,6 @@ class BBCooker:
|
||||
def reset(self):
|
||||
self.initConfigurationData()
|
||||
|
||||
def lockBitbake(self):
|
||||
if not hasattr(self, 'lock'):
|
||||
self.lock = None
|
||||
if self.data:
|
||||
lockfile = self.data.expand("${TOPDIR}/bitbake.lock")
|
||||
if lockfile:
|
||||
self.lock = bb.utils.lockfile(lockfile, False, False)
|
||||
return self.lock
|
||||
|
||||
def unlockBitbake(self):
|
||||
if hasattr(self, 'lock') and self.lock:
|
||||
bb.utils.unlockfile(self.lock)
|
||||
|
||||
def server_main(cooker, func, *args):
|
||||
cooker.pre_serve()
|
||||
|
||||
@@ -1752,7 +1647,7 @@ class CookerCollectFiles(object):
|
||||
filelist.append(filename)
|
||||
return filelist
|
||||
|
||||
def collection_priorities(self, pkgfns, d):
|
||||
def collection_priorities(self, pkgfns):
|
||||
|
||||
priorities = {}
|
||||
|
||||
@@ -1761,10 +1656,10 @@ class CookerCollectFiles(object):
|
||||
for p in pkgfns:
|
||||
realfn, cls = bb.cache.Cache.virtualfn2realfn(p)
|
||||
priorities[p] = self.calc_bbfile_priority(realfn, matched)
|
||||
|
||||
|
||||
# Don't show the warning if the BBFILE_PATTERN did match .bbappend files
|
||||
unmatched = set()
|
||||
for _, _, regex, pri in self.bbfile_config_priorities:
|
||||
for _, _, regex, pri in self.bbfile_config_priorities:
|
||||
if not regex in matched:
|
||||
unmatched.add(regex)
|
||||
|
||||
@@ -1781,8 +1676,7 @@ class CookerCollectFiles(object):
|
||||
|
||||
for collection, pattern, regex, _ in self.bbfile_config_priorities:
|
||||
if regex in unmatched:
|
||||
if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection, True) != '1':
|
||||
collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
|
||||
collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
|
||||
|
||||
return priorities
|
||||
|
||||
@@ -1848,6 +1742,8 @@ class Parser(multiprocessing.Process):
|
||||
finally:
|
||||
logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
|
||||
prof.dump_stats(logfile)
|
||||
bb.utils.process_profilelog(logfile)
|
||||
print("Raw profiling information saved to %s and processed statistics to %s.processed" % (logfile, logfile))
|
||||
|
||||
def realrun(self):
|
||||
if self.init:
|
||||
@@ -1917,7 +1813,6 @@ class CookerParser(object):
|
||||
self.current = 0
|
||||
self.num_processes = int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or
|
||||
multiprocessing.cpu_count())
|
||||
self.process_names = []
|
||||
|
||||
self.bb_cache = bb.cache.Cache(self.cfgdata, self.cfghash, cooker.caches_array)
|
||||
self.fromcache = []
|
||||
@@ -1953,7 +1848,6 @@ class CookerParser(object):
|
||||
for i in range(0, self.num_processes):
|
||||
parser = Parser(self.jobs, self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
|
||||
parser.start()
|
||||
self.process_names.append(parser.name)
|
||||
self.processes.append(parser)
|
||||
|
||||
self.results = itertools.chain(self.results, self.parse_generator())
|
||||
@@ -1997,16 +1891,6 @@ class CookerParser(object):
|
||||
multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
|
||||
bb.codeparser.parser_cache_savemerge(self.cooker.data)
|
||||
bb.fetch.fetcher_parse_done(self.cooker.data)
|
||||
if self.cooker.configuration.profile:
|
||||
profiles = []
|
||||
for i in self.process_names:
|
||||
logfile = "profile-parse-%s.log" % i
|
||||
if os.path.exists(logfile):
|
||||
profiles.append(logfile)
|
||||
|
||||
pout = "profile-parse.log.processed"
|
||||
bb.utils.process_profilelog(profiles, pout = pout)
|
||||
print("Processed parsing statistics saved to %s" % (pout))
|
||||
|
||||
def load_cached(self):
|
||||
for filename, appends in self.fromcache:
|
||||
|
||||
@@ -33,8 +33,8 @@ logger = logging.getLogger("BitBake")
|
||||
parselog = logging.getLogger("BitBake.Parsing")
|
||||
|
||||
class ConfigParameters(object):
|
||||
def __init__(self, argv=sys.argv):
|
||||
self.options, targets = self.parseCommandLine(argv)
|
||||
def __init__(self):
|
||||
self.options, targets = self.parseCommandLine()
|
||||
self.environment = self.parseEnvironment()
|
||||
|
||||
self.options.pkgs_to_build = targets or []
|
||||
@@ -46,7 +46,7 @@ class ConfigParameters(object):
|
||||
for key, val in self.options.__dict__.items():
|
||||
setattr(self, key, val)
|
||||
|
||||
def parseCommandLine(self, argv=sys.argv):
|
||||
def parseCommandLine(self):
|
||||
raise Exception("Caller must implement commandline option parsing")
|
||||
|
||||
def parseEnvironment(self):
|
||||
@@ -69,14 +69,14 @@ class ConfigParameters(object):
|
||||
if bbpkgs:
|
||||
self.options.pkgs_to_build.extend(bbpkgs.split())
|
||||
|
||||
def updateToServer(self, server, environment):
|
||||
def updateToServer(self, server):
|
||||
options = {}
|
||||
for o in ["abort", "tryaltconfigs", "force", "invalidate_stamp",
|
||||
"verbose", "debug", "dry_run", "dump_signatures",
|
||||
"debug_domains", "extra_assume_provided", "profile"]:
|
||||
options[o] = getattr(self.options, o)
|
||||
|
||||
ret, error = server.runCommand(["updateConfig", options, environment])
|
||||
ret, error = server.runCommand(["updateConfig", options])
|
||||
if error:
|
||||
raise Exception("Unable to update the server configuration with local parameters: %s" % error)
|
||||
|
||||
@@ -139,7 +139,6 @@ class CookerConfiguration(object):
|
||||
self.dry_run = False
|
||||
self.tracking = False
|
||||
self.interface = []
|
||||
self.writeeventlog = False
|
||||
|
||||
self.env = {}
|
||||
|
||||
@@ -173,12 +172,9 @@ def catch_parse_error(func):
|
||||
def wrapped(fn, *args):
|
||||
try:
|
||||
return func(fn, *args)
|
||||
except IOError as exc:
|
||||
except (IOError, bb.parse.ParseError, bb.data_smart.ExpansionError) as exc:
|
||||
import traceback
|
||||
parselog.critical(traceback.format_exc())
|
||||
parselog.critical("Unable to parse %s: %s" % (fn, exc))
|
||||
sys.exit(1)
|
||||
except (bb.parse.ParseError, bb.data_smart.ExpansionError) as exc:
|
||||
parselog.critical( traceback.format_exc())
|
||||
parselog.critical("Unable to parse %s: %s" % (fn, exc))
|
||||
sys.exit(1)
|
||||
return wrapped
|
||||
@@ -272,11 +268,8 @@ class CookerDataBuilder(object):
|
||||
layers = (data.getVar('BBLAYERS', True) or "").split()
|
||||
|
||||
data = bb.data.createCopy(data)
|
||||
approved = bb.utils.approved_variables()
|
||||
for layer in layers:
|
||||
parselog.debug(2, "Adding layer %s", layer)
|
||||
if 'HOME' in approved and '~' in layer:
|
||||
layer = os.path.expanduser(layer)
|
||||
data.setVar('LAYERDIR', layer)
|
||||
data = parse_config_file(os.path.join(layer, "conf", "layer.conf"), data)
|
||||
data.expandVarref('LAYERDIR')
|
||||
@@ -304,15 +297,15 @@ class CookerDataBuilder(object):
|
||||
|
||||
# Nomally we only register event handlers at the end of parsing .bb files
|
||||
# We register any handlers we've found so far here...
|
||||
for var in data.getVar('__BBHANDLERS', False) or []:
|
||||
bb.event.register(var, data.getVar(var, False), (data.getVarFlag(var, "eventmask", True) or "").split())
|
||||
for var in data.getVar('__BBHANDLERS') or []:
|
||||
bb.event.register(var, data.getVar(var), (data.getVarFlag(var, "eventmask", True) or "").split())
|
||||
|
||||
if data.getVar("BB_WORKERCONTEXT", False) is None:
|
||||
bb.fetch.fetcher_init(data)
|
||||
bb.codeparser.parser_cache_init(data)
|
||||
bb.event.fire(bb.event.ConfigParsed(), data)
|
||||
|
||||
if data.getVar("BB_INVALIDCONF", False) is True:
|
||||
if data.getVar("BB_INVALIDCONF") is True:
|
||||
data.setVar("BB_INVALIDCONF", False)
|
||||
self.parseConfigurationFiles(self.prefiles, self.postfiles)
|
||||
return
|
||||
|
||||
@@ -84,7 +84,7 @@ def setVar(var, value, d):
|
||||
d.setVar(var, value)
|
||||
|
||||
|
||||
def getVar(var, d, exp = False):
|
||||
def getVar(var, d, exp = 0):
|
||||
"""Gets the value of a variable"""
|
||||
return d.getVar(var, exp)
|
||||
|
||||
@@ -159,12 +159,13 @@ def expandKeys(alterdata, readdata = None):
|
||||
|
||||
# These two for loops are split for performance to maximise the
|
||||
# usefulness of the expand cache
|
||||
for key in sorted(todolist):
|
||||
|
||||
for key in todolist:
|
||||
ekey = todolist[key]
|
||||
newval = alterdata.getVar(ekey, False)
|
||||
if newval is not None:
|
||||
val = alterdata.getVar(key, False)
|
||||
if val is not None:
|
||||
newval = alterdata.getVar(ekey, 0)
|
||||
if newval:
|
||||
val = alterdata.getVar(key, 0)
|
||||
if val is not None and newval is not None:
|
||||
bb.warn("Variable key %s (%s) replaces original key %s (%s)." % (key, val, ekey, newval))
|
||||
alterdata.renameVar(key, ekey)
|
||||
|
||||
@@ -174,7 +175,7 @@ def inheritFromOS(d, savedenv, permitted):
|
||||
for s in savedenv.keys():
|
||||
if s in permitted:
|
||||
try:
|
||||
d.setVar(s, savedenv.getVar(s, True), op = 'from env')
|
||||
d.setVar(s, getVar(s, savedenv, True), op = 'from env')
|
||||
if s in exportlist:
|
||||
d.setVarFlag(s, "export", True, op = 'auto env export')
|
||||
except TypeError:
|
||||
@@ -182,39 +183,39 @@ def inheritFromOS(d, savedenv, permitted):
|
||||
|
||||
def emit_var(var, o=sys.__stdout__, d = init(), all=False):
|
||||
"""Emit a variable to be sourced by a shell."""
|
||||
if d.getVarFlag(var, "python"):
|
||||
return False
|
||||
if getVarFlag(var, "python", d):
|
||||
return 0
|
||||
|
||||
export = d.getVarFlag(var, "export")
|
||||
unexport = d.getVarFlag(var, "unexport")
|
||||
func = d.getVarFlag(var, "func")
|
||||
export = getVarFlag(var, "export", d)
|
||||
unexport = getVarFlag(var, "unexport", d)
|
||||
func = getVarFlag(var, "func", d)
|
||||
if not all and not export and not unexport and not func:
|
||||
return False
|
||||
return 0
|
||||
|
||||
try:
|
||||
if all:
|
||||
oval = d.getVar(var, False)
|
||||
val = d.getVar(var, True)
|
||||
oval = getVar(var, d, 0)
|
||||
val = getVar(var, d, 1)
|
||||
except (KeyboardInterrupt, bb.build.FuncFailed):
|
||||
raise
|
||||
except Exception as exc:
|
||||
o.write('# expansion of %s threw %s: %s\n' % (var, exc.__class__.__name__, str(exc)))
|
||||
return False
|
||||
return 0
|
||||
|
||||
if all:
|
||||
d.varhistory.emit(var, oval, val, o, d)
|
||||
d.varhistory.emit(var, oval, val, o)
|
||||
|
||||
if (var.find("-") != -1 or var.find(".") != -1 or var.find('{') != -1 or var.find('}') != -1 or var.find('+') != -1) and not all:
|
||||
return False
|
||||
return 0
|
||||
|
||||
varExpanded = d.expand(var)
|
||||
varExpanded = expand(var, d)
|
||||
|
||||
if unexport:
|
||||
o.write('unset %s\n' % varExpanded)
|
||||
return False
|
||||
return 0
|
||||
|
||||
if val is None:
|
||||
return False
|
||||
return 0
|
||||
|
||||
val = str(val)
|
||||
|
||||
@@ -223,7 +224,7 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):
|
||||
val = val[3:] # Strip off "() "
|
||||
o.write("%s() %s\n" % (varExpanded, val))
|
||||
o.write("export -f %s\n" % (varExpanded))
|
||||
return True
|
||||
return 1
|
||||
|
||||
if func:
|
||||
# NOTE: should probably check for unbalanced {} within the var
|
||||
@@ -239,7 +240,7 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):
|
||||
alter = re.sub('\n', ' \\\n', alter)
|
||||
alter = re.sub('\\$', '\\\\$', alter)
|
||||
o.write('%s="%s"\n' % (varExpanded, alter))
|
||||
return False
|
||||
return 0
|
||||
|
||||
def emit_env(o=sys.__stdout__, d = init(), all=False):
|
||||
"""Emits all items in the data store in a format such that it can be sourced by a shell."""
|
||||
@@ -271,9 +272,8 @@ def emit_func(func, o=sys.__stdout__, d = init()):
|
||||
|
||||
keys = (key for key in d.keys() if not key.startswith("__") and not d.getVarFlag(key, "func"))
|
||||
for key in keys:
|
||||
emit_var(key, o, d, False)
|
||||
emit_var(key, o, d, False) and o.write('\n')
|
||||
|
||||
o.write('\n')
|
||||
emit_var(func, o, d, False) and o.write('\n')
|
||||
newdeps = bb.codeparser.ShellParser(func, logger).parse_shell(d.getVar(func, True))
|
||||
newdeps |= set((d.getVarFlag(func, "vardeps", True) or "").split())
|
||||
@@ -420,7 +420,7 @@ def generate_dependencies(d):
|
||||
deps = {}
|
||||
values = {}
|
||||
|
||||
tasklist = d.getVar('__BBTASKS', False) or []
|
||||
tasklist = d.getVar('__BBTASKS') or []
|
||||
for task in tasklist:
|
||||
deps[task], values[task] = build_dependencies(task, keys, shelldeps, varflagsexcl, d)
|
||||
newdeps = deps[task]
|
||||
@@ -438,7 +438,7 @@ def generate_dependencies(d):
|
||||
return tasklist, deps, values
|
||||
|
||||
def inherits_class(klass, d):
|
||||
val = d.getVar('__inherit_cache', False) or []
|
||||
val = getVar('__inherit_cache', d) or []
|
||||
needle = os.path.join('classes', '%s.bbclass' % klass)
|
||||
for v in val:
|
||||
if v.endswith(needle):
|
||||
|
||||
@@ -54,36 +54,27 @@ def infer_caller_details(loginfo, parent = False, varval = True):
|
||||
return
|
||||
# Infer caller's likely values for variable (var) and value (value),
|
||||
# to reduce clutter in the rest of the code.
|
||||
above = None
|
||||
def set_above():
|
||||
if varval and ('variable' not in loginfo or 'detail' not in loginfo):
|
||||
try:
|
||||
raise Exception
|
||||
except Exception:
|
||||
tb = sys.exc_info()[2]
|
||||
if parent:
|
||||
return tb.tb_frame.f_back.f_back.f_back
|
||||
above = tb.tb_frame.f_back.f_back
|
||||
else:
|
||||
return tb.tb_frame.f_back.f_back
|
||||
|
||||
if varval and ('variable' not in loginfo or 'detail' not in loginfo):
|
||||
if not above:
|
||||
above = set_above()
|
||||
lcls = above.f_locals.items()
|
||||
above = tb.tb_frame.f_back
|
||||
lcls = above.f_locals.items()
|
||||
for k, v in lcls:
|
||||
if k == 'value' and 'detail' not in loginfo:
|
||||
loginfo['detail'] = v
|
||||
if k == 'var' and 'variable' not in loginfo:
|
||||
loginfo['variable'] = v
|
||||
# Infer file/line/function from traceback
|
||||
# Don't use traceback.extract_stack() since it fills the line contents which
|
||||
# we don't need and that hits stat syscalls
|
||||
if 'file' not in loginfo:
|
||||
if not above:
|
||||
above = set_above()
|
||||
f = above.f_back
|
||||
line = f.f_lineno
|
||||
file = f.f_code.co_filename
|
||||
func = f.f_code.co_name
|
||||
depth = 3
|
||||
if parent:
|
||||
depth = 4
|
||||
file, line, func, text = traceback.extract_stack(limit = depth)[0]
|
||||
loginfo['file'] = file
|
||||
loginfo['line'] = line
|
||||
if func not in loginfo:
|
||||
@@ -240,10 +231,6 @@ class VariableHistory(object):
|
||||
|
||||
if var not in self.variables:
|
||||
self.variables[var] = []
|
||||
if not isinstance(self.variables[var], list):
|
||||
return
|
||||
if 'nodups' in loginfo and loginfo in self.variables[var]:
|
||||
return
|
||||
self.variables[var].append(loginfo.copy())
|
||||
|
||||
def variable(self, var):
|
||||
@@ -252,20 +239,8 @@ class VariableHistory(object):
|
||||
else:
|
||||
return []
|
||||
|
||||
def emit(self, var, oval, val, o, d):
|
||||
def emit(self, var, oval, val, o):
|
||||
history = self.variable(var)
|
||||
|
||||
# Append override history
|
||||
if var in d.overridedata:
|
||||
for (r, override) in d.overridedata[var]:
|
||||
for event in self.variable(r):
|
||||
loginfo = event.copy()
|
||||
if 'flag' in loginfo and not loginfo['flag'].startswith("_"):
|
||||
continue
|
||||
loginfo['variable'] = var
|
||||
loginfo['op'] = 'override[%s]:%s' % (override, loginfo['op'])
|
||||
history.append(loginfo)
|
||||
|
||||
commentVal = re.sub('\n', '\n#', str(oval))
|
||||
if history:
|
||||
if len(history) == 1:
|
||||
@@ -321,23 +296,18 @@ class VariableHistory(object):
|
||||
self.variables[var] = []
|
||||
|
||||
class DataSmart(MutableMapping):
|
||||
def __init__(self):
|
||||
def __init__(self, special = COWDictBase.copy(), seen = COWDictBase.copy() ):
|
||||
self.dict = {}
|
||||
|
||||
self.inchistory = IncludeHistory()
|
||||
self.varhistory = VariableHistory(self)
|
||||
self._tracking = False
|
||||
|
||||
self.expand_cache = {}
|
||||
|
||||
# cookie monster tribute
|
||||
# Need to be careful about writes to overridedata as
|
||||
# its only a shallow copy, could influence other data store
|
||||
# copies!
|
||||
self.overridedata = {}
|
||||
self.overrides = None
|
||||
self.overridevars = set(["OVERRIDES", "FILE"])
|
||||
self.inoverride = False
|
||||
self._special_values = special
|
||||
self._seen_overrides = seen
|
||||
|
||||
self.expand_cache = {}
|
||||
|
||||
def enableTracking(self):
|
||||
self._tracking = True
|
||||
@@ -379,25 +349,97 @@ class DataSmart(MutableMapping):
|
||||
def expand(self, s, varname = None):
|
||||
return self.expandWithRefs(s, varname).value

    def finalize(self, parent = False):
        return

    def internal_finalize(self, parent = False):
        """Performs final steps upon the datastore, including application of overrides"""
        self.overrides = None

    def need_overrides(self):
        if self.overrides is None:
            if self.inoverride:
                return
            self.inoverride = True
            # Can end up here recursively so setup dummy values
            self.overrides = []
            self.overridesset = set()
            self.overrides = (self.getVar("OVERRIDES", True) or "").split(":") or []
            self.overridesset = set(self.overrides)
            self.inoverride = False
        self.expand_cache = {}
        overrides = (self.getVar("OVERRIDES", True) or "").split(":") or []
        finalize_caller = {
            'op': 'finalize',
        }
        infer_caller_details(finalize_caller, parent = parent, varval = False)

        #
        # Well let us see what breaks here. We used to iterate
        # over each variable and apply the override and then
        # do the line expanding.
        # If we have bad luck - which we will have - the keys
        # were in some order that is so important for this
        # method which we don't have anymore.
        # Anyway we will fix that and write test cases this
        # time.

        #
        # First we apply all overrides
        # Then we will handle _append and _prepend and store the _remove
        # information for later.
        #

        # We only want to report finalization once per variable overridden.
        finalizes_reported = {}

        for o in overrides:
            # calculate '_'+override
            l = len(o) + 1

            # see if one should even try
            if o not in self._seen_overrides:
                continue

            vars = self._seen_overrides[o].copy()
            for var in vars:
                name = var[:-l]
                try:
                    # Report only once, even if multiple changes.
                    if name not in finalizes_reported:
                        finalizes_reported[name] = True
                        finalize_caller['variable'] = name
                        finalize_caller['detail'] = 'was: ' + str(self.getVar(name, False))
                        self.varhistory.record(**finalize_caller)
                    # Copy history of the override over.
                    for event in self.varhistory.variable(var):
                        loginfo = event.copy()
                        loginfo['variable'] = name
                        loginfo['op'] = 'override[%s]:%s' % (o, loginfo['op'])
                        self.varhistory.record(**loginfo)
                    self.setVar(name, self.getVar(var, False), op = 'finalize', file = 'override[%s]' % o, line = '')
                    self.delVar(var)
                except Exception:
                    logger.info("Untracked delVar")

        # now on to the appends and prepends, and stashing the removes
        for op in __setvar_keyword__:
            if op in self._special_values:
                appends = self._special_values[op] or []
                for append in appends:
                    keep = []
                    for (a, o) in self.getVarFlag(append, op) or []:
                        match = True
                        if o:
                            for o2 in o.split("_"):
                                if not o2 in overrides:
                                    match = False
                        if not match:
                            keep.append((a, o))
                            continue

                        if op == "_append":
                            sval = self.getVar(append, False) or ""
                            sval += a
                            self.setVar(append, sval)
                        elif op == "_prepend":
                            sval = a + (self.getVar(append, False) or "")
                            self.setVar(append, sval)
                        elif op == "_remove":
                            removes = self.getVarFlag(append, "_removeactive", False) or []
                            removes.extend(a.split())
                            self.setVarFlag(append, "_removeactive", removes, ignore=True)

                    # We save overrides that may be applied at some later stage
                    if keep:
                        self.setVarFlag(append, op, keep, ignore=True)
                    else:
                        self.delVarFlag(append, op, ignore=True)
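
# Illustrative sketch of the override pass above (hypothetical variable names,
# not part of the diff): with OVERRIDES = "arm:linux", finalize-time override
# application replaces a variable with its strongest matching override:
#   d.setVar("OVERRIDES", "arm:linux")
#   d.setVar("CFLAGS", "-O2")
#   d.setVar("CFLAGS_arm", "-O2 -mthumb")
#   d.finalize()
#   d.getVar("CFLAGS", True)  # -> "-O2 -mthumb", since "arm" is in OVERRIDES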

    def initVar(self, var):
        self.expand_cache = {}
@@ -428,10 +470,6 @@ class DataSmart(MutableMapping):

    def setVar(self, var, value, **loginfo):
        #print("var=" + str(var) + " val=" + str(value))
        parsing=False
        if 'parsing' in loginfo:
            parsing=True
        if 'op' not in loginfo:
            loginfo['op'] = "set"
        self.expand_cache = {}
@@ -453,82 +491,52 @@ class DataSmart(MutableMapping):
            self.varhistory.record(**loginfo)
            # todo make sure keyword is not __doc__ or __module__
            # pay the cookie monster
            try:
                self._special_values[keyword].add(base)
            except KeyError:
                self._special_values[keyword] = set()
                self._special_values[keyword].add(base)

            # more cookies for the cookie monster
            if '_' in var:
                self._setvar_update_overrides(base, **loginfo)

            if base in self.overridevars:
                self.overridevars.update(self.expandWithRefs(value, var).references)
                self.internal_finalize(True)
            return

        if not var in self.dict:
            self._makeShadowCopy(var)

        if not parsing:
            if "_append" in self.dict[var]:
                del self.dict[var]["_append"]
            if "_prepend" in self.dict[var]:
                del self.dict[var]["_prepend"]
            if var in self.overridedata:
                active = []
                self.need_overrides()
                for (r, o) in self.overridedata[var]:
                    if o in self.overridesset:
                        active.append(r)
                    elif "_" in o:
                        if set(o.split("_")).issubset(self.overridesset):
                            active.append(r)
                for a in active:
                    self.delVar(a)
                del self.overridedata[var]

        # more cookies for the cookie monster
        if '_' in var:
            self._setvar_update_overrides(var, **loginfo)
            self._setvar_update_overrides(var)

        # setting var
        self.dict[var]["_content"] = value
        self.varhistory.record(**loginfo)

        if var in self.overridevars:
            self.overridevars.update(self.expandWithRefs(value, var).references)
            self.internal_finalize(True)

    def _setvar_update_overrides(self, var, **loginfo):
    def _setvar_update_overrides(self, var):
        # aka pay the cookie monster
        override = var[var.rfind('_')+1:]
        shortvar = var[:var.rfind('_')]
        while override:
            if shortvar not in self.overridedata:
                self.overridedata[shortvar] = []
            if [var, override] not in self.overridedata[shortvar]:
                # Force CoW by recreating the list first
                self.overridedata[shortvar] = list(self.overridedata[shortvar])
                self.overridedata[shortvar].append([var, override])
            if override not in self._seen_overrides:
                self._seen_overrides[override] = set()
            self._seen_overrides[override].add(var)
            override = None
            if "_" in shortvar:
                override = var[shortvar.rfind('_')+1:]
                shortvar = var[:shortvar.rfind('_')]
                if len(shortvar) == 0:
                    override = None
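
# Illustrative trace of _setvar_update_overrides() (names are hypothetical):
# setting "FOO_a_b" registers successively shorter base names, i.e.
#   overridedata["FOO_a"] -> [["FOO_a_b", "b"]]
#   overridedata["FOO"]   -> [["FOO_a_b", "a_b"]]
# so a later getVarFlag("FOO", "_content") can match "a_b" (or "b" when
# reading "FOO_a") against the active OVERRIDES set.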

    def getVar(self, var, expand=False, noweakdefault=False, parsing=False):
        return self.getVarFlag(var, "_content", expand, noweakdefault, parsing)
    def getVar(self, var, expand=False, noweakdefault=False):
        return self.getVarFlag(var, "_content", expand, noweakdefault)

    def renameVar(self, key, newkey, **loginfo):
        """
        Rename the variable key to newkey
        """
        val = self.getVar(key, 0, parsing=True)
        val = self.getVar(key, 0)
        if val is not None:
            loginfo['variable'] = newkey
            loginfo['op'] = 'rename from %s' % key
            loginfo['detail'] = val
            self.varhistory.record(**loginfo)
            self.setVar(newkey, val, ignore=True, parsing=True)
            self.setVar(newkey, val, ignore=True)

        for i in (__setvar_keyword__):
            src = self.getVarFlag(key, i)
@@ -539,14 +547,9 @@ class DataSmart(MutableMapping):
                dest.extend(src)
                self.setVarFlag(newkey, i, dest, ignore=True)

        if key in self.overridedata:
            self.overridedata[newkey] = []
            for (v, o) in self.overridedata[key]:
                self.overridedata[newkey].append([v.replace(key, newkey), o])
                self.renameVar(v, v.replace(key, newkey))

        if '_' in newkey and val is None:
            self._setvar_update_overrides(newkey, **loginfo)
            if i in self._special_values and key in self._special_values[i]:
                self._special_values[i].remove(key)
                self._special_values[i].add(newkey)

        loginfo['variable'] = key
        loginfo['op'] = 'rename (to)'
@@ -557,12 +560,14 @@ class DataSmart(MutableMapping):
    def appendVar(self, var, value, **loginfo):
        loginfo['op'] = 'append'
        self.varhistory.record(**loginfo)
        self.setVar(var + "_append", value, ignore=True, parsing=True)
        newvalue = (self.getVar(var, False) or "") + value
        self.setVar(var, newvalue, ignore=True)

    def prependVar(self, var, value, **loginfo):
        loginfo['op'] = 'prepend'
        self.varhistory.record(**loginfo)
        self.setVar(var + "_prepend", value, ignore=True, parsing=True)
        newvalue = value + (self.getVar(var, False) or "")
        self.setVar(var, newvalue, ignore=True)

    def delVar(self, var, **loginfo):
        loginfo['detail'] = ""
@@ -570,28 +575,12 @@ class DataSmart(MutableMapping):
        self.varhistory.record(**loginfo)
        self.expand_cache = {}
        self.dict[var] = {}
        if var in self.overridedata:
            del self.overridedata[var]
        if '_' in var:
            override = var[var.rfind('_')+1:]
            shortvar = var[:var.rfind('_')]
            while override:
                try:
                    if shortvar in self.overridedata:
                        # Force CoW by recreating the list first
                        self.overridedata[shortvar] = list(self.overridedata[shortvar])
                        self.overridedata[shortvar].remove([var, override])
                except ValueError as e:
                    pass
                override = None
                if "_" in shortvar:
                    override = var[shortvar.rfind('_')+1:]
                    shortvar = var[:shortvar.rfind('_')]
                    if len(shortvar) == 0:
                        override = None
        if override and override in self._seen_overrides and var in self._seen_overrides[override]:
            self._seen_overrides[override].remove(var)

    def setVarFlag(self, var, flag, value, **loginfo):
        self.expand_cache = {}
        if 'op' not in loginfo:
            loginfo['op'] = "set"
        loginfo['flag'] = flag
@@ -600,8 +589,8 @@ class DataSmart(MutableMapping):
            self._makeShadowCopy(var)
        self.dict[var][flag] = value

        if flag == "_defaultval" and '_' in var:
            self._setvar_update_overrides(var, **loginfo)
        if flag == "defaultval" and '_' in var:
            self._setvar_update_overrides(var)

        if flag == "unexport" or flag == "export":
            if not "__exportlist" in self.dict:
@@ -610,71 +599,14 @@ class DataSmart(MutableMapping):
                self.dict["__exportlist"]["_content"] = set()
            self.dict["__exportlist"]["_content"].add(var)

    def getVarFlag(self, var, flag, expand=False, noweakdefault=False, parsing=False):
    def getVarFlag(self, var, flag, expand=False, noweakdefault=False):
        local_var = self._findVar(var)
        value = None
        if flag == "_content" and var in self.overridedata and not parsing:
            match = False
            active = {}
            self.need_overrides()
            for (r, o) in self.overridedata[var]:
                # What about double overrides both with "_" in the name?
                if o in self.overridesset:
                    active[o] = r
                elif "_" in o:
                    if set(o.split("_")).issubset(self.overridesset):
                        active[o] = r

            mod = True
            while mod:
                mod = False
                for o in self.overrides:
                    for a in active.copy():
                        if a.endswith("_" + o):
                            t = active[a]
                            del active[a]
                            active[a.replace("_" + o, "")] = t
                            mod = True
                        elif a == o:
                            match = active[a]
                            del active[a]
            if match:
                value = self.getVar(match)

        if local_var is not None and value is None:
        if local_var is not None:
            if flag in local_var:
                value = copy.copy(local_var[flag])
            elif flag == "_content" and "_defaultval" in local_var and not noweakdefault:
                value = copy.copy(local_var["_defaultval"])

        if flag == "_content" and local_var is not None and "_append" in local_var and not parsing:
            if not value:
                value = ""
            self.need_overrides()
            for (r, o) in local_var["_append"]:
                match = True
                if o:
                    for o2 in o.split("_"):
                        if not o2 in self.overrides:
                            match = False
                if match:
                    value = value + r

        if flag == "_content" and local_var is not None and "_prepend" in local_var and not parsing:
            if not value:
                value = ""
            self.need_overrides()
            for (r, o) in local_var["_prepend"]:
                match = True
                if o:
                    for o2 in o.split("_"):
                        if not o2 in self.overrides:
                            match = False
                if match:
                    value = r + value

            elif flag == "_content" and "defaultval" in local_var and not noweakdefault:
                value = copy.copy(local_var["defaultval"])
        if expand and value:
            # Only getvar (flag == _content) hits the expand cache
            cachename = None
@@ -683,30 +615,19 @@ class DataSmart(MutableMapping):
            else:
                cachename = var + "[" + flag + "]"
            value = self.expand(value, cachename)

        if value and flag == "_content" and local_var is not None and "_remove" in local_var:
            removes = []
            self.need_overrides()
            for (r, o) in local_var["_remove"]:
                match = True
                if o:
                    for o2 in o.split("_"):
                        if not o2 in self.overrides:
                            match = False
                if match:
                    removes.extend(self.expand(r).split())

        if value and flag == "_content" and local_var is not None and "_removeactive" in local_var:
            removes = [self.expand(r).split() for r in local_var["_removeactive"]]
            removes = reduce(lambda a, b: a+b, removes, [])
            filtered = filter(lambda v: v not in removes,
                              value.split())
            value = " ".join(filtered)
        if expand and var in self.expand_cache:
        if expand:
            # We need to ensure the expand cache has the correct value
            # flag == "_content" here
            self.expand_cache[var].value = value
        return value

    def delVarFlag(self, var, flag, **loginfo):
        self.expand_cache = {}
        local_var = self._findVar(var)
        if not local_var:
            return
@@ -736,7 +657,6 @@ class DataSmart(MutableMapping):
        self.setVarFlag(var, flag, newvalue, ignore=True)

    def setVarFlags(self, var, flags, **loginfo):
        self.expand_cache = {}
        infer_caller_details(loginfo)
        if not var in self.dict:
            self._makeShadowCopy(var)
@@ -766,7 +686,6 @@ class DataSmart(MutableMapping):

    def delVarFlags(self, var, **loginfo):
        self.expand_cache = {}
        if not var in self.dict:
            self._makeShadowCopy(var)

@@ -784,12 +703,13 @@ class DataSmart(MutableMapping):
        else:
            del self.dict[var]

    def createCopy(self):
        """
        Create a copy of self by setting _data to self
        """
        # we really want this to be a DataSmart...
        data = DataSmart()
        data = DataSmart(seen=self._seen_overrides.copy(), special=self._special_values.copy())
        data.dict["_data"] = self.dict
        data.varhistory = self.varhistory.copy()
        data.varhistory.datasmart = data
@@ -797,12 +717,6 @@ class DataSmart(MutableMapping):

        data._tracking = self._tracking

        data.overrides = None
        data.overridevars = copy.copy(self.overridevars)
        # Should really be a deepcopy but has heavy overhead.
        # Instead, we're careful with writes.
        data.overridedata = copy.copy(self.overridedata)

        return data

    def expandVarref(self, variable, parents=False):
@@ -827,19 +741,12 @@ class DataSmart(MutableMapping):
                yield key

    def __iter__(self):
        deleted = set()
        overrides = set()
        def keylist(d):
            klist = set()
            for key in d:
                if key == "_data":
                    continue
                if key in deleted:
                    continue
                if key in overrides:
                    continue
                if not d[key]:
                    deleted.add(key)
                    continue
                klist.add(key)

@@ -848,21 +755,9 @@ class DataSmart(MutableMapping):

            return klist

        self.need_overrides()
        for var in self.overridedata:
            for (r, o) in self.overridedata[var]:
                if o in self.overridesset:
                    overrides.add(var)
                elif "_" in o:
                    if set(o.split("_")).issubset(self.overridesset):
                        overrides.add(var)

        for k in keylist(self.dict):
            yield k

        for k in overrides:
            yield k

    def __len__(self):
        return len(frozenset(self))
@@ -68,14 +68,9 @@ _ui_logfilters = {}
_ui_handler_seq = 0
_event_handler_map = {}
_catchall_handlers = {}
_eventfilter = None

def execute_handler(name, handler, event, d):
    event.data = d
    addedd = False
    if 'd' not in __builtins__:
        __builtins__['d'] = d
        addedd = True
    try:
        ret = handler(event)
    except (bb.parse.SkipRecipe, bb.BBHandledException):
@@ -91,8 +86,6 @@ def execute_handler(name, handler, event, d):
        raise
    finally:
        del event.data
        if addedd:
            del __builtins__['d']

def fire_class_handlers(event, d):
    if isinstance(event, logging.LogRecord):
@@ -102,9 +95,6 @@ def fire_class_handlers(event, d):
    evt_hmap = _event_handler_map.get(eid, {})
    for name, handler in _handlers.iteritems():
        if name in _catchall_handlers or name in evt_hmap:
            if _eventfilter:
                if not _eventfilter(name, handler, event, d):
                    continue
            execute_handler(name, handler, event, d)

ui_queue = []
@@ -215,10 +205,6 @@ def remove(name, handler):
    """Remove an Event handler"""
    _handlers.pop(name)

def set_eventfilter(func):
    global _eventfilter
    _eventfilter = func

def register_UIHhandler(handler):
    bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1
    _ui_handlers[_ui_handler_seq] = handler
@@ -503,16 +489,6 @@ class TargetsTreeGenerated(Event):
        Event.__init__(self)
        self._model = model

class ReachableStamps(Event):
    """
    An event listing all stamps reachable after parsing
    which the metadata may use to clean up stale data
    """

    def __init__(self, stamps):
        Event.__init__(self)
        self.stamps = stamps

class FilesMatchingFound(Event):
    """
    Event when a list of files matching the supplied pattern has

@@ -45,13 +45,6 @@ _checksum_cache = bb.checksum.FileChecksumCache()

logger = logging.getLogger("BitBake.Fetcher")

try:
    import cPickle as pickle
except ImportError:
    import pickle
    logger.info("Importing cPickle failed. "
                "Falling back to a very slow implementation.")

class BBFetchException(Exception):
    """Class all fetch exceptions inherit from"""
    def __init__(self, message):
@@ -61,17 +54,6 @@ class BBFetchException(Exception):
    def __str__(self):
        return self.msg

class UntrustedUrl(BBFetchException):
    """Exception raised when encountering a host not listed in BB_ALLOWED_NETWORKS"""
    def __init__(self, url, message=''):
        if message:
            msg = message
        else:
            msg = "The URL: '%s' is not trusted and cannot be used" % url
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (url,)

class MalformedUrl(BBFetchException):
    """Exception raised when encountering an invalid url"""
    def __init__(self, url, message=''):
@@ -543,7 +525,7 @@ def fetcher_compare_revisions(d):
def mirror_from_string(data):
    return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ]
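
# Illustrative example of mirror_from_string() (downloads.example.com is a
# placeholder): the MIRRORS/PREMIRRORS value is split on literal "\n" markers
# and whitespace into (find, replace) pairs:
#   mirror_from_string("git://.*/.* http://downloads.example.com/ \\n "
#                      "ftp://.*/.* http://downloads.example.com/")
#   # -> [['git://.*/.*', 'http://downloads.example.com/'],
#   #     ['ftp://.*/.*', 'http://downloads.example.com/']]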

def verify_checksum(ud, d, precomputed={}):
def verify_checksum(ud, d):
    """
    verify the MD5 and SHA256 checksum for downloaded src

@@ -551,28 +533,13 @@ def verify_checksum(ud, d, precomputed={}):
    the downloaded file, or if BB_STRICT_CHECKSUM is set and there are no
    checksums specified.

    Returns a dict of checksums that can be stored in a done stamp file and
    passed in as precomputed parameter in a later call to avoid re-computing
    the checksums from the file. This allows verifying the checksums of the
    file against those in the recipe each time, rather than only after
    downloading. See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571.
    """

    _MD5_KEY = "md5"
    _SHA256_KEY = "sha256"
    if not ud.method.supports_checksum(ud):
        return

    if ud.ignore_checksums or not ud.method.supports_checksum(ud):
        return {}

    if _MD5_KEY in precomputed:
        md5data = precomputed[_MD5_KEY]
    else:
        md5data = bb.utils.md5_file(ud.localpath)

    if _SHA256_KEY in precomputed:
        sha256data = precomputed[_SHA256_KEY]
    else:
        sha256data = bb.utils.sha256_file(ud.localpath)
    md5data = bb.utils.md5_file(ud.localpath)
    sha256data = bb.utils.sha256_file(ud.localpath)

    if ud.method.recommends_checksum(ud):
        # If strict checking enabled and neither sum defined, raise error
@@ -622,72 +589,6 @@ def verify_checksum(ud, d, precomputed={}):
    if len(msg):
        raise ChecksumError('Checksum mismatch!%s' % msg, ud.url, md5data)

    return {
        _MD5_KEY: md5data,
        _SHA256_KEY: sha256data
    }


def verify_donestamp(ud, d):
    """
    Check whether the done stamp file has the right checksums (if the fetch
    method supports them). If it doesn't, delete the done stamp and force
    a re-download.

    Returns True, if the donestamp exists and is valid, False otherwise. When
    returning False, any existing done stamps are removed.
    """
    if not os.path.exists(ud.donestamp):
        return False

    if not ud.method.supports_checksum(ud):
        # done stamp exists, checksums not supported; assume the local file is
        # current
        return True

    if not os.path.exists(ud.localpath):
        # done stamp exists, but the downloaded file does not; the done stamp
        # must be incorrect, re-trigger the download
        bb.utils.remove(ud.donestamp)
        return False

    precomputed_checksums = {}
    # Only re-use the precomputed checksums if the donestamp is newer than the
    # file. Do not rely on the mtime of directories, though. If ud.localpath is
    # a directory, there will probably not be any checksums anyway.
    if (os.path.isdir(ud.localpath) or
            os.path.getmtime(ud.localpath) < os.path.getmtime(ud.donestamp)):
        try:
            with open(ud.donestamp, "rb") as cachefile:
                pickled = pickle.Unpickler(cachefile)
                precomputed_checksums.update(pickled.load())
        except Exception as e:
            # Avoid the warnings on the upgrade path from empty done stamp
            # files to those containing the checksums.
            if not isinstance(e, EOFError):
                # Ignore errors, they aren't fatal
                logger.warn("Couldn't load checksums from donestamp %s: %s "
                            "(msg: %s)" % (ud.donestamp, type(e).__name__,
                                           str(e)))

    try:
        checksums = verify_checksum(ud, d, precomputed_checksums)
        # If the cache file did not have the checksums, compute and store them
        # as an upgrade path from the previous done stamp file format.
        if checksums != precomputed_checksums:
            with open(ud.donestamp, "wb") as cachefile:
                p = pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL)
                p.dump(checksums)
        return True
    except ChecksumError as e:
        # Checksums failed to verify, trigger re-download and remove the
        # incorrect stamp file.
        logger.warn("Checksum mismatch for local file %s\n"
                    "Cleaning and trying again." % ud.localpath)
        rename_bad_checksum(ud, e.checksum)
        bb.utils.remove(ud.donestamp)
        return False
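
# Sketch of the done stamp format assumed by the code above: the stamp is a
# pickled dict of checksums, and a zero-byte stamp from the old format
# unpickles with EOFError, which is why that case is silently tolerated.
#   import pickle
#   with open(ud.donestamp, "rb") as f:
#       print(pickle.load(f))   # e.g. {'md5': '...', 'sha256': '...'}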

def update_stamp(ud, d):
    """
@@ -702,11 +603,8 @@ def update_stamp(ud, d):
            # Errors aren't fatal here
            pass
    else:
        checksums = verify_checksum(ud, d)
        # Store the checksums for later re-verification against the recipe
        with open(ud.donestamp, "wb") as cachefile:
            p = pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL)
            p.dump(checksums)
        verify_checksum(ud, d)
        open(ud.donestamp, 'w').close()

def subprocess_setup():
    # Python installs a SIGPIPE handler by default. This is usually not what
@@ -720,18 +618,13 @@ def get_autorev(d):
    d.setVar('__BB_DONT_CACHE', '1')
    return "AUTOINC"

def get_srcrev(d, method_name='sortable_revision'):
def get_srcrev(d):
    """
    Return the revision string, usually for use in the version string (PV) of the current package
    Return the version string for the current package
    (usually to be used as PV)
    Most packages usually only have one SCM so we just pass on the call.
    In the multi SCM case, we build a value based on SRCREV_FORMAT which must
    have been set.

    The idea here is that we put the string "AUTOINC+" into return value if the revisions are not
    incremental, other code is then responsible for turning that into an increasing value (if needed)

    A method_name can be supplied to retrieve an alternatively formatted revision from a fetcher, if
    that fetcher provides a method with the given name and the same signature as sortable_revision.
    """

    scms = []
@@ -745,7 +638,7 @@ def get_srcrev(d, method_name='sortable_revision'):
        raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")

    if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
        autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].names[0])
        autoinc, rev = urldata[scms[0]].method.sortable_revision(urldata[scms[0]], d, urldata[scms[0]].names[0])
        if len(rev) > 10:
            rev = rev[:10]
        if autoinc:
@@ -763,7 +656,7 @@ def get_srcrev(d, method_name='sortable_revision'):
    for scm in scms:
        ud = urldata[scm]
        for name in ud.names:
            autoinc, rev = getattr(ud.method, method_name)(ud, d, name)
            autoinc, rev = ud.method.sortable_revision(ud, d, name)
            seenautoinc = seenautoinc or autoinc
            if len(rev) > 10:
                rev = rev[:10]
@@ -797,8 +690,6 @@ def runfetchcmd(cmd, d, quiet = False, cleanup = []):
                    'NO_PROXY', 'no_proxy',
                    'ALL_PROXY', 'all_proxy',
                    'GIT_PROXY_COMMAND',
                    'GIT_SSL_CAINFO',
                    'GIT_SMART_HTTP',
                    'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
                    'SOCKS5_USER', 'SOCKS5_PASSWD']

@@ -867,11 +758,6 @@ def build_mirroruris(origud, mirrors, ld):
                newuri = uri_replace(ud, find, replace, replacements, ld)
                if not newuri or newuri in uris or newuri == origud.url:
                    continue

                if not trusted_network(ld, newuri):
                    logger.debug(1, "Mirror %s not in the list of trusted networks, skipping" % (newuri))
                    continue

                try:
                    newud = FetchData(newuri, ld)
                    newud.setup_localpath(ld)
@@ -879,9 +765,7 @@ def build_mirroruris(origud, mirrors, ld):
                    logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
                    logger.debug(1, str(e))
                    try:
                        # setup_localpath of file:// urls may fail, we should still see
                        # if mirrors of the url exist
                        adduri(newud, uris, uds)
                        ud.method.clean(ud, ld)
                    except UnboundLocalError:
                        pass
                    continue
@@ -907,19 +791,19 @@ def rename_bad_checksum(ud, suffix):
    bb.utils.movefile(ud.localpath, new_localpath)


def try_mirror_url(fetch, origud, ud, ld, check = False):
def try_mirror_url(origud, ud, ld, check = False):
    # Return of None or a value means we're finished
    # False means try another url
    try:
        if check:
            found = ud.method.checkstatus(fetch, ud, ld)
            found = ud.method.checkstatus(ud, ld)
            if found:
                return found
            return False

        os.chdir(ld.getVar("DL_DIR", True))

        if not verify_donestamp(ud, ld) or ud.method.need_update(ud, ld):
        if not os.path.exists(ud.donestamp) or ud.method.need_update(ud, ld):
            ud.method.download(ud, ld)
            if hasattr(ud.method,"build_mirror_data"):
                ud.method.build_mirror_data(ud, ld)
@@ -935,13 +819,12 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
        dldir = ld.getVar("DL_DIR", True)
        if origud.mirrortarball and os.path.basename(ud.localpath) == os.path.basename(origud.mirrortarball) \
                and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
            # Create donestamp in old format to avoid triggering a re-download
            bb.utils.mkdirhier(os.path.dirname(ud.donestamp))
            open(ud.donestamp, 'w').close()
            dest = os.path.join(dldir, os.path.basename(ud.localpath))
            if not os.path.exists(dest):
                os.symlink(ud.localpath, dest)
            if not verify_donestamp(origud, ld) or origud.method.need_update(origud, ld):
            if not os.path.exists(origud.donestamp) or origud.method.need_update(origud, ld):
                origud.method.download(origud, ld)
                if hasattr(origud.method,"build_mirror_data"):
                    origud.method.build_mirror_data(origud, ld)
@@ -975,7 +858,7 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
        pass
    return False

def try_mirrors(fetch, d, origud, mirrors, check = False):
def try_mirrors(d, origud, mirrors, check = False):
    """
    Try to use a mirrored version of the sources.
    This method will be automatically called before the fetchers go.
@@ -989,46 +872,11 @@ def try_mirrors(fetch, d, origud, mirrors, check = False):
    uris, uds = build_mirroruris(origud, mirrors, ld)

    for index, uri in enumerate(uris):
        ret = try_mirror_url(fetch, origud, uds[index], ld, check)
        ret = try_mirror_url(origud, uds[index], ld, check)
        if ret != False:
            return ret
    return None

def trusted_network(d, url):
    """
    Use a trusted url during download if networking is enabled and
    BB_ALLOWED_NETWORKS is set globally or for a specific recipe.
    Note: modifies SRC_URI & mirrors.
    """
    if d.getVar('BB_NO_NETWORK', True) == "1":
        return True

    pkgname = d.expand(d.getVar('PN', False))
    trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname)

    if not trusted_hosts:
        trusted_hosts = d.getVar('BB_ALLOWED_NETWORKS', True)

    # Not enabled.
    if not trusted_hosts:
        return True

    scheme, network, path, user, passwd, param = decodeurl(url)

    if not network:
        return True

    network = network.lower()

    for host in trusted_hosts.split(" "):
        host = host.lower()
        if host.startswith("*.") and ("." + network).endswith(host[1:]):
            return True
        if host == network:
            return True

    return False
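
# Illustrative matching behaviour of trusted_network() (hosts below are
# placeholders), assuming BB_ALLOWED_NETWORKS = "*.yoctoproject.org git.example.com":
#   trusted_network(d, "git://git.yoctoproject.org/poky")  # True (wildcard)
#   trusted_network(d, "http://git.example.com/repo.git")  # True (exact match)
#   trusted_network(d, "http://other.example.org/x.tgz")   # False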

def srcrev_internal_helper(ud, d, name):
    """
    Return:
@@ -1194,7 +1042,6 @@ class FetchData(object):
                self.sha256_expected = None
            else:
                self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)
        self.ignore_checksums = False

        self.names = self.parm.get("name",'default').split(',')

@@ -1351,9 +1198,9 @@ class FetchMethod(object):
                bb.fatal("Invalid value for 'unpack' parameter for %s: %s" %
                         (file, urldata.parm.get('unpack')))

        base, ext = os.path.splitext(file)
        if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz']:
            efile = os.path.join(rootdir, os.path.basename(base))
        dots = file.split(".")
        if dots[-1] in ['gz', 'bz2', 'Z', 'xz']:
            efile = os.path.join(rootdir, os.path.basename('.'.join(dots[0:-1])))
        else:
            efile = file
        cmd = None
@@ -1373,10 +1220,6 @@ class FetchMethod(object):
            cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
        elif file.endswith('.xz'):
            cmd = 'xz -dc %s > %s' % (file, efile)
        elif file.endswith('.tar.lz'):
            cmd = 'lzip -dc %s | tar x --no-same-owner -f -' % file
        elif file.endswith('.lz'):
            cmd = 'lzip -dc %s > %s' % (file, efile)
        elif file.endswith('.zip') or file.endswith('.jar'):
            try:
                dos = bb.utils.to_boolean(urldata.parm.get('dos'), False)
@@ -1473,7 +1316,7 @@ class FetchMethod(object):
        """
        return True

    def checkstatus(self, fetch, urldata, d):
    def checkstatus(self, urldata, d):
        """
        Check the status of a URL
        Assumes localpath was called first
@@ -1505,7 +1348,7 @@ class FetchMethod(object):
        return "%s-%s" % (key, d.getVar("PN", True) or "")

class Fetch(object):
    def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None):
    def __init__(self, urls, d, cache = True, localonly = False):
        if localonly and cache:
            raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")

@@ -1514,7 +1357,6 @@ class Fetch(object):
        self.urls = urls
        self.d = d
        self.ud = {}
        self.connection_cache = connection_cache

        fn = d.getVar('FILE', True)
        if cache and fn and fn in urldata_cache:
@@ -1573,12 +1415,12 @@ class Fetch(object):
            try:
                self.d.setVar("BB_NO_NETWORK", network)

                if verify_donestamp(ud, self.d) and not m.need_update(ud, self.d):
                if os.path.exists(ud.donestamp) and not m.need_update(ud, self.d):
                    localpath = ud.localpath
                elif m.try_premirror(ud, self.d):
                    logger.debug(1, "Trying PREMIRRORS")
                    mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
                    localpath = try_mirrors(self, self.d, ud, mirrors, False)
                    localpath = try_mirrors(self.d, ud, mirrors, False)

                if premirroronly:
                    self.d.setVar("BB_NO_NETWORK", "1")
@@ -1586,10 +1428,8 @@ class Fetch(object):
                os.chdir(self.d.getVar("DL_DIR", True))

                firsterr = None
                if not localpath and ((not verify_donestamp(ud, self.d)) or m.need_update(ud, self.d)):
                if not localpath and ((not os.path.exists(ud.donestamp)) or m.need_update(ud, self.d)):
                    try:
                        if not trusted_network(self.d, ud.url):
                            raise UntrustedUrl(ud.url)
                        logger.debug(1, "Trying Upstream")
                        m.download(ud, self.d)
                        if hasattr(m, "build_mirror_data"):
@@ -1617,7 +1457,7 @@ class Fetch(object):
                        m.clean(ud, self.d)
                        logger.debug(1, "Trying MIRRORS")
                        mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
                        localpath = try_mirrors(self, self.d, ud, mirrors)
                        localpath = try_mirrors(self.d, ud, mirrors)

                if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1):
                    if firsterr:
@@ -1649,15 +1489,15 @@ class Fetch(object):
            logger.debug(1, "Testing URL %s", u)
            # First try checking uri, u, from PREMIRRORS
            mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True))
            ret = try_mirrors(self, self.d, ud, mirrors, True)
            ret = try_mirrors(self.d, ud, mirrors, True)
            if not ret:
                # Next try checking from the original uri, u
                try:
                    ret = m.checkstatus(self, ud, self.d)
                    ret = m.checkstatus(ud, self.d)
                except:
                    # Finally, try checking uri, u, from MIRRORS
                    mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
                    ret = try_mirrors(self, self.d, ud, mirrors, True)
                    ret = try_mirrors(self.d, ud, mirrors, True)

            if not ret:
                raise FetchError("URL %s doesn't work" % u, u)
@@ -1712,42 +1552,6 @@ class Fetch(object):
            if ud.lockfile:
                bb.utils.unlockfile(lf)

class FetchConnectionCache(object):
    """
    A class which represents a container for socket connections.
    """
    def __init__(self):
        self.cache = {}

    def get_connection_name(self, host, port):
        return host + ':' + str(port)

    def add_connection(self, host, port, connection):
        cn = self.get_connection_name(host, port)

        if cn not in self.cache:
            self.cache[cn] = connection

    def get_connection(self, host, port):
        connection = None

        cn = self.get_connection_name(host, port)
        if cn in self.cache:
            connection = self.cache[cn]

        return connection

    def remove_connection(self, host, port):
        cn = self.get_connection_name(host, port)
        if cn in self.cache:
            self.cache[cn].close()
            del self.cache[cn]

    def close_connections(self):
        for cn in self.cache.keys():
            self.cache[cn].close()
            del self.cache[cn]
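
# Minimal usage sketch for FetchConnectionCache (hypothetical host/port,
# illustration only):
#   cache = FetchConnectionCache()
#   cache.get_connection("downloads.example.com", 80)        # -> None at first
#   # after opening a socket, register it for reuse:
#   # cache.add_connection("downloads.example.com", 80, sock)
#   # cache.close_connections()  # close and drop everything at teardown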

from . import cvs
from . import git
from . import gitsm

@@ -9,7 +9,7 @@ Usage in the recipe:

    SRC_URI = "ccrc://cc.example.org/ccrc;vob=/example_vob;module=/example_module"
    SRCREV = "EXAMPLE_CLEARCASE_TAG"
    PV = "${@d.getVar("SRCREV", False).replace("/", "+")}"
    PV = "${@d.getVar("SRCREV").replace("/", "+")}"

The fetcher uses the rcleartool or cleartool remote client, depending on which one is available.

@@ -113,7 +113,7 @@ class ClearCase(FetchMethod):
        if data.getVar("SRCREV", d, True) == "INVALID":
            raise FetchError("Set a valid SRCREV for the clearcase fetcher in your recipe, e.g. SRCREV = \"/main/LATEST\" or any other label of your choice.")

        ud.label = d.getVar("SRCREV", False)
        ud.label = d.getVar("SRCREV")
        ud.customspec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC", True)

        ud.server = "%s://%s%s" % (ud.proto, ud.host, ud.path)

@@ -67,7 +67,6 @@ Supported SRC_URI options are:
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os
import re
import bb
from bb import data
from bb.fetch2 import FetchMethod
@@ -178,6 +177,11 @@ class Git(FetchMethod):
    def download(self, ud, d):
        """Fetch url"""

        if ud.user:
            username = ud.user + '@'
        else:
            username = ""

        ud.repochanged = not os.path.exists(ud.fullmirror)

        # If the checkout doesn't exist and the mirror tarball does, extract it
@@ -186,7 +190,7 @@ class Git(FetchMethod):
            os.chdir(ud.clonedir)
            runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)

        repourl = self._get_repo_url(ud)
        repourl = "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path)

        # If the repo still doesn't exist, fallback to cloning it
        if not os.path.exists(ud.clonedir):
@@ -272,22 +276,13 @@ class Git(FetchMethod):
            clonedir = indirectiondir

        runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, cloneflags, clonedir, destdir), d)
        os.chdir(destdir)
        repourl = self._get_repo_url(ud)
        runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, repourl), d)
        if not ud.nocheckout:
            os.chdir(destdir)
            if subdir != "":
                runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d)
                runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d)
            elif not ud.nobranch:
                branchname = ud.branches[ud.names[0]]
                runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \
                            ud.revisions[ud.names[0]]), d)
                runfetchcmd("%s branch --set-upstream %s origin/%s" % (ud.basecmd, branchname, \
                            branchname), d)
            else:
                runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d)

        return True

    def clean(self, ud, d):
@@ -316,16 +311,6 @@ class Git(FetchMethod):
            raise bb.fetch2.FetchError("The command '%s' gave output with more than 1 line unexpectedly, output: '%s'" % (cmd, output))
        return output.split()[0] != "0"

    def _get_repo_url(self, ud):
        """
        Return the repository URL
        """
        if ud.user:
            username = ud.user + '@'
        else:
            username = ""
        return "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path)

    def _revision_key(self, ud, d, name):
        """
        Return a unique key for the url
@@ -336,9 +321,13 @@ class Git(FetchMethod):
        """
        Run git ls-remote with the specified search string
        """
        repourl = self._get_repo_url(ud)
        cmd = "%s ls-remote %s %s" % \
              (ud.basecmd, repourl, search)
        if ud.user:
            username = ud.user + '@'
        else:
            username = ""

        cmd = "%s ls-remote %s://%s%s%s %s" % \
              (ud.basecmd, ud.proto, username, ud.host, ud.path, search)
        if ud.proto.lower() != 'file':
            bb.fetch2.check_network_access(d, cmd)
        output = runfetchcmd(cmd, d, True)
@@ -350,95 +339,20 @@ class Git(FetchMethod):
        """
        Compute the HEAD revision for the url
        """
        output = self._lsremote(ud, d, "")
        # Tags of the form ^{} may not work, need to fallback to other form
        if ud.unresolvedrev[name][:5] == "refs/":
            head = ud.unresolvedrev[name]
            tag = ud.unresolvedrev[name]
            search = "%s %s^{}" % (ud.unresolvedrev[name], ud.unresolvedrev[name])
        else:
            head = "refs/heads/%s" % ud.unresolvedrev[name]
            tag = "refs/tags/%s" % ud.unresolvedrev[name]
        for s in [head, tag + "^{}", tag]:
            for l in output.split('\n'):
                if s in l:
                    return l.split()[0]
        raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output for %s" % \
            (ud.unresolvedrev[name], ud.host+ud.path))
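
# Illustrative resolution against "git ls-remote" output (hashes made up):
#   3f786850e387550fdab836ed7e6dc881de23001b refs/heads/master
#   89e6c98d92887913cadf06b2adb97f26cde4849b refs/tags/v1.0
#   2b66fd261ee5c6cfc8de7fa466bab600bcfe4f69 refs/tags/v1.0^{}
# For unresolvedrev "master" the refs/heads form matches; for "v1.0" the
# peeled "refs/tags/v1.0^{}" entry is preferred over the unpeeled tag.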

    def latest_versionstring(self, ud, d):
        """
        Compute the latest release name like "x.y.x" in "x.y.x+gitHASH"
        by searching through the tags output of ls-remote, comparing
        versions and returning the highest match.
        """
        pupver = ('', '')

        tagregex = re.compile(d.getVar('GITTAGREGEX', True) or "(?P<pver>([0-9][\.|_]?)+)")
        try:
            output = self._lsremote(ud, d, "refs/tags/*")
        except bb.fetch2.FetchError or bb.fetch2.NetworkAccess:
            return pupver

        verstring = ""
        revision = ""
        for line in output.split("\n"):
            if not line:
                break

            tag_head = line.split("/")[-1]
            # Ignore non-released branches
            m = re.search("(alpha|beta|rc|final)+", tag_head)
            if m:
                continue

            # search for version in the line
            tag = tagregex.search(tag_head)
            if tag == None:
                continue

            tag = tag.group('pver')
            tag = tag.replace("_", ".")

            if verstring and bb.utils.vercmp(("0", tag, ""), ("0", verstring, "")) < 0:
                continue

            verstring = tag
            revision = line.split()[0]
            pupver = (verstring, revision)

        return pupver
        search = "refs/heads/%s refs/tags/%s^{}" % (ud.unresolvedrev[name], ud.unresolvedrev[name])
        output = self._lsremote(ud, d, search)
        return output.split()[0]

    def _build_revision(self, ud, d, name):
        return ud.revisions[name]

    def gitpkgv_revision(self, ud, d, name):
        """
        Return a sortable revision number by counting commits in the history
        Based on gitpkgv.bbclass in meta-openembedded
        """
        rev = self._build_revision(ud, d, name)
        localpath = ud.localpath
        rev_file = os.path.join(localpath, "oe-gitpkgv_" + rev)
        if not os.path.exists(localpath):
            commits = None
        else:
            if not os.path.exists(rev_file) or not os.path.getsize(rev_file):
                from pipes import quote
                commits = bb.fetch2.runfetchcmd(
                        "git rev-list %s -- | wc -l" % (quote(rev)),
                        d, quiet=True).strip().lstrip('0')
                if commits:
                    open(rev_file, "w").write("%d\n" % int(commits))
            else:
                commits = open(rev_file, "r").readline(128).strip()
        if commits:
            return False, "%s+%s" % (commits, rev[:7])
        else:
            return True, str(rev)
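
# Illustrative result of gitpkgv_revision() (values made up): for a clone
# whose history up to the pinned revision holds 1234 commits it returns
#   (False, "1234+0123abc")
# i.e. a monotonically increasing commit count plus the short hash, which is
# suitable for embedding in PV.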

    def checkstatus(self, fetch, ud, d):
    def checkstatus(self, ud, d):
        fetchcmd = "%s ls-remote %s" % (ud.basecmd, ud.url)
        try:
            self._lsremote(ud, d, "")
            runfetchcmd(fetchcmd, d, quiet=True)
            return True
        except FetchError:
            return False

@@ -109,7 +109,6 @@ class GitSM(Git):
        runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*true/bare = false/'", d)
        os.chdir(tmpclonedir)
        runfetchcmd(ud.basecmd + " reset --hard", d)
        runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d)
        runfetchcmd(ud.basecmd + " submodule init", d)
        runfetchcmd(ud.basecmd + " submodule update", d)
        self._set_relative_paths(tmpclonedir)

@@ -43,13 +43,6 @@ class Hg(FetchMethod):
        """
        return ud.type in ['hg']

    def supports_checksum(self, urldata):
        """
        Don't require checksums for local archives created from
        repository checkouts.
        """
        return False

    def urldata_init(self, ud, d):
        """
        init hg specific variable within url data
@@ -59,12 +52,10 @@ class Hg(FetchMethod):

        ud.module = ud.parm["module"]

        if 'protocol' in ud.parm:
            ud.proto = ud.parm['protocol']
        elif not ud.host:
            ud.proto = 'file'
        else:
            ud.proto = "hg"
        # Create paths to mercurial checkouts
        relpath = self._strip_leading_slashes(ud.path)
        ud.pkgdir = os.path.join(data.expand('${HGDIR}', d), ud.host, relpath)
        ud.moddir = os.path.join(ud.pkgdir, ud.module)

        ud.setup_revisons(d)

@@ -73,19 +64,7 @@ class Hg(FetchMethod):
        elif not ud.revision:
            ud.revision = self.latest_revision(ud, d)

        # Create paths to mercurial checkouts
        hgsrcname = '%s_%s_%s' % (ud.module.replace('/', '.'), \
                            ud.host, ud.path.replace('/', '.'))
        ud.mirrortarball = 'hg_%s.tar.gz' % hgsrcname
        ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)

        hgdir = d.getVar("HGDIR", True) or (d.getVar("DL_DIR", True) + "/hg/")
        ud.pkgdir = os.path.join(hgdir, hgsrcname)
        ud.moddir = os.path.join(ud.pkgdir, ud.module)
        ud.localfile = ud.moddir
        ud.basecmd = data.getVar("FETCHCMD_hg", d, True) or "/usr/bin/env hg"

        ud.write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS", True)
        ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)

    def need_update(self, ud, d):
        revTag = ud.parm.get('rev', 'tip')
@@ -95,21 +74,14 @@ class Hg(FetchMethod):
            return True
        return False

    def try_premirror(self, ud, d):
        # If we don't do this, updating an existing checkout with only premirrors
        # is not possible
        if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None:
            return True
        if os.path.exists(ud.moddir):
            return False
        return True

    def _buildhgcommand(self, ud, d, command):
        """
        Build up an hg commandline based on ud
        command is "fetch", "update", "info"
        """

        basecmd = data.expand('${FETCHCMD_hg}', d)

        proto = ud.parm.get('protocol', 'http')

        host = ud.host
@@ -126,7 +98,7 @@ class Hg(FetchMethod):
            hgroot = ud.user + "@" + host + ud.path

        if command == "info":
            return "%s identify -i %s://%s/%s" % (ud.basecmd, proto, hgroot, ud.module)
            return "%s identify -i %s://%s/%s" % (basecmd, proto, hgroot, ud.module)

        options = [];

@@ -139,22 +111,22 @@ class Hg(FetchMethod):

        if command == "fetch":
            if ud.user and ud.pswd:
                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" clone %s %s://%s/%s %s" % (ud.basecmd, ud.user, ud.pswd, proto, " ".join(options), proto, hgroot, ud.module, ud.module)
                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" clone %s %s://%s/%s %s" % (basecmd, ud.user, ud.pswd, proto, " ".join(options), proto, hgroot, ud.module, ud.module)
            else:
                cmd = "%s clone %s %s://%s/%s %s" % (ud.basecmd, " ".join(options), proto, hgroot, ud.module, ud.module)
                cmd = "%s clone %s %s://%s/%s %s" % (basecmd, " ".join(options), proto, hgroot, ud.module, ud.module)
        elif command == "pull":
            # do not pass options list; limiting pull to rev causes the local
            # repo not to contain it and immediately following "update" command
            # will crash
            if ud.user and ud.pswd:
                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" pull" % (ud.basecmd, ud.user, ud.pswd, proto)
                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" pull" % (basecmd, ud.user, ud.pswd, proto)
            else:
                cmd = "%s pull" % (ud.basecmd)
                cmd = "%s pull" % (basecmd)
        elif command == "update":
            if ud.user and ud.pswd:
                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" update -C %s" % (ud.basecmd, ud.user, ud.pswd, proto, " ".join(options))
                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" update -C %s" % (basecmd, ud.user, ud.pswd, proto, " ".join(options))
            else:
                cmd = "%s update -C %s" % (ud.basecmd, " ".join(options))
                cmd = "%s update -C %s" % (basecmd, " ".join(options))
        else:
            raise FetchError("Invalid hg command %s" % command, ud.url)

@@ -163,36 +135,18 @@ class Hg(FetchMethod):
    def download(self, ud, d):
        """Fetch url"""

        ud.repochanged = not os.path.exists(ud.fullmirror)

        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

        # If the checkout doesn't exist and the mirror tarball does, extract it
        if not os.path.exists(ud.pkgdir) and os.path.exists(ud.fullmirror):
            bb.utils.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)

        if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
            # Found the source, check whether need pull
            updatecmd = self._buildhgcommand(ud, d, "update")
            updatecmd = self._buildhgcommand(ud, d, "pull")
            logger.info("Update " + ud.url)
            # update sources there
            os.chdir(ud.moddir)
            logger.debug(1, "Running %s", updatecmd)
            try:
                runfetchcmd(updatecmd, d)
            except bb.fetch2.FetchError:
                # Running pull in the repo
                pullcmd = self._buildhgcommand(ud, d, "pull")
                logger.info("Pulling " + ud.url)
                # update sources there
                os.chdir(ud.moddir)
                logger.debug(1, "Running %s", pullcmd)
                bb.fetch2.check_network_access(d, pullcmd, ud.url)
                runfetchcmd(pullcmd, d)
                ud.repochanged = True
            bb.fetch2.check_network_access(d, updatecmd, ud.url)
            runfetchcmd(updatecmd, d)

        # No source found, clone it.
        if not os.path.exists(ud.moddir):
        else:
            fetchcmd = self._buildhgcommand(ud, d, "fetch")
            logger.info("Fetch " + ud.url)
            # check out sources there
@@ -209,12 +163,14 @@ class Hg(FetchMethod):
        logger.debug(1, "Running %s", updatecmd)
        runfetchcmd(updatecmd, d)

    def clean(self, ud, d):
        """ Clean the hg dir """
        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude '.hg' --exclude '.hgtags'"

        bb.utils.remove(ud.localpath, True)
        bb.utils.remove(ud.fullmirror)
        bb.utils.remove(ud.fullmirror + ".done")
        os.chdir(ud.pkgdir)
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath])

    def supports_srcrev(self):
        return True

@@ -235,41 +191,3 @@ class Hg(FetchMethod):
        Return a unique key for the url
        """
        return "hg:" + ud.moddir

    def build_mirror_data(self, ud, d):
        # Generate a mirror tarball if needed
        if ud.write_tarballs == "1" and (ud.repochanged or not os.path.exists(ud.fullmirror)):
            # it's possible that this symlink points to read-only filesystem with PREMIRROR
            if os.path.islink(ud.fullmirror):
                os.unlink(ud.fullmirror)

            os.chdir(ud.pkgdir)
            logger.info("Creating tarball of hg repository")
            runfetchcmd("tar -czf %s %s" % (ud.fullmirror, ud.module), d)
            runfetchcmd("touch %s.done" % (ud.fullmirror), d)

    def localpath(self, ud, d):
        return ud.pkgdir

    def unpack(self, ud, destdir, d):
        """
        Make a local clone or export for the url
        """

        revflag = "-r %s" % ud.revision
        subdir = ud.parm.get("destsuffix", ud.module)
        codir = "%s/%s" % (destdir, subdir)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata != "nokeep":
            if not os.access(os.path.join(codir, '.hg'), os.R_OK):
                logger.debug(2, "Unpack: creating new hg repository in '" + codir + "'")
                runfetchcmd("%s init %s" % (ud.basecmd, codir), d)
            logger.debug(2, "Unpack: updating source in '" + codir + "'")
            os.chdir(codir)
            runfetchcmd("%s pull %s" % (ud.basecmd, ud.moddir), d)
            runfetchcmd("%s up -C %s" % (ud.basecmd, revflag), d)
        else:
            logger.debug(2, "Unpack: extracting source to '" + codir + "'")
            os.chdir(ud.moddir)
            runfetchcmd("%s archive -t files %s %s" % (ud.basecmd, revflag, codir), d)

@@ -112,7 +112,7 @@ class Local(FetchMethod):

        return True

    def checkstatus(self, fetch, urldata, d):
    def checkstatus(self, urldata, d):
        """
        Check the status of the url
        """

@@ -48,7 +48,7 @@ class Perforce(FetchMethod):
            (user, pswd, host, port) = path.split('@')[0].split(":")
            path = path.split('@')[1]
        else:
            (host, port) = d.getVar('P4PORT', False).split(':')
            (host, port) = d.getVar('P4PORT').split(':')
            user = ""
            pswd = ""

@@ -123,7 +123,7 @@ class Perforce(FetchMethod):
        if depot.find('/...') != -1:
            path = depot[:depot.find('/...')]
        else:
            path = depot[:depot.rfind('/')]
            path = depot

        module = parm.get('module', os.path.basename(path))

@@ -99,7 +99,7 @@ class SFTP(FetchMethod):
        """Fetch urls"""

        urlo = URI(ud.url)
        basecmd = 'sftp -oBatchMode=yes'
        basecmd = 'sftp -oPasswordAuthentication=no'
        port = ''
        if urlo.port:
            port = '-P %d' % urlo.port

@@ -87,8 +87,7 @@ class SSH(FetchMethod):
        m = __pattern__.match(urldata.url)
        path = m.group('path')
        host = m.group('host')
        urldata.localpath = os.path.join(d.getVar('DL_DIR', True),
                os.path.basename(os.path.normpath(path)))
        urldata.localpath = os.path.join(d.getVar('DL_DIR', True), os.path.basename(path))

    def download(self, urldata, d):
        dldir = d.getVar('DL_DIR', True)

@@ -25,9 +25,6 @@ BitBake build tools.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig

import re
import tempfile
import subprocess
import os
import logging
import bb
@@ -37,7 +34,6 @@ from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import logger
from bb.fetch2 import runfetchcmd
from bs4 import BeautifulSoup

class Wget(FetchMethod):
    """Class to fetch urls via 'wget'"""
@@ -100,442 +96,11 @@ class Wget(FetchMethod):
|
||||
|
||||
return True
|
||||
|
||||
def checkstatus(self, fetch, ud, d):
|
||||
import urllib2, socket, httplib
|
||||
from urllib import addinfourl
|
||||
from bb.fetch2 import FetchConnectionCache
|
||||
|
||||
class HTTPConnectionCache(httplib.HTTPConnection):
|
||||
if fetch.connection_cache:
|
||||
def connect(self):
|
||||
"""Connect to the host and port specified in __init__."""
|
||||
|
||||
sock = fetch.connection_cache.get_connection(self.host, self.port)
|
||||
if sock:
|
||||
self.sock = sock
|
||||
else:
|
||||
self.sock = socket.create_connection((self.host, self.port),
|
||||
self.timeout, self.source_address)
|
||||
fetch.connection_cache.add_connection(self.host, self.port, self.sock)
|
||||
|
||||
if self._tunnel_host:
|
||||
self._tunnel()
|
||||
|
||||
class CacheHTTPHandler(urllib2.HTTPHandler):
|
||||
def http_open(self, req):
|
||||
return self.do_open(HTTPConnectionCache, req)
|
||||
|
||||
def do_open(self, http_class, req):
|
||||
"""Return an addinfourl object for the request, using http_class.
|
||||
|
||||
http_class must implement the HTTPConnection API from httplib.
|
||||
The addinfourl return value is a file-like object. It also
|
||||
has methods and attributes including:
|
||||
- info(): return a mimetools.Message object for the headers
|
||||
- geturl(): return the original request URL
|
||||
- code: HTTP status code
|
||||
"""
|
||||
host = req.get_host()
|
||||
if not host:
|
||||
raise urllib2.URLError('no host given')

h = http_class(host, timeout=req.timeout) # will parse host:port
h.set_debuglevel(self._debuglevel)

headers = dict(req.unredirected_hdrs)
headers.update(dict((k, v) for k, v in req.headers.items()
if k not in headers))

# We want to make an HTTP/1.1 request, but the addinfourl
# class isn't prepared to deal with a persistent connection.
# It will try to read all remaining data from the socket,
# which will block while the server waits for the next request.
# So make sure the connection gets closed after the (only)
# request.

# Don't close connection when connection_cache is enabled,
if fetch.connection_cache is None:
headers["Connection"] = "close"
else:
headers["Connection"] = "Keep-Alive" # Works for HTTP/1.0

headers = dict(
(name.title(), val) for name, val in headers.items())

if req._tunnel_host:
tunnel_headers = {}
proxy_auth_hdr = "Proxy-Authorization"
if proxy_auth_hdr in headers:
tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
# Proxy-Authorization should not be sent to origin
# server.
del headers[proxy_auth_hdr]
h.set_tunnel(req._tunnel_host, headers=tunnel_headers)

try:
h.request(req.get_method(), req.get_selector(), req.data, headers)
except socket.error, err: # XXX what error?
# Don't close connection when cache is enabled.
if fetch.connection_cache is None:
h.close()
raise urllib2.URLError(err)
else:
try:
r = h.getresponse(buffering=True)
except TypeError: # buffering kw not supported
r = h.getresponse()

# Pick apart the HTTPResponse object to get the addinfourl
# object initialized properly.

# Wrap the HTTPResponse object in socket's file object adapter
# for Windows. That adapter calls recv(), so delegate recv()
# to read(). This weird wrapping allows the returned object to
# have readline() and readlines() methods.

# XXX It might be better to extract the read buffering code
# out of socket._fileobject() and into a base class.
r.recv = r.read

# no data, just have to read
r.read()
class fp_dummy(object):
def read(self):
return ""
def readline(self):
return ""
def close(self):
pass

resp = addinfourl(fp_dummy(), r.msg, req.get_full_url())
resp.code = r.status
resp.msg = r.reason

# Close connection when server request it.
if fetch.connection_cache is not None:
if 'Connection' in r.msg and r.msg['Connection'] == 'close':
fetch.connection_cache.remove_connection(h.host, h.port)

return resp

def export_proxies(d):
variables = ['http_proxy', 'HTTP_PROXY', 'https_proxy', 'HTTPS_PROXY',
'ftp_proxy', 'FTP_PROXY', 'no_proxy', 'NO_PROXY']
exported = False

for v in variables:
if v in os.environ.keys():
exported = True
else:
v_proxy = d.getVar(v, True)
if v_proxy is not None:
os.environ[v] = v_proxy
exported = True

return exported

def head_method(self):
return "HEAD"

exported_proxies = export_proxies(d)

# XXX: Since Python 2.7.9 ssl cert validation is enabled by default
# see PEP-0476, this causes verification errors on some https servers
# so disable by default.
import ssl
ssl_context = None
if hasattr(ssl, '_create_unverified_context'):
ssl_context = ssl._create_unverified_context()

if exported_proxies == True and ssl_context is not None:
opener = urllib2.build_opener(urllib2.ProxyHandler, CacheHTTPHandler,
urllib2.HTTPSHandler(context=ssl_context))
elif exported_proxies == False and ssl_context is not None:
opener = urllib2.build_opener(CacheHTTPHandler,
urllib2.HTTPSHandler(context=ssl_context))
elif exported_proxies == True and ssl_context is None:
opener = urllib2.build_opener(urllib2.ProxyHandler, CacheHTTPHandler)
else:
opener = urllib2.build_opener(CacheHTTPHandler)

urllib2.Request.get_method = head_method
urllib2.install_opener(opener)
def checkstatus(self, ud, d):

uri = ud.url.split(";")[0]
fetchcmd = self.basecmd + " --spider '%s'" % uri

self._runwget(ud, d, fetchcmd, True)

try:
urllib2.urlopen(uri)
except:
return False
return True

def _parse_path(self, regex, s):
"""
Find and group name, version and archive type in the given string s
"""

m = regex.search(s)
if m:
pname = ''
pver = ''
ptype = ''

mdict = m.groupdict()
if 'name' in mdict.keys():
pname = mdict['name']
if 'pver' in mdict.keys():
pver = mdict['pver']
if 'type' in mdict.keys():
ptype = mdict['type']

bb.debug(3, "_parse_path: %s, %s, %s" % (pname, pver, ptype))

return (pname, pver, ptype)

return None

def _modelate_version(self, version):
if version[0] in ['.', '-']:
if version[1].isdigit():
version = version[1] + version[0] + version[2:len(version)]
else:
version = version[1:len(version)]

version = re.sub('-', '.', version)
version = re.sub('_', '.', version)
version = re.sub('(rc)+', '.1000.', version)
version = re.sub('(beta)+', '.100.', version)
version = re.sub('(alpha)+', '.10.', version)
if version[0] == 'v':
version = version[1:len(version)]
return version

def _vercmp(self, old, new):
"""
Check whether 'new' is newer than 'old' version. We use existing vercmp() for the
purpose. PE is cleared in comparison as it's not for build, and PR is cleared too
for simplicity as it's somehow difficult to get from various upstream format
"""

(oldpn, oldpv, oldsuffix) = old
(newpn, newpv, newsuffix) = new

"""
Check for a new suffix type that we have never heard of before
"""
if (newsuffix):
m = self.suffix_regex_comp.search(newsuffix)
if not m:
bb.warn("%s has a possible unknown suffix: %s" % (newpn, newsuffix))
return False

"""
Not our package so ignore it
"""
if oldpn != newpn:
return False

oldpv = self._modelate_version(oldpv)
newpv = self._modelate_version(newpv)

return bb.utils.vercmp(("0", oldpv, ""), ("0", newpv, ""))

def _fetch_index(self, uri, ud, d):
"""
Run fetch checkstatus to get directory information
"""
f = tempfile.NamedTemporaryFile()

agent = "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12"
fetchcmd = self.basecmd
fetchcmd += " -O " + f.name + " --user-agent='" + agent + "' '" + uri + "'"
try:
self._runwget(ud, d, fetchcmd, True)
fetchresult = f.read()
except bb.fetch2.BBFetchException:
fetchresult = ""

f.close()
return fetchresult

def _check_latest_version(self, url, package, package_regex, current_version, ud, d):
"""
Return the latest version of a package inside a given directory path
If error or no version, return ""
"""
valid = 0
version = ['', '', '']

bb.debug(3, "VersionURL: %s" % (url))
soup = BeautifulSoup(self._fetch_index(url, ud, d))
if not soup:
bb.debug(3, "*** %s NO SOUP" % (url))
return ""

for line in soup.find_all('a', href=True):
bb.debug(3, "line['href'] = '%s'" % (line['href']))
bb.debug(3, "line = '%s'" % (str(line)))

newver = self._parse_path(package_regex, line['href'])
if not newver:
newver = self._parse_path(package_regex, str(line))

if newver:
bb.debug(3, "Upstream version found: %s" % newver[1])
if valid == 0:
version = newver
valid = 1
elif self._vercmp(version, newver) < 0:
version = newver

pupver = re.sub('_', '.', version[1])

bb.debug(3, "*** %s -> UpstreamVersion = %s (CurrentVersion = %s)" %
(package, pupver or "N/A", current_version[1]))

if valid:
return pupver

return ""

def _check_latest_version_by_dir(self, dirver, package, package_regex,
current_version, ud, d):
"""
Scan every directory in order to get upstream version.
"""
version_dir = ['', '', '']
version = ['', '', '']

dirver_regex = re.compile("(\D*)((\d+[\.\-_])+(\d+))")
s = dirver_regex.search(dirver)
if s:
version_dir[1] = s.group(2)
else:
version_dir[1] = dirver

dirs_uri = bb.fetch.encodeurl([ud.type, ud.host,
ud.path.split(dirver)[0], ud.user, ud.pswd, {}])
bb.debug(3, "DirURL: %s, %s" % (dirs_uri, package))

soup = BeautifulSoup(self._fetch_index(dirs_uri, ud, d))
if not soup:
return version[1]

for line in soup.find_all('a', href=True):
s = dirver_regex.search(line['href'].strip("/"))
if s:
version_dir_new = ['', s.group(2), '']
if self._vercmp(version_dir, version_dir_new) <= 0:
dirver_new = s.group(1) + s.group(2)
path = ud.path.replace(dirver, dirver_new, True) \
.split(package)[0]
uri = bb.fetch.encodeurl([ud.type, ud.host, path,
ud.user, ud.pswd, {}])

pupver = self._check_latest_version(uri,
package, package_regex, current_version, ud, d)
if pupver:
version[1] = pupver

version_dir = version_dir_new

return version[1]

def _init_regexes(self, package, ud, d):
"""
Match as many patterns as possible such as:
gnome-common-2.20.0.tar.gz (most common format)
gtk+-2.90.1.tar.gz
xf86-input-synaptics-12.6.9.tar.gz
dri2proto-2.3.tar.gz
blktool_4.orig.tar.gz
libid3tag-0.15.1b.tar.gz
unzip552.tar.gz
icu4c-3_6-src.tgz
genext2fs_1.3.orig.tar.gz
gst-fluendo-mp3
"""
# match most patterns which uses "-" as separator to version digits
pn_prefix1 = "[a-zA-Z][a-zA-Z0-9]*([-_][a-zA-Z]\w+)*\+?[-_]"
# a loose pattern such as for unzip552.tar.gz
pn_prefix2 = "[a-zA-Z]+"
# a loose pattern such as for 80325-quicky-0.4.tar.gz
pn_prefix3 = "[0-9]+[-]?[a-zA-Z]+"
# Save the Package Name (pn) Regex for use later
pn_regex = "(%s|%s|%s)" % (pn_prefix1, pn_prefix2, pn_prefix3)

# match version
pver_regex = "(([A-Z]*\d+[a-zA-Z]*[\.\-_]*)+)"

# match arch
parch_regex = "-source|_all_"

# src.rpm extension was added only for rpm package. Can be removed if the rpm
# packaged will always be considered as having to be manually upgraded
psuffix_regex = "(tar\.gz|tgz|tar\.bz2|zip|xz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)"

# match name, version and archive type of a package
package_regex_comp = re.compile("(?P<name>%s?\.?v?)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s$)"
% (pn_regex, pver_regex, parch_regex, psuffix_regex))
self.suffix_regex_comp = re.compile(psuffix_regex)

# compile regex, can be specific by package or generic regex
pn_regex = d.getVar('REGEX', True)
if pn_regex:
package_custom_regex_comp = re.compile(pn_regex)
else:
version = self._parse_path(package_regex_comp, package)
if version:
package_custom_regex_comp = re.compile(
"(?P<name>%s)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s)" %
(re.escape(version[0]), pver_regex, parch_regex, psuffix_regex))
else:
package_custom_regex_comp = None

return package_custom_regex_comp

def latest_versionstring(self, ud, d):
"""
Manipulate the URL and try to obtain the latest package version

sanity check to ensure same name and type.
"""
package = ud.path.split("/")[-1]
current_version = ['', d.getVar('PV', True), '']

"""possible to have no version in pkg name, such as spectrum-fw"""
if not re.search("\d+", package):
current_version[1] = re.sub('_', '.', current_version[1])
current_version[1] = re.sub('-', '.', current_version[1])
return (current_version[1], '')

package_regex = self._init_regexes(package, ud, d)
if package_regex is None:
bb.warn("latest_versionstring: package %s don't match pattern" % (package))
return ('', '')
bb.debug(3, "latest_versionstring, regex: %s" % (package_regex.pattern))

uri = ""
regex_uri = d.getVar("REGEX_URI", True)
if not regex_uri:
path = ud.path.split(package)[0]

# search for version matches on folders inside the path, like:
# "5.7" in http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
dirver_regex = re.compile("(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
m = dirver_regex.search(path)
if m:
pn = d.getVar('PN', True)
dirver = m.group('dirver')

dirver_pn_regex = re.compile("%s\d?" % (re.escape(pn)))
if not dirver_pn_regex.search(dirver):
return (self._check_latest_version_by_dir(dirver,
package, package_regex, current_version, ud, d), '')

uri = bb.fetch.encodeurl([ud.type, ud.host, path, ud.user, ud.pswd, {}])
else:
uri = regex_uri

return (self._check_latest_version(uri, package, package_regex,
current_version, ud, d), '')

@@ -1,390 +0,0 @@
#!/usr/bin/env python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
# Copyright (C) 2006 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os
import sys
import logging
import optparse
import warnings

import bb
from bb import event
import bb.msg
from bb import cooker
from bb import ui
from bb import server
from bb import cookerdata

logger = logging.getLogger("BitBake")

class BBMainException(Exception):
pass

def get_ui(config):
if not config.ui:
# modify 'ui' attribute because it is also read by cooker
config.ui = os.environ.get('BITBAKE_UI', 'knotty')

interface = config.ui

try:
# Dynamically load the UI based on the ui name. Although we
# suggest a fixed set this allows you to have flexibility in which
# ones are available.
module = __import__("bb.ui", fromlist = [interface])
return getattr(module, interface)
except AttributeError:
raise BBMainException("FATAL: Invalid user interface '%s' specified.\n"
"Valid interfaces: depexp, goggle, ncurses, hob, knotty [default]." % interface)


# Display bitbake/OE warnings via the BitBake.Warnings logger, ignoring others"""
warnlog = logging.getLogger("BitBake.Warnings")
_warnings_showwarning = warnings.showwarning
def _showwarning(message, category, filename, lineno, file=None, line=None):
if file is not None:
if _warnings_showwarning is not None:
_warnings_showwarning(message, category, filename, lineno, file, line)
else:
s = warnings.formatwarning(message, category, filename, lineno)
warnlog.warn(s)

warnings.showwarning = _showwarning
warnings.filterwarnings("ignore")
warnings.filterwarnings("default", module="(<string>$|(oe|bb)\.)")
warnings.filterwarnings("ignore", category=PendingDeprecationWarning)
warnings.filterwarnings("ignore", category=ImportWarning)
warnings.filterwarnings("ignore", category=DeprecationWarning, module="<string>$")
warnings.filterwarnings("ignore", message="With-statements now directly support multiple context managers")

class BitBakeConfigParameters(cookerdata.ConfigParameters):

def parseCommandLine(self, argv=sys.argv):
parser = optparse.OptionParser(
version = "BitBake Build Tool Core version %s" % bb.__version__,
usage = """%prog [options] [recipename/target recipe:do_task ...]

Executes the specified task (default is 'build') for a given set of target recipes (.bb files).
It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which
will provide the layer, BBFILES and other configuration information.""")

parser.add_option("-b", "--buildfile", help = "Execute tasks from a specific .bb recipe directly. WARNING: Does not handle any dependencies from other recipes.",
action = "store", dest = "buildfile", default = None)

parser.add_option("-k", "--continue", help = "Continue as much as possible after an error. While the target that failed and anything depending on it cannot be built, as much as possible will be built before stopping.",
action = "store_false", dest = "abort", default = True)

parser.add_option("-a", "--tryaltconfigs", help = "Continue with builds by trying to use alternative providers where possible.",
action = "store_true", dest = "tryaltconfigs", default = False)

parser.add_option("-f", "--force", help = "Force the specified targets/task to run (invalidating any existing stamp file).",
action = "store_true", dest = "force", default = False)

parser.add_option("-c", "--cmd", help = "Specify the task to execute. The exact options available depend on the metadata. Some examples might be 'compile' or 'populate_sysroot' or 'listtasks' may give a list of the tasks available.",
action = "store", dest = "cmd")

parser.add_option("-C", "--clear-stamp", help = "Invalidate the stamp for the specified task such as 'compile' and then run the default task for the specified target(s).",
action = "store", dest = "invalidate_stamp")

parser.add_option("-r", "--read", help = "Read the specified file before bitbake.conf.",
action = "append", dest = "prefile", default = [])

parser.add_option("-R", "--postread", help = "Read the specified file after bitbake.conf.",
action = "append", dest = "postfile", default = [])

parser.add_option("-v", "--verbose", help = "Output more log message data to the terminal.",
action = "store_true", dest = "verbose", default = False)

parser.add_option("-D", "--debug", help = "Increase the debug level. You can specify this more than once.",
action = "count", dest="debug", default = 0)

parser.add_option("-n", "--dry-run", help = "Don't execute, just go through the motions.",
action = "store_true", dest = "dry_run", default = False)

parser.add_option("-S", "--dump-signatures", help = "Dump out the signature construction information, with no task execution. The SIGNATURE_HANDLER parameter is passed to the handler. Two common values are none and printdiff but the handler may define more/less. none means only dump the signature, printdiff means compare the dumped signature with the cached one.",
action = "append", dest = "dump_signatures", default = [], metavar="SIGNATURE_HANDLER")

parser.add_option("-p", "--parse-only", help = "Quit after parsing the BB recipes.",
action = "store_true", dest = "parse_only", default = False)

parser.add_option("-s", "--show-versions", help = "Show current and preferred versions of all recipes.",
action = "store_true", dest = "show_versions", default = False)

parser.add_option("-e", "--environment", help = "Show the global or per-recipe environment complete with information about where variables were set/changed.",
action = "store_true", dest = "show_environment", default = False)

parser.add_option("-g", "--graphviz", help = "Save dependency tree information for the specified targets in the dot syntax.",
action = "store_true", dest = "dot_graph", default = False)

parser.add_option("-I", "--ignore-deps", help = """Assume these dependencies don't exist and are already provided (equivalent to ASSUME_PROVIDED). Useful to make dependency graphs more appealing""",
action = "append", dest = "extra_assume_provided", default = [])

parser.add_option("-l", "--log-domains", help = """Show debug logging for the specified logging domains""",
action = "append", dest = "debug_domains", default = [])

parser.add_option("-P", "--profile", help = "Profile the command and save reports.",
action = "store_true", dest = "profile", default = False)

parser.add_option("-u", "--ui", help = "The user interface to use (e.g. knotty, hob, depexp).",
action = "store", dest = "ui")

parser.add_option("-t", "--servertype", help = "Choose which server to use, process or xmlrpc.",
action = "store", dest = "servertype")

parser.add_option("", "--token", help = "Specify the connection token to be used when connecting to a remote server.",
action = "store", dest = "xmlrpctoken")

parser.add_option("", "--revisions-changed", help = "Set the exit code depending on whether upstream floating revisions have changed or not.",
action = "store_true", dest = "revisions_changed", default = False)

parser.add_option("", "--server-only", help = "Run bitbake without a UI, only starting a server (cooker) process.",
action = "store_true", dest = "server_only", default = False)

parser.add_option("-B", "--bind", help = "The name/address for the bitbake server to bind to.",
action = "store", dest = "bind", default = False)

parser.add_option("", "--no-setscene", help = "Do not run any setscene tasks. sstate will be ignored and everything needed, built.",
action = "store_true", dest = "nosetscene", default = False)

parser.add_option("", "--remote-server", help = "Connect to the specified server.",
action = "store", dest = "remote_server", default = False)

parser.add_option("-m", "--kill-server", help = "Terminate the remote server.",
action = "store_true", dest = "kill_server", default = False)

parser.add_option("", "--observe-only", help = "Connect to a server as an observing-only client.",
action = "store_true", dest = "observe_only", default = False)

parser.add_option("", "--status-only", help = "Check the status of the remote bitbake server.",
action = "store_true", dest = "status_only", default = False)

parser.add_option("-w", "--write-log", help = "Writes the event log of the build to a bitbake event json file. Use '' (empty string) to assign the name automatically.",
action = "store", dest = "writeeventlog")

options, targets = parser.parse_args(argv)

# some environmental variables set also configuration options
if "BBSERVER" in os.environ:
options.servertype = "xmlrpc"
options.remote_server = os.environ["BBSERVER"]

if "BBTOKEN" in os.environ:
options.xmlrpctoken = os.environ["BBTOKEN"]

if "BBEVENTLOG" is os.environ:
|
||||
options.writeeventlog = os.environ["BBEVENTLOG"]

# fill in proper log name if not supplied
if options.writeeventlog is not None and len(options.writeeventlog) == 0:
import datetime
options.writeeventlog = "bitbake_eventlog_%s.json" % datetime.datetime.now().strftime("%Y%m%d%H%M%S")

# if BBSERVER says to autodetect, let's do that
if options.remote_server:
[host, port] = options.remote_server.split(":", 2)
port = int(port)
# use automatic port if port set to -1, means read it from
# the bitbake.lock file; this is a bit tricky, but we always expect
# to be in the base of the build directory if we need to have a
# chance to start the server later, anyway
if port == -1:
lock_location = "./bitbake.lock"
# we try to read the address at all times; if the server is not started,
# we'll try to start it after the first connect fails, below
try:
lf = open(lock_location, 'r')
remotedef = lf.readline()
[host, port] = remotedef.split(":")
port = int(port)
lf.close()
options.remote_server = remotedef
except Exception as e:
raise BBMainException("Failed to read bitbake.lock (%s), invalid port" % str(e))

return options, targets[1:]


def start_server(servermodule, configParams, configuration, features):
server = servermodule.BitBakeServer()
if configParams.bind:
(host, port) = configParams.bind.split(':')
server.initServer((host, int(port)))
configuration.interface = [ server.serverImpl.host, server.serverImpl.port ]
else:
server.initServer()
configuration.interface = []

try:
configuration.setServerRegIdleCallback(server.getServerIdleCB())

cooker = bb.cooker.BBCooker(configuration, features)

server.addcooker(cooker)
server.saveConnectionDetails()
except Exception as e:
exc_info = sys.exc_info()
while hasattr(server, "event_queue"):
try:
import queue
except ImportError:
import Queue as queue
try:
event = server.event_queue.get(block=False)
except (queue.Empty, IOError):
break
if isinstance(event, logging.LogRecord):
logger.handle(event)
raise exc_info[1], None, exc_info[2]
server.detach()
cooker.lock.close()
return server


def bitbake_main(configParams, configuration):

# Python multiprocessing requires /dev/shm on Linux
if sys.platform.startswith('linux') and not os.access('/dev/shm', os.W_OK | os.X_OK):
raise BBMainException("FATAL: /dev/shm does not exist or is not writable")

# Unbuffer stdout to avoid log truncation in the event
# of an unorderly exit as well as to provide timely
# updates to log files for use with tail
try:
if sys.stdout.name == '<stdout>':
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
except:
pass


configuration.setConfigParameters(configParams)

ui_module = get_ui(configParams)

# Server type can be xmlrpc or process currently, if nothing is specified,
# the default server is process
if configParams.servertype:
server_type = configParams.servertype
else:
server_type = 'process'

try:
module = __import__("bb.server", fromlist = [server_type])
servermodule = getattr(module, server_type)
except AttributeError:
raise BBMainException("FATAL: Invalid server type '%s' specified.\n"
"Valid interfaces: xmlrpc, process [default]." % server_type)

if configParams.server_only:
if configParams.servertype != "xmlrpc":
raise BBMainException("FATAL: If '--server-only' is defined, we must set the "
"servertype as 'xmlrpc'.\n")
if not configParams.bind:
raise BBMainException("FATAL: The '--server-only' option requires a name/address "
"to bind to with the -B option.\n")
if configParams.remote_server:
raise BBMainException("FATAL: The '--server-only' option conflicts with %s.\n" %
("the BBSERVER environment variable" if "BBSERVER" in os.environ \
else "the '--remote-server' option" ))

if configParams.bind and configParams.servertype != "xmlrpc":
raise BBMainException("FATAL: If '-B' or '--bind' is defined, we must "
"set the servertype as 'xmlrpc'.\n")

if configParams.remote_server and configParams.servertype != "xmlrpc":
raise BBMainException("FATAL: If '--remote-server' is defined, we must "
"set the servertype as 'xmlrpc'.\n")

if configParams.observe_only and (not configParams.remote_server or configParams.bind):
raise BBMainException("FATAL: '--observe-only' can only be used by UI clients "
"connecting to a server.\n")

if configParams.kill_server and not configParams.remote_server:
raise BBMainException("FATAL: '--kill-server' can only be used to terminate a remote server")

if "BBDEBUG" in os.environ:
level = int(os.environ["BBDEBUG"])
if level > configuration.debug:
configuration.debug = level

bb.msg.init_msgconfig(configParams.verbose, configuration.debug,
configuration.debug_domains)

# Ensure logging messages get sent to the UI as events
handler = bb.event.LogHandler()
if not configParams.status_only:
# In status only mode there are no logs and no UI
logger.addHandler(handler)

# Clear away any spurious environment variables while we stoke up the cooker
cleanedvars = bb.utils.clean_environment()

featureset = []
if not configParams.server_only:
# Collect the feature set for the UI
featureset = getattr(ui_module, "featureSet", [])

if not configParams.remote_server:
# we start a server with a given configuration
server = start_server(servermodule, configParams, configuration, featureset)
bb.event.ui_queue = []
else:
# we start a stub server that is actually a XMLRPClient that connects to a real server
server = servermodule.BitBakeXMLRPCClient(configParams.observe_only, configParams.xmlrpctoken)
server.saveConnectionDetails(configParams.remote_server)


if not configParams.server_only:
try:
server_connection = server.establishConnection(featureset)
except Exception as e:
if configParams.kill_server:
return 0
bb.fatal("Could not connect to server %s: %s" % (configParams.remote_server, str(e)))

# Restore the environment in case the UI needs it
for k in cleanedvars:
os.environ[k] = cleanedvars[k]

logger.removeHandler(handler)


if configParams.status_only:
server_connection.terminate()
return 0

if configParams.kill_server:
server_connection.connection.terminateServer()
bb.event.ui_queue = []
return 0

try:
return ui_module.main(server_connection.connection, server_connection.events, configParams)
finally:
bb.event.ui_queue = []
server_connection.terminate()
else:
print("Bitbake server address: %s, server port: %s" % (server.serverImpl.host, server.serverImpl.port))
return 0

return 1
@@ -26,10 +26,9 @@ File parsers for the BitBake build tools.

handlers = []

import errno
import logging
import os
import stat
import logging
import bb
import bb.utils
import bb.siggen
@@ -82,7 +81,7 @@ def update_cache(f):
def mark_dependency(d, f):
if f.startswith('./'):
f = "%s/%s" % (os.getcwd(), f[2:])
deps = (d.getVar('__depends', False) or [])
deps = (d.getVar('__depends') or [])
s = (f, cached_mtime_noerror(f))
if s not in deps:
deps.append(s)
@@ -90,7 +89,7 @@ def mark_dependency(d, f):

def check_dependency(d, f):
s = (f, cached_mtime_noerror(f))
deps = (d.getVar('__depends', False) or [])
deps = (d.getVar('__depends') or [])
return s in deps

def supports(fn, data):
@@ -123,12 +122,12 @@ def resolve_file(fn, d):
for af in attempts:
mark_dependency(d, af)
if not newfn:
raise IOError(errno.ENOENT, "file %s not found in %s" % (fn, bbpath))
raise IOError("file %s not found in %s" % (fn, bbpath))
fn = newfn

mark_dependency(d, fn)
if not os.path.isfile(fn):
raise IOError(errno.ENOENT, "file %s not found" % fn)
raise IOError("file %s not found" % fn)

return fn


@@ -85,7 +85,7 @@ class DataNode(AstNode):
if 'flag' in self.groupd and self.groupd['flag'] != None:
return data.getVarFlag(key, self.groupd['flag'], noweakdefault=True)
else:
return data.getVar(key, False, noweakdefault=True, parsing=True)
return data.getVar(key, noweakdefault=True)

def eval(self, data):
groupd = self.groupd
@@ -128,7 +128,7 @@ class DataNode(AstNode):
if 'flag' in groupd and groupd['flag'] != None:
flag = groupd['flag']
elif groupd["lazyques"]:
flag = "_defaultval"
flag = "defaultval"

loginfo['op'] = op
loginfo['detail'] = groupd["value"]
@@ -136,10 +136,10 @@ class DataNode(AstNode):
if flag:
data.setVarFlag(key, flag, val, **loginfo)
else:
data.setVar(key, val, parsing=True, **loginfo)
data.setVar(key, val, **loginfo)

class MethodNode(AstNode):
tr_tbl = string.maketrans('/.+-@%&', '_______')
tr_tbl = string.maketrans('/.+-@%', '______')

def __init__(self, filename, lineno, func_name, body):
AstNode.__init__(self, filename, lineno)
@@ -152,13 +152,13 @@ class MethodNode(AstNode):
funcname = ("__anon_%s_%s" % (self.lineno, self.filename.translate(MethodNode.tr_tbl)))
text = "def %s(d):\n" % (funcname) + text
bb.methodpool.insert_method(funcname, text, self.filename)
anonfuncs = data.getVar('__BBANONFUNCS', False) or []
anonfuncs = data.getVar('__BBANONFUNCS') or []
anonfuncs.append(funcname)
data.setVar('__BBANONFUNCS', anonfuncs)
data.setVar(funcname, text, parsing=True)
data.setVar(funcname, text)
else:
data.setVarFlag(self.func_name, "func", 1)
data.setVar(self.func_name, text, parsing=True)
data.setVar(self.func_name, text)

class PythonMethodNode(AstNode):
def __init__(self, filename, lineno, function, modulename, body):
@@ -175,7 +175,7 @@ class PythonMethodNode(AstNode):
bb.methodpool.insert_method(self.modulename, text, self.filename)
data.setVarFlag(self.function, "func", 1)
data.setVarFlag(self.function, "python", 1)
data.setVar(self.function, text, parsing=True)
data.setVar(self.function, text)

class MethodFlagsNode(AstNode):
def __init__(self, filename, lineno, key, m):
@@ -184,7 +184,7 @@ class MethodFlagsNode(AstNode):
self.m = m

def eval(self, data):
if data.getVar(self.key, False):
if data.getVar(self.key):
# clean up old version of this piece of metadata, as its
# flags could cause problems
data.setVarFlag(self.key, 'python', None)
@@ -209,10 +209,10 @@ class ExportFuncsNode(AstNode):
for func in self.n:
calledfunc = self.classname + "_" + func

if data.getVar(func, False) and not data.getVarFlag(func, 'export_func'):
if data.getVar(func) and not data.getVarFlag(func, 'export_func'):
continue

if data.getVar(func, False):
if data.getVar(func):
data.setVarFlag(func, 'python', None)
data.setVarFlag(func, 'func', None)

@@ -224,11 +224,11 @@ class ExportFuncsNode(AstNode):
data.setVarFlag(calledfunc, flag, data.getVarFlag(func, flag))

if data.getVarFlag(calledfunc, "python"):
data.setVar(func, " bb.build.exec_func('" + calledfunc + "', d)\n", parsing=True)
data.setVar(func, " bb.build.exec_func('" + calledfunc + "', d)\n")
else:
if "-" in self.classname:
bb.fatal("The classname %s contains a dash character and is calling an sh function %s using EXPORT_FUNCTIONS. Since a dash is illegal in sh function names, this cannot work, please rename the class or don't use EXPORT_FUNCTIONS." % (self.classname, calledfunc))
data.setVar(func, " " + calledfunc + "\n", parsing=True)
data.setVar(func, " " + calledfunc + "\n")
data.setVarFlag(func, 'export_func', '1')

class AddTaskNode(AstNode):
@@ -255,7 +255,7 @@ class BBHandlerNode(AstNode):
self.hs = fns.split()

def eval(self, data):
bbhands = data.getVar('__BBHANDLERS', False) or []
bbhands = data.getVar('__BBHANDLERS') or []
for h in self.hs:
bbhands.append(h)
data.setVarFlag(h, "handler", 1)
@@ -315,22 +315,22 @@ def handleInherit(statements, filename, lineno, m):

def finalize(fn, d, variant = None):
all_handlers = {}
for var in d.getVar('__BBHANDLERS', False) or []:
for var in d.getVar('__BBHANDLERS') or []:
# try to add the handler
bb.event.register(var, d.getVar(var, False), (d.getVarFlag(var, "eventmask", True) or "").split())
bb.event.register(var, d.getVar(var), (d.getVarFlag(var, "eventmask", True) or "").split())

bb.event.fire(bb.event.RecipePreFinalise(fn), d)

bb.data.expandKeys(d)
bb.data.update_data(d)
code = []
for funcname in d.getVar("__BBANONFUNCS", False) or []:
for funcname in d.getVar("__BBANONFUNCS") or []:
code.append("%s(d)" % funcname)
bb.utils.better_exec("\n".join(code), {"d": d})
bb.data.update_data(d)

tasklist = d.getVar('__BBTASKS', False) or []
deltasklist = d.getVar('__BBDELTASKS', False) or []
tasklist = d.getVar('__BBTASKS') or []
deltasklist = d.getVar('__BBDELTASKS') or []
bb.build.add_tasks(tasklist, deltasklist, d)

bb.parse.siggen.finalise(fn, d, variant)

@@ -32,7 +32,7 @@ import bb.build, bb.utils
from bb import data

from . import ConfHandler
from .. import resolve_file, ast, logger, ParseError
from .. import resolve_file, ast, logger
from .ConfHandler import include, init

# For compatibility
@@ -48,7 +48,7 @@ __def_regexp__ = re.compile( r"def\s+(\w+).*:" )
__python_func_regexp__ = re.compile( r"(\s+.*)|(^$)" )


__infunc__ = []
__infunc__ = ""
__inpython__ = False
__body__ = []
__classname__ = ""
@@ -69,14 +69,15 @@ def supports(fn, d):
return os.path.splitext(fn)[-1] in [".bb", ".bbclass", ".inc"]

def inherit(files, fn, lineno, d):
__inherit_cache = d.getVar('__inherit_cache', False) or []
__inherit_cache = d.getVar('__inherit_cache') or []
files = d.expand(files).split()
for file in files:
if not os.path.isabs(file) and not file.endswith(".bbclass"):
file = os.path.join('classes', '%s.bbclass' % file)

if not os.path.isabs(file):
bbpath = d.getVar("BBPATH", True)
dname = os.path.dirname(fn)
bbpath = "%s:%s" % (dname, d.getVar("BBPATH", True))
abs_fn, attempts = bb.utils.which(bbpath, file, history=True)
for af in attempts:
if af != abs_fn:
@@ -89,7 +90,7 @@ def inherit(files, fn, lineno, d):
__inherit_cache.append( file )
d.setVar('__inherit_cache', __inherit_cache)
include(fn, file, lineno, d, "inherit")
__inherit_cache = d.getVar('__inherit_cache', False) or []
__inherit_cache = d.getVar('__inherit_cache') or []

def get_statements(filename, absolute_filename, base_name):
global cached_statements
@@ -119,7 +120,7 @@ def get_statements(filename, absolute_filename, base_name):
def handle(fn, d, include):
global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __residue__, __classname__
__body__ = []
__infunc__ = []
__infunc__ = ""
__classname__ = ""
__residue__ = []

@@ -129,13 +130,13 @@ def handle(fn, d, include):

if ext == ".bbclass":
__classname__ = root
__inherit_cache = d.getVar('__inherit_cache', False) or []
__inherit_cache = d.getVar('__inherit_cache') or []
if not fn in __inherit_cache:
__inherit_cache.append(fn)
d.setVar('__inherit_cache', __inherit_cache)

if include != 0:
oldfile = d.getVar('FILE', False)
oldfile = d.getVar('FILE')
else:
oldfile = None

@@ -148,7 +149,7 @@ def handle(fn, d, include):
statements = get_statements(fn, abs_fn, base_name)

# DONE WITH PARSING... time to evaluate
if ext != ".bbclass" and abs_fn != oldfile:
if ext != ".bbclass":
d.setVar('FILE', abs_fn)

try:
@@ -158,15 +159,10 @@ def handle(fn, d, include):
if include == 0:
return { "" : d }

if __infunc__:
raise ParseError("Shell function %s is never closed" % __infunc__[0], __infunc__[1], __infunc__[2])
if __residue__:
raise ParseError("Leftover unparsed (incomplete?) data %s from %s" % __residue__, fn)

if ext != ".bbclass" and include == 0:
return ast.multi_finalize(fn, d)

if ext != ".bbclass" and oldfile and abs_fn != oldfile:
if oldfile:
d.setVar("FILE", oldfile)

return d
@@ -176,8 +172,8 @@ def feeder(lineno, s, fn, root, statements):
if __infunc__:
if s == '}':
__body__.append('')
ast.handleMethod(statements, fn, lineno, __infunc__[0], __body__)
__infunc__ = []
ast.handleMethod(statements, fn, lineno, __infunc__, __body__)
__infunc__ = ""
__body__ = []
else:
__body__.append(s)
@@ -221,8 +217,8 @@ def feeder(lineno, s, fn, root, statements):

m = __func_start_regexp__.match(s)
if m:
__infunc__ = [m.group("func") or "__anonymous", fn, lineno]
ast.handleMethodFlags(statements, fn, lineno, __infunc__[0], m)
__infunc__ = m.group("func") or "__anonymous"
ast.handleMethodFlags(statements, fn, lineno, __infunc__, m)
return

m = __def_regexp__.match(s)

@@ -24,11 +24,10 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import errno
import re
import os
import re, os
import logging
import bb.utils
from bb.parse import ParseError, resolve_file, ast, logger, handle
from bb.parse import ParseError, resolve_file, ast, logger

__config_regexp__ = re.compile( r"""
^
@@ -59,7 +58,7 @@ __require_regexp__ = re.compile( r"require\s+(.+)" )
__export_regexp__ = re.compile( r"export\s+([a-zA-Z0-9\-_+.${}/]+)$" )

def init(data):
topdir = data.getVar('TOPDIR', False)
topdir = data.getVar('TOPDIR')
if not topdir:
data.setVar('TOPDIR', os.getcwd())

@@ -67,43 +66,40 @@ def init(data):
def supports(fn, d):
return fn[-5:] == ".conf"

def include(parentfn, fn, lineno, data, error_out):
def include(oldfn, fn, lineno, data, error_out):
"""
error_out: A string indicating the verb (e.g. "include", "inherit") to be
used in a ParseError that will be raised if the file to be included could
not be included. Specify False to avoid raising an error in this case.
"""
if parentfn == fn: # prevent infinite recursion
if oldfn == fn: # prevent infinite recursion
return None

import bb
fn = data.expand(fn)
parentfn = data.expand(parentfn)
oldfn = data.expand(oldfn)

if not os.path.isabs(fn):
dname = os.path.dirname(parentfn)
dname = os.path.dirname(oldfn)
bbpath = "%s:%s" % (dname, data.getVar("BBPATH", True))
abs_fn, attempts = bb.utils.which(bbpath, fn, history=True)
if abs_fn and bb.parse.check_dependency(data, abs_fn):
logger.warn("Duplicate inclusion for %s in %s" % (abs_fn, data.getVar('FILE', True)))
bb.warn("Duplicate inclusion for %s in %s" % (abs_fn, data.getVar('FILE', True)))
for af in attempts:
bb.parse.mark_dependency(data, af)
if abs_fn:
fn = abs_fn
elif bb.parse.check_dependency(data, fn):
logger.warn("Duplicate inclusion for %s in %s" % (fn, data.getVar('FILE', True)))
bb.warn("Duplicate inclusion for %s in %s" % (fn, data.getVar('FILE', True)))

from bb.parse import handle
try:
bb.parse.handle(fn, data, True)
except (IOError, OSError) as exc:
if exc.errno == errno.ENOENT:
if error_out:
raise ParseError("Could not %s file %s" % (error_out, fn), parentfn, lineno)
logger.debug(2, "CONF file '%s' not found", fn)
else:
if error_out:
raise ParseError("Could not %s file %s: %s" % (error_out, fn, exc.strerror), parentfn, lineno)
else:
raise ParseError("Error parsing %s: %s" % (fn, exc.strerror), parentfn, lineno)
ret = handle(fn, data, True)
except (IOError, OSError):
if error_out:
raise ParseError("Could not %(error_out)s file %(fn)s" % vars(), oldfn, lineno)
logger.debug(2, "CONF file '%s' not found", fn)
bb.parse.mark_dependency(data, fn)

# We have an issue where a UI might want to enforce particular settings such as
# an empty DISTRO variable. If configuration files do something like assigning
@@ -119,7 +115,7 @@ def handle(fn, data, include):
if include == 0:
oldfile = None
else:
oldfile = data.getVar('FILE', False)
oldfile = data.getVar('FILE')

abs_fn = resolve_file(fn, data)
f = open(abs_fn, 'r')

@@ -64,7 +64,7 @@ class Popen(subprocess.Popen):
options.update(kwargs)
subprocess.Popen.__init__(self, *args, **options)

def _logged_communicate(pipe, log, input, extrafiles):
def _logged_communicate(pipe, log, input):
if pipe.stdin:
if input is not None:
pipe.stdin.write(input)
@@ -79,20 +79,6 @@ def _logged_communicate(pipe, log, input, extrafiles):
if pipe.stderr is not None:
bb.utils.nonblockingfd(pipe.stderr.fileno())
rin.append(pipe.stderr)
for fobj, _ in extrafiles:
bb.utils.nonblockingfd(fobj.fileno())
rin.append(fobj)

def readextras(selected):
for fobj, func in extrafiles:
if fobj in selected:
try:
data = fobj.read()
except IOError as err:
if err.errno == errno.EAGAIN or err.errno == errno.EWOULDBLOCK:
data = None
if data is not None:
func(data)

try:
while pipe.poll() is None:
@@ -114,21 +100,15 @@ def _logged_communicate(pipe, log, input, extrafiles):
if data is not None:
errdata.append(data)
log.write(data)

readextras(r)

finally:
log.flush()

readextras([fobj for fobj, _ in extrafiles])

if pipe.stdout is not None:
pipe.stdout.close()
if pipe.stderr is not None:
pipe.stderr.close()
return ''.join(outdata), ''.join(errdata)

def run(cmd, input=None, log=None, extrafiles=[], **options):
def run(cmd, input=None, log=None, **options):
"""Convenience function to run a command and return its output, raising an
exception when the command fails"""

@@ -144,7 +124,7 @@ def run(cmd, input=None, log=None, extrafiles=[], **options):
raise CmdError(cmd, exc)

if log:
stdout, stderr = _logged_communicate(pipe, log, input, extrafiles)
stdout, stderr = _logged_communicate(pipe, log, input)
else:
stdout, stderr = pipe.communicate(input)


@@ -430,7 +430,7 @@ class RunQueueData:
# Nothing to do
return 0

logger.info("Preparing RunQueue")
logger.info("Preparing runqueue")

# Step A - Work out a list of tasks to run
#
@@ -1096,13 +1096,6 @@ class RunQueue:
raise
except SystemExit:
raise
except bb.BBHandledException:
try:
self.teardown_workers()
except:
pass
self.state = runQueueComplete
raise
except:
logger.error("An uncaught exception occured in runqueue, please see the failure below:")
try:
@@ -1161,14 +1154,9 @@ class RunQueue:
sq_hash.append(self.rqdata.runq_hash[task])
sq_taskname.append(taskname)
sq_task.append(task)
locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.expanded_data }
try:
call = self.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=True)"
valid = bb.utils.better_eval(call, locs)
# Handle version with no siginfo parameter
except TypeError:
call = self.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d)"
valid = bb.utils.better_eval(call, locs)
call = self.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d)"
locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.data }
valid = bb.utils.better_eval(call, locs)
for v in valid:
valid_new.add(sq_task[v])

@@ -1254,6 +1242,8 @@ class RunQueue:
prevh = __find_md5__.search(latestmatch).group(0)
output = bb.siggen.compare_sigfiles(latestmatch, match, recursecb)
bb.plain("\nTask %s:%s couldn't be used from the cache because:\n We need hash %s, closest matching task was %s\n " % (pn, taskname, h, prevh) + '\n '.join(output))
else:
bb.plain("Error, can't find multiple tasks at divergence point? Was there a previously run task?")

class RunQueueExecute:

@@ -1280,9 +1270,6 @@ class RunQueueExecute:
if rq.fakeworkerpipe:
rq.fakeworkerpipe.setrunqueueexec(self)

if self.number_tasks <= 0:
bb.fatal("Invalid BB_NUMBER_THREADS %s" % self.number_tasks)

def runqueue_process_waitpid(self, task, status):

# self.build_stamps[pid] may not exist when use shared work directory.
@@ -1346,7 +1333,7 @@ class RunQueueExecute:
taskname = self.rqdata.runq_task[depid]
taskdata[dep] = [pn, taskname, fn]
call = self.rq.depvalidate + "(task, taskdata, notneeded, d)"
locs = { "task" : task, "taskdata" : taskdata, "notneeded" : self.scenequeue_notneeded, "d" : self.cooker.expanded_data }
locs = { "task" : task, "taskdata" : taskdata, "notneeded" : self.scenequeue_notneeded, "d" : self.cooker.data }
valid = bb.utils.better_eval(call, locs)
return valid

@@ -1415,7 +1402,7 @@ class RunQueueExecuteTasks(RunQueueExecute):

call = self.rq.setsceneverify + "(covered, tasknames, fnids, fns, d, invalidtasks=invalidtasks)"
call2 = self.rq.setsceneverify + "(covered, tasknames, fnids, fns, d)"
locs = { "covered" : self.rq.scenequeue_covered, "tasknames" : self.rqdata.runq_task, "fnids" : self.rqdata.runq_fnid, "fns" : self.rqdata.taskData.fn_index, "d" : self.cooker.expanded_data, "invalidtasks" : invalidtasks }
locs = { "covered" : self.rq.scenequeue_covered, "tasknames" : self.rqdata.runq_task, "fnids" : self.rqdata.runq_fnid, "fns" : self.rqdata.taskData.fn_index, "d" : self.cooker.data, "invalidtasks" : invalidtasks }
# Backwards compatibility with older versions without invalidtasks
try:
covered_remove = bb.utils.better_eval(call, locs)
@@ -1580,12 +1567,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
taskdep = self.rqdata.dataCache.task_deps[fn]
if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not self.cooker.configuration.dry_run:
if not self.rq.fakeworker:
try:
self.rq.start_fakeworker(self)
except OSError as exc:
logger.critical("Failed to spawn fakeroot worker to run %s:%s: %s" % (fn, taskname, str(exc)))
self.rq.state = runQueueFailed
return True
self.rq.start_fakeworker(self)
self.rq.fakeworker.stdin.write("<runtask>" + pickle.dumps((fn, task, taskname, False, self.cooker.collection.get_file_appends(fn), taskdepdata)) + "</runtask>")
self.rq.fakeworker.stdin.flush()
else:
@@ -1840,7 +1822,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
sq_taskname.append(taskname)
sq_task.append(task)
call = self.rq.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d)"
locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.expanded_data }
locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.data }
valid = bb.utils.better_eval(call, locs)

valid_new = stamppresent

@@ -115,7 +115,7 @@ class ProcessServer(Process, BaseImplServer):
self.quitout.recv()
self.quit = True

self.idle_commands(.1, [self.command_channel, self.quitout])
self.idle_commands(.1, [self.event_queue._reader, self.command_channel, self.quitout])
except Exception:
logger.exception('Running command %s', command)

@@ -144,9 +144,8 @@ class ProcessServer(Process, BaseImplServer):
fds = fds + retval
except SystemExit:
raise
except Exception as exc:
if not isinstance(exc, bb.BBHandledException):
logger.exception('Running idle function')
except Exception:
logger.exception('Running idle function')
del self._idlefuns[function]
self.quit = True


@@ -235,7 +235,6 @@ class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer):
fds = [self]
nextsleep = 0.1
for function, data in self._idlefuns.items():
retval = None
try:
retval = function(self, data, False)
if retval is False:
@@ -252,9 +251,6 @@ class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer):
except:
import traceback
traceback.print_exc()
if retval == None:
# the function execute failed; delete it
del self._idlefuns[function]
pass

socktimeout = self.socket.gettimeout() or nextsleep
@@ -306,8 +302,6 @@ class BitBakeXMLRPCServerConnection(BitBakeBaseServerConnection):

_, error = self.connection.runCommand(["setFeatures", self.featureset])
if error:
# disconnect the client, we can't make the setFeature work
self.connection.removeClient()
# no need to log it here, the error shall be sent to the client
raise BaseException(error)


@@ -195,12 +195,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
if cs:
data = data + cs

taskdep = dataCache.task_deps[fn]
if 'nostamp' in taskdep and task in taskdep['nostamp']:
# Nostamp tasks need an implicit taint so that they force any dependent tasks to run
import uuid
data = data + str(uuid.uuid4())

taint = self.read_taint(fn, task, dataCache.stamp[fn])
if taint:
data = data + taint
@@ -314,12 +308,6 @@ def clean_basepaths(a):
b[clean_basepath(x)] = a[x]
return b

def clean_basepaths_list(a):
b = []
for x in a:
b.append(clean_basepath(x))
return b

def compare_sigfiles(a, b, recursecb = None):
output = []

@@ -420,21 +408,6 @@ def compare_sigfiles(a, b, recursecb = None):
output.append("Dependency on checksum of file %s was removed" % (f))


if len(a_data['runtaskdeps']) != len(b_data['runtaskdeps']):
changed = ["Number of task dependencies changed"]
else:
changed = []
for idx, task in enumerate(a_data['runtaskdeps']):
a = a_data['runtaskdeps'][idx]
b = b_data['runtaskdeps'][idx]
if a_data['runtaskhashes'][a] != b_data['runtaskhashes'][b]:
changed.append("%s with hash %s\n changed to\n%s with hash %s" % (a, a_data['runtaskhashes'][a], b, b_data['runtaskhashes'][b]))

if changed:
output.append("runtaskdeps changed from %s to %s" % (clean_basepaths_list(a_data['runtaskdeps']), clean_basepaths_list(b_data['runtaskdeps'])))
output.append("\n".join(changed))


if 'runtaskhashes' in a_data and 'runtaskhashes' in b_data:
a = a_data['runtaskhashes']
b = b_data['runtaskhashes']
@@ -510,17 +483,4 @@ def dump_sigfile(a):
if 'taint' in a_data:
output.append("Tainted (by forced/invalidated task): %s" % a_data['taint'])

data = a_data['basehash']
for dep in a_data['runtaskdeps']:
data = data + a_data['runtaskhashes'][dep]

for c in a_data['file_checksum_values']:
data = data + c[1]

if 'taint' in a_data:
data = data + a_data['taint']

h = hashlib.md5(data).hexdigest()
output.append("Computed Hash is %s" % h)

return output

@@ -41,7 +41,7 @@ class TaskData:
"""
BitBake Task Data implementation
"""
def __init__(self, abort = True, tryaltconfigs = False, skiplist = None, allowincomplete = False):
def __init__(self, abort = True, tryaltconfigs = False, skiplist = None):
self.build_names_index = []
self.run_names_index = []
self.fn_index = []
@@ -70,7 +70,6 @@ class TaskData:

self.abort = abort
self.tryaltconfigs = tryaltconfigs
self.allowincomplete = allowincomplete

self.skiplist = skiplist

@@ -595,10 +594,9 @@ class TaskData:
added = added + 1
except bb.providers.NoProvider:
targetid = self.getbuild_id(target)
if self.abort and targetid in self.external_targets and not self.allowincomplete:
if self.abort and targetid in self.external_targets:
raise
if not self.allowincomplete:
self.remove_buildtarget(targetid)
self.remove_buildtarget(targetid)
for target in self.get_unresolved_run_targets(dataCache):
try:
self.add_rprovider(cfgData, dataCache, target)

@@ -24,30 +24,6 @@ import unittest
import bb
import bb.data
import bb.parse
import logging

class LogRecord():
def __enter__(self):
logs = []
class LogHandler(logging.Handler):
def emit(self, record):
logs.append(record)
logger = logging.getLogger("BitBake")
handler = LogHandler()
self.handler = handler
logger.addHandler(handler)
return logs
def __exit__(self, type, value, traceback):
logger = logging.getLogger("BitBake")
logger.removeHandler(self.handler)
return

def logContains(item, logs):
for l in logs:
m = l.getMessage()
if item in m:
return True
return False

class DataExpansions(unittest.TestCase):
def setUp(self):
@@ -134,23 +110,17 @@ class DataExpansions(unittest.TestCase):

def test_rename(self):
self.d.renameVar("foo", "newfoo")
self.assertEqual(self.d.getVar("newfoo", False), "value_of_foo")
self.assertEqual(self.d.getVar("foo", False), None)
self.assertEqual(self.d.getVar("newfoo"), "value_of_foo")
self.assertEqual(self.d.getVar("foo"), None)

def test_deletion(self):
self.d.delVar("foo")
self.assertEqual(self.d.getVar("foo", False), None)
self.assertEqual(self.d.getVar("foo"), None)

def test_keys(self):
keys = self.d.keys()
self.assertEqual(keys, ['value_of_foo', 'foo', 'bar'])

def test_keys_deletion(self):
newd = bb.data.createCopy(self.d)
newd.delVar("bar")
keys = newd.keys()
self.assertEqual(keys, ['value_of_foo', 'foo'])

class TestNestedExpansions(unittest.TestCase):
def setUp(self):
self.d = bb.data.init()
@@ -196,28 +166,28 @@ class TestMemoize(unittest.TestCase):
def test_memoized(self):
d = bb.data.init()
d.setVar("FOO", "bar")
self.assertTrue(d.getVar("FOO", False) is d.getVar("FOO", False))
self.assertTrue(d.getVar("FOO") is d.getVar("FOO"))

def test_not_memoized(self):
d1 = bb.data.init()
d2 = bb.data.init()
d1.setVar("FOO", "bar")
d2.setVar("FOO", "bar2")
self.assertTrue(d1.getVar("FOO", False) is not d2.getVar("FOO", False))
self.assertTrue(d1.getVar("FOO") is not d2.getVar("FOO"))

def test_changed_after_memoized(self):
d = bb.data.init()
d.setVar("foo", "value of foo")
self.assertEqual(str(d.getVar("foo", False)), "value of foo")
self.assertEqual(str(d.getVar("foo")), "value of foo")
d.setVar("foo", "second value of foo")
self.assertEqual(str(d.getVar("foo", False)), "second value of foo")
self.assertEqual(str(d.getVar("foo")), "second value of foo")

def test_same_value(self):
d = bb.data.init()
d.setVar("foo", "value of")
d.setVar("bar", "value of")
self.assertEqual(d.getVar("foo", False),
d.getVar("bar", False))
self.assertEqual(d.getVar("foo"),
d.getVar("bar"))

class TestConcat(unittest.TestCase):
def setUp(self):
@@ -270,13 +240,6 @@ class TestConcatOverride(unittest.TestCase):
bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "foo:val:val2:bar")

def test_append_unset(self):
self.d.setVar("TEST_prepend", "${FOO}:")
self.d.setVar("TEST_append", ":val2")
self.d.setVar("TEST_append", ":${BAR}")
bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "foo::val2:bar")

def test_remove(self):
self.d.setVar("TEST", "${VAL} ${BAR}")
self.d.setVar("TEST_remove", "val")
@@ -325,66 +288,13 @@ class TestOverrides(unittest.TestCase):
bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "testvalue2")

def test_one_override_unset(self):
self.d.setVar("TEST2_bar", "testvalue2")
bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST2", True), "testvalue2")
self.assertItemsEqual(self.d.keys(), ['TEST', 'TEST2', 'OVERRIDES', 'TEST2_bar'])

def test_multiple_override(self):
self.d.setVar("TEST_bar", "testvalue2")
self.d.setVar("TEST_local", "testvalue3")
self.d.setVar("TEST_foo", "testvalue4")
bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "testvalue3")
self.assertItemsEqual(self.d.keys(), ['TEST', 'TEST_foo', 'OVERRIDES', 'TEST_bar', 'TEST_local'])

def test_multiple_combined_overrides(self):
self.d.setVar("TEST_local_foo_bar", "testvalue3")
bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "testvalue3")

def test_multiple_overrides_unset(self):
self.d.setVar("TEST2_local_foo_bar", "testvalue3")
bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST2", True), "testvalue3")

def test_keyexpansion_override(self):
self.d.setVar("LOCAL", "local")
self.d.setVar("TEST_bar", "testvalue2")
self.d.setVar("TEST_${LOCAL}", "testvalue3")
self.d.setVar("TEST_foo", "testvalue4")
bb.data.update_data(self.d)
bb.data.expandKeys(self.d)
self.assertEqual(self.d.getVar("TEST", True), "testvalue3")

def test_rename_override(self):
self.d.setVar("ALTERNATIVE_ncurses-tools_class-target", "a")
self.d.setVar("OVERRIDES", "class-target")
bb.data.update_data(self.d)
self.d.renameVar("ALTERNATIVE_ncurses-tools", "ALTERNATIVE_lib32-ncurses-tools")
self.assertEqual(self.d.getVar("ALTERNATIVE_lib32-ncurses-tools", True), "a")

def test_underscore_override(self):
self.d.setVar("TEST_bar", "testvalue2")
self.d.setVar("TEST_some_val", "testvalue3")
self.d.setVar("TEST_foo", "testvalue4")
self.d.setVar("OVERRIDES", "foo:bar:some_val")
self.assertEqual(self.d.getVar("TEST", True), "testvalue3")

class TestKeyExpansion(unittest.TestCase):
def setUp(self):
self.d = bb.data.init()
self.d.setVar("FOO", "foo")
self.d.setVar("BAR", "foo")

def test_keyexpand(self):
self.d.setVar("VAL_${FOO}", "A")
self.d.setVar("VAL_${BAR}", "B")
with LogRecord() as logs:
bb.data.expandKeys(self.d)
self.assertTrue(logContains("Variable key VAL_${FOO} (A) replaces original key VAL_foo (B)", logs))
self.assertEqual(self.d.getVar("VAL_foo", True), "A")

class TestFlags(unittest.TestCase):
def setUp(self):

@@ -24,7 +24,6 @@ import tempfile
import subprocess
import os
from bb.fetch2 import URI
from bb.fetch2 import FetchMethod
import bb

class URITest(unittest.TestCase):
@@ -315,7 +314,6 @@ class URITest(unittest.TestCase):
class FetcherTest(unittest.TestCase):

def setUp(self):
self.origdir = os.getcwd()
self.d = bb.data.init()
self.tempdir = tempfile.mkdtemp()
self.dldir = os.path.join(self.tempdir, "download")
@@ -327,7 +325,6 @@ class FetcherTest(unittest.TestCase):
self.d.setVar("PERSISTENT_DIR", persistdir)

def tearDown(self):
os.chdir(self.origdir)
bb.utils.prunedir(self.tempdir)

class MirrorUriTest(FetcherTest):
@@ -393,18 +390,6 @@ class MirrorUriTest(FetcherTest):
uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
self.assertEqual(uris, ['file:///someotherpath/downloads/bitbake-1.0.tar.gz'])

def test_mirror_of_mirror(self):
# Test that a mirror of a mirror works
mirrorvar = self.mirrorvar + " http://.*/.* http://otherdownloads.yoctoproject.org/downloads/ \n"
mirrorvar = mirrorvar + " http://otherdownloads.yoctoproject.org/.* http://downloads2.yoctoproject.org/downloads/ \n"
fetcher = bb.fetch.FetchData("http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d)
mirrors = bb.fetch2.mirror_from_string(mirrorvar)
uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
self.assertEqual(uris, ['file:///somepath/downloads/bitbake-1.0.tar.gz',
'file:///someotherpath/downloads/bitbake-1.0.tar.gz',
'http://otherdownloads.yoctoproject.org/downloads/bitbake-1.0.tar.gz',
'http://downloads2.yoctoproject.org/downloads/bitbake-1.0.tar.gz'])


class FetcherLocalTest(FetcherTest):
def setUp(self):
@@ -490,19 +475,6 @@ class FetcherNetworkTest(FetcherTest):
fetcher.download()
self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)

def test_fetch_mirror_of_mirror(self):
self.d.setVar("MIRRORS", "http://.*/.* http://invalid2.yoctoproject.org/ \n http://invalid2.yoctoproject.org/.* http://downloads.yoctoproject.org/releases/bitbake")
fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
fetcher.download()
self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)

def test_fetch_file_mirror_of_mirror(self):
self.d.setVar("MIRRORS", "http://.*/.* file:///some1where/ \n file:///some1where/.* file://some2where/ \n file://some2where/.* http://downloads.yoctoproject.org/releases/bitbake")
fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
os.mkdir(self.dldir + "/some2where")
fetcher.download()
self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)

def test_fetch_premirror(self):
self.d.setVar("PREMIRRORS", "http://.*/.* http://downloads.yoctoproject.org/releases/bitbake")
fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
@@ -574,43 +546,6 @@ class FetcherNetworkTest(FetcherTest):
os.chdir(os.path.dirname(self.unpackdir))
fetcher.unpack(self.unpackdir)

def test_trusted_network(self):
# Ensure trusted_network returns True when the host IS in the list.
url = "git://Someserver.org/foo;rev=1"
self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org someserver.org server2.org server3.org")
self.assertTrue(bb.fetch.trusted_network(self.d, url))

def test_wild_trusted_network(self):
# Ensure trusted_network returns True when the *.host IS in the list.
url = "git://Someserver.org/foo;rev=1"
self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org")
self.assertTrue(bb.fetch.trusted_network(self.d, url))

def test_prefix_wild_trusted_network(self):
# Ensure trusted_network returns True when the prefix matches *.host.
url = "git://git.Someserver.org/foo;rev=1"
self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org")
self.assertTrue(bb.fetch.trusted_network(self.d, url))

def test_two_prefix_wild_trusted_network(self):
# Ensure trusted_network returns True when the prefix matches *.host.
url = "git://something.git.Someserver.org/foo;rev=1"
self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org")
self.assertTrue(bb.fetch.trusted_network(self.d, url))

def test_untrusted_network(self):
# Ensure trusted_network returns False when the host is NOT in the list.
url = "git://someserver.org/foo;rev=1"
self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org server2.org server3.org")
self.assertFalse(bb.fetch.trusted_network(self.d, url))

def test_wild_untrusted_network(self):
# Ensure trusted_network returns False when the host is NOT in the list.
url = "git://*.someserver.org/foo;rev=1"
self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org server2.org server3.org")
self.assertFalse(bb.fetch.trusted_network(self.d, url))


class URLHandle(unittest.TestCase):

datatable = {
@@ -630,130 +565,5 @@ class URLHandle(unittest.TestCase):
result = bb.fetch.encodeurl(v)
self.assertEqual(result, k)

class FetchLatestVersionTest(FetcherTest):

test_git_uris = {
# version pattern "X.Y.Z"
("mx-1.0", "git://github.com/clutter-project/mx.git;branch=mx-1.4", "9b1db6b8060bd00b121a692f942404a24ae2960f", "")
: "1.99.4",
# version pattern "vX.Y"
("mtd-utils", "git://git.infradead.org/mtd-utils.git", "ca39eb1d98e736109c64ff9c1aa2a6ecca222d8f", "")
: "1.5.0",
# version pattern "pkg_name-X.Y"
("presentproto", "git://anongit.freedesktop.org/git/xorg/proto/presentproto", "24f3a56e541b0a9e6c6ee76081f441221a120ef9", "")
: "1.0",
# version pattern "pkg_name-vX.Y.Z"
("dtc", "git://git.qemu.org/dtc.git", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "")
: "1.4.0",
# combination version pattern
("sysprof", "git://git.gnome.org/sysprof", "cd44ee6644c3641507fb53b8a2a69137f2971219", "")
: "1.2.0",
("u-boot-mkimage", "git://git.denx.de/u-boot.git;branch=master;protocol=git", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "")
: "2014.01",
# version pattern "yyyymmdd"
("mobile-broadband-provider-info", "git://git.gnome.org/mobile-broadband-provider-info", "4ed19e11c2975105b71b956440acdb25d46a347d", "")
: "20120614",
# packages with a valid GITTAGREGEX
("xf86-video-omap", "git://anongit.freedesktop.org/xorg/driver/xf86-video-omap", "ae0394e687f1a77e966cf72f895da91840dffb8f", "(?P<pver>(\d+\.(\d\.?)*))")
: "0.4.3",
("build-appliance-image", "git://git.yoctoproject.org/poky", "b37dd451a52622d5b570183a81583cc34c2ff555", "(?P<pver>(([0-9][\.|_]?)+[0-9]))")
: "11.0.0",
("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot", "cd437ecbd8986c894442f8fce1e0061e20f04dee", "chkconfig\-(?P<pver>((\d+[\.\-_]*)+))")
: "1.3.59",
("remake", "git://github.com/rocky/remake.git", "f05508e521987c8494c92d9c2871aec46307d51d", "(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))")
: "3.82+dbg0.9",
}

test_wget_uris = {
# packages with versions inside directory name
("util-linux", "http://kernel.org/pub/linux/utils/util-linux/v2.23/util-linux-2.24.2.tar.bz2", "", "")
: "2.24.2",
("enchant", "http://www.abisource.com/downloads/enchant/1.6.0/enchant-1.6.0.tar.gz", "", "")
: "1.6.0",
("cmake", "http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz", "", "")
: "2.8.12.1",
# packages with versions only in current directory
("eglic", "http://downloads.yoctoproject.org/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2", "", "")
: "2.19",
("gnu-config", "http://downloads.yoctoproject.org/releases/gnu-config/gnu-config-20120814.tar.bz2", "", "")
: "20120814",
# packages with "99" in the name of possible version
("pulseaudio", "http://freedesktop.org/software/pulseaudio/releases/pulseaudio-4.0.tar.xz", "", "")
: "5.0",
("xserver-xorg", "http://xorg.freedesktop.org/releases/individual/xserver/xorg-server-1.15.1.tar.bz2", "", "")
: "1.15.1",
# packages with valid REGEX_URI and REGEX
("cups", "http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2", "http://www.cups.org/software.php", "(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz")
: "2.0.0",
("db", "http://download.oracle.com/berkeley-db/db-5.3.21.tar.gz", "http://www.oracle.com/technetwork/products/berkeleydb/downloads/index-082944.html", "http://download.oracle.com/otn/berkeley-db/(?P<name>db-)(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz")
: "6.1.19",
}
if os.environ.get("BB_SKIP_NETTESTS") == "yes":
print("Unset BB_SKIP_NETTESTS to run network tests")
else:
def test_git_latest_versionstring(self):
for k, v in self.test_git_uris.items():
self.d.setVar("PN", k[0])
self.d.setVar("SRCREV", k[2])
self.d.setVar("GITTAGREGEX", k[3])
ud = bb.fetch2.FetchData(k[1], self.d)
pupver = ud.method.latest_versionstring(ud, self.d)
verstring = pupver[0]
r = bb.utils.vercmp_string(v, verstring)
self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))

def test_wget_latest_versionstring(self):
for k, v in self.test_wget_uris.items():
self.d.setVar("PN", k[0])
self.d.setVar("REGEX_URI", k[2])
self.d.setVar("REGEX", k[3])
ud = bb.fetch2.FetchData(k[1], self.d)
pupver = ud.method.latest_versionstring(ud, self.d)
verstring = pupver[0]
r = bb.utils.vercmp_string(v, verstring)
self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))


class FetchCheckStatusTest(FetcherTest):
test_wget_uris = ["http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2",
"http://www.cups.org/software/ipptool/ipptool-20130731-linux-ubuntu-i686.tar.gz",
"http://www.cups.org/",
"http://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz",
"http://downloads.yoctoproject.org/releases/sato/sato-engine-0.2.tar.gz",
"http://downloads.yoctoproject.org/releases/sato/sato-engine-0.3.tar.gz",
"https://yoctoproject.org/",
"https://yoctoproject.org/documentation",
"http://downloads.yoctoproject.org/releases/opkg/opkg-0.1.7.tar.gz",
"http://downloads.yoctoproject.org/releases/opkg/opkg-0.3.0.tar.gz",
"ftp://ftp.gnu.org/gnu/autoconf/autoconf-2.60.tar.gz",
"ftp://ftp.gnu.org/gnu/chess/gnuchess-5.08.tar.gz",
"ftp://ftp.gnu.org/gnu/gmp/gmp-4.0.tar.gz",
]

if os.environ.get("BB_SKIP_NETTESTS") == "yes":
print("Unset BB_SKIP_NETTESTS to run network tests")
else:

def test_wget_checkstatus(self):
fetch = bb.fetch2.Fetch(self.test_wget_uris, self.d)
for u in self.test_wget_uris:
ud = fetch.ud[u]
m = ud.method
ret = m.checkstatus(fetch, ud, self.d)
self.assertTrue(ret, msg="URI %s, can't check status" % (u))


def test_wget_checkstatus_connection_cache(self):
from bb.fetch2 import FetchConnectionCache

connection_cache = FetchConnectionCache()
fetch = bb.fetch2.Fetch(self.test_wget_uris, self.d,
connection_cache = connection_cache)

for u in self.test_wget_uris:
ud = fetch.ud[u]
m = ud.method
ret = m.checkstatus(fetch, ud, self.d)
self.assertTrue(ret, msg="URI %s, can't check status" % (u))

connection_cache.close_connections()

@@ -1,147 +0,0 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# BitBake Test for lib/bb/parse/
#
# Copyright (C) 2015 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#

import unittest
import tempfile
import logging
import bb
import os

logger = logging.getLogger('BitBake.TestParse')

import bb.parse
import bb.data
import bb.siggen

class ParseTest(unittest.TestCase):

testfile = """
A = "1"
B = "2"
do_install() {
echo "hello"
}

C = "3"
"""

def setUp(self):
self.d = bb.data.init()
bb.parse.siggen = bb.siggen.init(self.d)

def parsehelper(self, content, suffix = ".bb"):

f = tempfile.NamedTemporaryFile(suffix = suffix)
f.write(content)
f.flush()
os.chdir(os.path.dirname(f.name))
return f

def test_parse_simple(self):
f = self.parsehelper(self.testfile)
d = bb.parse.handle(f.name, self.d)['']
self.assertEqual(d.getVar("A", True), "1")
self.assertEqual(d.getVar("B", True), "2")
self.assertEqual(d.getVar("C", True), "3")

def test_parse_incomplete_function(self):
testfileB = self.testfile.replace("}", "")
f = self.parsehelper(testfileB)
with self.assertRaises(bb.parse.ParseError):
d = bb.parse.handle(f.name, self.d)['']

overridetest = """
RRECOMMENDS_${PN} = "a"
RRECOMMENDS_${PN}_libc = "b"
OVERRIDES = "libc:${PN}"
PN = "gtk+"
"""

def test_parse_overrides(self):
f = self.parsehelper(self.overridetest)
d = bb.parse.handle(f.name, self.d)['']
self.assertEqual(d.getVar("RRECOMMENDS", True), "b")
bb.data.expandKeys(d)
self.assertEqual(d.getVar("RRECOMMENDS", True), "b")
d.setVar("RRECOMMENDS_gtk+", "c")
self.assertEqual(d.getVar("RRECOMMENDS", True), "c")

overridetest2 = """
EXTRA_OECONF = ""
EXTRA_OECONF_class-target = "b"
EXTRA_OECONF_append = " c"
"""

def test_parse_overrides(self):
f = self.parsehelper(self.overridetest2)
d = bb.parse.handle(f.name, self.d)['']
d.appendVar("EXTRA_OECONF", " d")
d.setVar("OVERRIDES", "class-target")
self.assertEqual(d.getVar("EXTRA_OECONF", True), "b c d")

overridetest3 = """
DESCRIPTION = "A"
DESCRIPTION_${PN}-dev = "${DESCRIPTION} B"
PN = "bc"
"""

def test_parse_combinations(self):
f = self.parsehelper(self.overridetest3)
d = bb.parse.handle(f.name, self.d)['']
bb.data.expandKeys(d)
self.assertEqual(d.getVar("DESCRIPTION_bc-dev", True), "A B")
d.setVar("DESCRIPTION", "E")
d.setVar("DESCRIPTION_bc-dev", "C D")
d.setVar("OVERRIDES", "bc-dev")
self.assertEqual(d.getVar("DESCRIPTION", True), "C D")


classextend = """
VAR_var_override1 = "B"
EXTRA = ":override1"
OVERRIDES = "nothing${EXTRA}"

BBCLASSEXTEND = "###CLASS###"
"""
classextend_bbclass = """
EXTRA = ""
python () {
d.renameVar("VAR_var", "VAR_var2")
}
"""

#
# Test based upon a real world data corruption issue: one data store changing
# a variable poked through into a different data store. This test case
# replicates the issue, where the value 'B' would become unset/disappear.
#
def test_parse_classextend_contamination(self):
cls = self.parsehelper(self.classextend_bbclass, suffix=".bbclass")
#clsname = os.path.basename(cls.name).replace(".bbclass", "")
self.classextend = self.classextend.replace("###CLASS###", cls.name)
f = self.parsehelper(self.classextend)
alldata = bb.parse.handle(f.name, self.d)
d1 = alldata['']
d2 = alldata[cls.name]
self.assertEqual(d1.getVar("VAR_var", True), "B")
self.assertEqual(d2.getVar("VAR_var", True), None)

@@ -22,7 +22,6 @@
import unittest
import bb
import os
import tempfile

class VerCmpString(unittest.TestCase):

@@ -37,10 +36,6 @@ class VerCmpString(unittest.TestCase):
self.assertTrue(result < 0)
result = bb.utils.vercmp_string('1.1', '1_p2')
self.assertTrue(result < 0)
result = bb.utils.vercmp_string('1.0', '1.0+1.1-beta1')
self.assertTrue(result < 0)
result = bb.utils.vercmp_string('1.1', '1.0+1.1-beta1')
self.assertTrue(result > 0)

def test_explode_dep_versions(self):
correctresult = {"foo" : ["= 1.10"]}
@@ -106,273 +101,3 @@ class Path(unittest.TestCase):
for arg1, correctresult in checkitems:
result = bb.utils._check_unsafe_delete_path(arg1)
self.assertEqual(result, correctresult, '_check_unsafe_delete_path("%s") != %s' % (arg1, correctresult))


class EditMetadataFile(unittest.TestCase):
_origfile = """
# A comment
HELLO = "oldvalue"

THIS = "that"

# Another comment
NOCHANGE = "samevalue"
OTHER = 'anothervalue'

MULTILINE = "a1 \\
a2 \\
a3"

MULTILINE2 := " \\
b1 \\
b2 \\
b3 \\
"


MULTILINE3 = " \\
c1 \\
c2 \\
c3 \\
"

do_functionname() {
command1 ${VAL1} ${VAL2}
command2 ${VAL3} ${VAL4}
}
"""
def _testeditfile(self, varvalues, compareto, dummyvars=None):
if dummyvars is None:
dummyvars = []
with tempfile.NamedTemporaryFile('w', delete=False) as tf:
tf.write(self._origfile)
tf.close()
try:
varcalls = []
def handle_file(varname, origvalue, op, newlines):
self.assertIn(varname, varvalues, 'Callback called for variable %s not in the list!' % varname)
self.assertNotIn(varname, dummyvars, 'Callback called for variable %s in dummy list!' % varname)
varcalls.append(varname)
return varvalues[varname]

bb.utils.edit_metadata_file(tf.name, varvalues.keys(), handle_file)
with open(tf.name) as f:
modfile = f.readlines()
# Ensure the output matches the expected output
self.assertEqual(compareto.splitlines(True), modfile)
# Ensure the callback function was called for every variable we asked for
# (plus allow testing behaviour when a requested variable is not present)
self.assertEqual(sorted(varvalues.keys()), sorted(varcalls + dummyvars))
finally:
os.remove(tf.name)


def test_edit_metadata_file_nochange(self):
# Test file doesn't get modified with nothing to do
self._testeditfile({}, self._origfile)
# Test file doesn't get modified with only dummy variables
self._testeditfile({'DUMMY1': ('should_not_set', None, 0, True),
'DUMMY2': ('should_not_set_again', None, 0, True)}, self._origfile, dummyvars=['DUMMY1', 'DUMMY2'])
# Test file doesn't get modified with some of the same values
self._testeditfile({'THIS': ('that', None, 0, True),
'OTHER': ('anothervalue', None, 0, True),
'MULTILINE3': (' c1 c2 c3', None, 4, False)}, self._origfile)

def test_edit_metadata_file_1(self):

newfile1 = """
# A comment
HELLO = "newvalue"

THIS = "that"

# Another comment
NOCHANGE = "samevalue"
OTHER = 'anothervalue'

MULTILINE = "a1 \\
a2 \\
a3"

MULTILINE2 := " \\
b1 \\
b2 \\
b3 \\
"


MULTILINE3 = " \\
c1 \\
c2 \\
c3 \\
"

do_functionname() {
command1 ${VAL1} ${VAL2}
command2 ${VAL3} ${VAL4}
}
"""
self._testeditfile({'HELLO': ('newvalue', None, 4, True)}, newfile1)


def test_edit_metadata_file_2(self):

newfile2 = """
# A comment
HELLO = "oldvalue"

THIS = "that"

# Another comment
NOCHANGE = "samevalue"
OTHER = 'anothervalue'

MULTILINE = " \\
d1 \\
d2 \\
d3 \\
"

MULTILINE2 := " \\
b1 \\
b2 \\
b3 \\
"


MULTILINE3 = "nowsingle"

do_functionname() {
command1 ${VAL1} ${VAL2}
command2 ${VAL3} ${VAL4}
}
"""
self._testeditfile({'MULTILINE': (['d1','d2','d3'], None, 4, False),
'MULTILINE3': ('nowsingle', None, 4, True),
'NOTPRESENT': (['a', 'b'], None, 4, False)}, newfile2, dummyvars=['NOTPRESENT'])


def test_edit_metadata_file_3(self):

newfile3 = """
# A comment
HELLO = "oldvalue"

# Another comment
NOCHANGE = "samevalue"
OTHER = "yetanothervalue"

MULTILINE = "e1 \\
e2 \\
e3 \\
"

MULTILINE2 := "f1 \\
\tf2 \\
\t"


MULTILINE3 = " \\
c1 \\
c2 \\
c3 \\
"

do_functionname() {
othercommand_one a b c
othercommand_two d e f
}
"""

self._testeditfile({'do_functionname()': (['othercommand_one a b c', 'othercommand_two d e f'], None, 4, False),
'MULTILINE2': (['f1', 'f2'], None, '\t', True),
'MULTILINE': (['e1', 'e2', 'e3'], None, -1, True),
'THIS': (None, None, 0, False),
'OTHER': ('yetanothervalue', None, 0, True)}, newfile3)


def test_edit_metadata_file_4(self):

newfile4 = """
# A comment
HELLO = "oldvalue"

THIS = "that"

# Another comment
OTHER = 'anothervalue'

MULTILINE = "a1 \\
a2 \\
a3"

MULTILINE2 := " \\
b1 \\
b2 \\
b3 \\
"


"""

self._testeditfile({'NOCHANGE': (None, None, 0, False),
'MULTILINE3': (None, None, 0, False),
'THIS': ('that', None, 0, False),
'do_functionname()': (None, None, 0, False)}, newfile4)


def test_edit_metadata(self):
newfile5 = """
# A comment
HELLO = "hithere"

# A new comment
THIS += "that"

# Another comment
NOCHANGE = "samevalue"
OTHER = 'anothervalue'

MULTILINE = "a1 \\
a2 \\
a3"

MULTILINE2 := " \\
b1 \\
b2 \\
b3 \\
"


MULTILINE3 = " \\
c1 \\
c2 \\
c3 \\
"

NEWVAR = "value"

do_functionname() {
command1 ${VAL1} ${VAL2}
command2 ${VAL3} ${VAL4}
}
"""


def handle_var(varname, origvalue, op, newlines):
if varname == 'THIS':
newlines.append('# A new comment\n')
elif varname == 'do_functionname()':
newlines.append('NEWVAR = "value"\n')
newlines.append('\n')
valueitem = varvalues.get(varname, None)
if valueitem:
return valueitem
else:
return (origvalue, op, 0, True)

varvalues = {'HELLO': ('hithere', None, 0, True), 'THIS': ('that', '+=', 0, True)}
varlist = ['HELLO', 'THIS', 'do_functionname()']
(updated, newlines) = bb.utils.edit_metadata(self._origfile.splitlines(True), varlist, handle_var)
self.assertTrue(updated, 'List should be updated but isn\'t')
self.assertEqual(newlines, newfile5.splitlines(True))

@@ -95,7 +95,7 @@ class TinfoilConfigParameters(ConfigParameters):
self.initial_options = options
super(TinfoilConfigParameters, self).__init__()

def parseCommandLine(self, argv=sys.argv):
def parseCommandLine(self):
class DummyOptions:
def __init__(self, initial_options):
for key, val in initial_options.items():

File diff suppressed because it is too large
@@ -309,7 +309,7 @@ class Parameters:

def hob_conf_filter(fn, data):
if fn.endswith("/local.conf"):
distro = data.getVar("DISTRO_HOB", False)
distro = data.getVar("DISTRO_HOB")
if distro:
if distro != "defaultsetup":
data.setVar("DISTRO", distro)
@@ -320,13 +320,13 @@ def hob_conf_filter(fn, data):
"BB_NUMBER_THREADS_HOB", "PARALLEL_MAKE_HOB", "DL_DIR_HOB", \
"SSTATE_DIR_HOB", "SSTATE_MIRRORS_HOB", "INCOMPATIBLE_LICENSE_HOB"]
for key in keys:
var_hob = data.getVar(key, False)
var_hob = data.getVar(key)
if var_hob:
data.setVar(key.split("_HOB")[0], var_hob)
return

if fn.endswith("/bblayers.conf"):
layers = data.getVar("BBLAYERS_HOB", False)
layers = data.getVar("BBLAYERS_HOB")
if layers:
data.setVar("BBLAYERS", layers)
return

@@ -17,7 +17,6 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import sys
import gobject
import gtk
import Queue
@@ -216,12 +215,6 @@ def main(server, eventHandler, params):
print("XMLRPC Fault getting commandline:\n %s" % x)
return

try:
gtk.init_check()
except RuntimeError:
sys.stderr.write("Please set the DISPLAY variable before running this command\n")
return

shutdown = 0

gtkgui = gtkthread(shutdown)
@@ -243,7 +236,7 @@ def main(server, eventHandler, params):
try:
event = eventHandler.waitEvent(0.25)
if gtkthread.quit.isSet():
_, error = server.runCommand(["stateForceShutdown"])
_, error = server.runCommand(["stateStop"])
if error:
print('Unable to cleanly stop: %s' % error)
break

@@ -271,7 +271,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
server.terminateServer()
return

if consolelogfile and not params.options.show_environment and not params.options.show_versions:
if consolelogfile and not params.options.show_environment:
bb.utils.mkdirhier(os.path.dirname(consolelogfile))
conlogformat = bb.msg.BBLogFormatter(format_str)
consolelog = logging.FileHandler(consolelogfile)
@@ -284,7 +284,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):

if not params.observe_only:
params.updateFromServer(server)
params.updateToServer(server, os.environ.copy())
params.updateToServer(server)
cmdline = params.parseActions()
if not cmdline:
print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")

@@ -361,13 +361,13 @@ class NCursesUI:
shutdown = shutdown + 1
pass

def main(server, eventHandler, params):
def main(server, eventHandler):
if not os.isatty(sys.stdout.fileno()):
print("FATAL: Unable to run 'ncurses' UI without a TTY.")
return
ui = NCursesUI()
try:
curses.wrapper(ui.main, server, eventHandler, params)
curses.wrapper(ui.main, server, eventHandler)
except:
import traceback
traceback.print_exc()

@@ -41,9 +41,9 @@ import sys
import time
import xmlrpclib

featureSet = [bb.cooker.CookerFeatures.HOB_EXTRA_CACHES, bb.cooker.CookerFeatures.SEND_DEPENDS_TREE, bb.cooker.CookerFeatures.BASEDATASTORE_TRACKING, bb.cooker.CookerFeatures.SEND_SANITYEVENTS]
featureSet = [bb.cooker.CookerFeatures.HOB_EXTRA_CACHES, bb.cooker.CookerFeatures.SEND_DEPENDS_TREE, bb.cooker.CookerFeatures.BASEDATASTORE_TRACKING]

logger = logging.getLogger("ToasterLogger")
logger = logging.getLogger("BitBake")
interactive = sys.stdout.isatty()


@@ -58,14 +58,19 @@ def _log_settings_from_server(server):
if error:
logger.error("Unable to get the value of BBINCLUDELOGS_LINES variable: %s" % error)
raise BaseException(error)
consolelogfile, error = server.runCommand(["getVariable", "BB_CONSOLELOG"])
if error:
logger.error("Unable to get the value of BB_CONSOLELOG variable: %s" % error)
raise BaseException(error)
return includelogs, loglines, consolelogfile

return includelogs, loglines

def main(server, eventHandler, params):

includelogs, loglines = _log_settings_from_server(server)

# verify and warn
build_history_enabled = True
inheritlist, error = server.runCommand(["getVariable", "INHERIT"])
if not "buildhistory" in inheritlist.split(" "):
logger.warn("buildhistory is not enabled. Please enable INHERIT += \"buildhistory\" to see image details.")
build_history_enabled = False

helper = uihelper.BBUIHelper()

console = logging.StreamHandler(sys.stdout)
@@ -74,21 +79,10 @@ def main(server, eventHandler, params):
bb.msg.addDefaultlogFilter(console)
console.setFormatter(format)
logger.addHandler(console)
logger.setLevel(logging.INFO)

includelogs, loglines, consolelogfile = _log_settings_from_server(server)

# verify and warn
build_history_enabled = True
inheritlist, error = server.runCommand(["getVariable", "INHERIT"])

if not "buildhistory" in inheritlist.split(" "):
logger.warn("buildhistory is not enabled. Please enable INHERIT += \"buildhistory\" to see image details.")
build_history_enabled = False

if not params.observe_only:
logger.error("ToasterUI can only work in observer mode")
return 1
return


main.shutdown = 0
@@ -101,16 +95,6 @@ def main(server, eventHandler, params):

buildinfohelper = BuildInfoHelper(server, build_history_enabled)

if buildinfohelper.brbe is not None and consolelogfile:
# if we are under managed mode we have no other UI and we need to write our own file
bb.utils.mkdirhier(os.path.dirname(consolelogfile))
conlogformat = bb.msg.BBLogFormatter(format_str)
consolelog = logging.FileHandler(consolelogfile)
bb.msg.addDefaultlogFilter(consolelog)
consolelog.setFormatter(conlogformat)
logger.addHandler(consolelog)


while True:
try:
event = eventHandler.waitEvent(0.25)
@@ -130,23 +114,17 @@ def main(server, eventHandler, params):

if isinstance(event, (bb.build.TaskStarted, bb.build.TaskSucceeded, bb.build.TaskFailedSilent)):
buildinfohelper.update_and_store_task(event)
logger.warn("Logfile for task %s" % event.logfile)
continue

if isinstance(event, bb.build.TaskBase):
logger.info(event._message)

if isinstance(event, bb.event.LogExecTTY):
logger.warn(event.msg)
continue

if isinstance(event, logging.LogRecord):
if event.levelno == -1:
event.levelno = format.ERROR

buildinfohelper.store_log_event(event)
if event.levelno >= format.ERROR:
errors = errors + 1
return_value = 1
elif event.levelno == format.WARNING:
warnings = warnings + 1
# For "normal" logging conditions, don't show note logs from tasks
@@ -160,6 +138,7 @@ def main(server, eventHandler, params):

if isinstance(event, bb.build.TaskFailed):
buildinfohelper.update_and_store_task(event)
return_value = 1
logfile = event.logfile
if logfile and os.path.exists(logfile):
bb.error("Logfile of failure stored in: %s" % logfile)
@@ -182,13 +161,9 @@ def main(server, eventHandler, params):
if isinstance(event, bb.event.CacheLoadCompleted):
continue
if isinstance(event, bb.event.MultipleProviders):
logger.info("multiple providers are available for %s%s (%s)", event._is_runtime and "runtime " or "",
event._item,
", ".join(event._candidates))
logger.info("consider defining a PREFERRED_PROVIDER entry to match %s", event._item)
continue

if isinstance(event, bb.event.NoProvider):
return_value = 1
errors = errors + 1
if event._runtime:
r = "R"
@@ -238,39 +213,35 @@ def main(server, eventHandler, params):
if isinstance(event, (bb.event.TreeDataPreparationStarted, bb.event.TreeDataPreparationCompleted)):
continue

if isinstance(event, (bb.event.BuildCompleted, bb.command.CommandFailed)):

errorcode = 0
if (isinstance(event, bb.command.CommandFailed)):
errors += 1
errorcode = 1
logger.error("Command execution failed: %s", event.error)

# update the build info helper on BuildCompleted, not on CommandXXX
buildinfohelper.update_build_information(event, errors, warnings, taskfailures)
buildinfohelper.close(errorcode)
# mark the log output; controllers may kill the toasterUI after seeing this log
logger.info("ToasterUI build done 1, brbe: %s" % buildinfohelper.brbe)

# we start a new build info
if buildinfohelper.brbe is not None:

logger.debug(1, "ToasterUI under BuildEnvironment management - exiting after the build")
server.terminateServer()
else:
logger.debug(1, "ToasterUI prepared for new build")
errors = 0
warnings = 0
taskfailures = []
buildinfohelper = BuildInfoHelper(server, build_history_enabled)

logger.info("ToasterUI build done 2")
if isinstance(event, (bb.event.BuildCompleted)):
continue

if isinstance(event, (bb.command.CommandCompleted,
bb.command.CommandFailed,
bb.command.CommandExit)):
errorcode = 0
if (isinstance(event, bb.command.CommandFailed)):
event.levelno = format.ERROR
event.msg = "Command Failed " + event.error
event.pathname = ""
event.lineno = 0
buildinfohelper.store_log_event(event)
errors += 1

buildinfohelper.update_build_information(event, errors, warnings, taskfailures)
buildinfohelper.close()


# we start a new build info
if buildinfohelper.brbe is not None:

print "we are under BuildEnvironment management - after the build, we exit"
server.terminateServer()
else:
print "prepared for new build"
errors = 0
warnings = 0
taskfailures = []
buildinfohelper = BuildInfoHelper(server, build_history_enabled)

continue

@@ -287,12 +258,8 @@ def main(server, eventHandler, params):
buildinfohelper.store_missed_state_tasks(event)
elif event.type == "ImageFileSize":
buildinfohelper.update_target_image_file(event)
elif event.type == "ArtifactFileSize":
buildinfohelper.update_artifact_image_file(event)
elif event.type == "LicenseManifestPath":
buildinfohelper.store_license_manifest_path(event)
else:
logger.error("Unprocessed MetadataEvent %s " % str(event))
continue

if isinstance(event, bb.cooker.CookerExit):
@@ -316,7 +283,6 @@ def main(server, eventHandler, params):
continue

logger.error("Unknown event: %s", event)
return_value += 1

except EnvironmentError as ioerror:
# ignore interrupted io
@@ -326,32 +292,13 @@ def main(server, eventHandler, params):
main.shutdown = 1
pass
except Exception as e:
# print errors to log
logger.error(e)
import traceback
from pprint import pformat
exception_data = traceback.format_exc()
logger.error("%s\n%s" % (e, exception_data))

exc_type, exc_value, tb = sys.exc_info()
if tb is not None:
curr = tb
while curr is not None:
logger.warn("Error data dump %s\n%s\n" % (traceback.format_tb(curr,1), pformat(curr.tb_frame.f_locals)))
curr = curr.tb_next

# save them to the database, if possible; if it fails, we already logged to console.
try:
buildinfohelper.store_log_exception("%s\n%s" % (str(e), exception_data))
except Exception as ce:
logger.error("CRITICAL - Failed to save toaster exception to the database: %s" % str(ce))

# make sure we return with an error
return_value += 1
traceback.print_exc()
pass

if interrupted:
if return_value == 0:
return_value += 1
return_value = 1

logger.warn("Return value is %d", return_value)
return return_value

@@ -106,12 +106,7 @@ class BBUIEventQueue:

self.server.timeout = 1
while not self.server.quit:
try:
self.server.handle_request()
except Exception as e:
import traceback
logger.error("BBUIEventQueue.startCallbackHandler: Exception while trying to handle request: %s\n%s" % (e, traceback.format_exc(e)))

self.server.handle_request()
self.server.server_close()

def system_quit( self ):

@@ -54,9 +54,6 @@ def set_context(ctx):
# Context used in better_exec, eval
_context = clean_context()

class VersionStringException(Exception):
"""Exception raised when an invalid version specification is found"""

def explode_version(s):
r = []
alpha_regexp = re.compile('^([a-zA-Z]+)(.*)$')
@@ -132,28 +129,6 @@ def vercmp_string(a, b):
tb = split_version(b)
return vercmp(ta, tb)

def vercmp_string_op(a, b, op):
"""
Compare two versions and check if the specified comparison operator matches the result of the comparison.
This function is fairly liberal about what operators it will accept since there are a variety of styles
depending on the context.
"""
res = vercmp_string(a, b)
if op in ('=', '=='):
return res == 0
elif op == '<=':
return res <= 0
elif op == '>=':
return res >= 0
elif op in ('>', '>>'):
return res > 0
elif op in ('<', '<<'):
return res < 0
elif op == '!=':
return res != 0
else:
raise VersionStringException('Unsupported comparison operator "%s"' % op)
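
# For illustration only: a minimal sketch of how vercmp_string_op above behaves,
# assuming bb.utils is importable. It accepts both Python-style and dpkg-style
# operator spellings:
#
#   bb.utils.vercmp_string_op("1.0", "1.1", "<")    # True, "1.0" sorts before "1.1"
#   bb.utils.vercmp_string_op("2.0", "2.0", "==")   # True
#   bb.utils.vercmp_string_op("1.5", "1.2", ">>")   # True, ">>" is the dpkg-style ">"
#   bb.utils.vercmp_string_op("1.0", "2.0", "~")    # raises VersionStringException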

def explode_deps(s):
"""
Take an RDEPENDS style string of format:
@@ -214,7 +189,6 @@ def explode_dep_versions2(s):
i = i[1:]
else:
# This is an unsupported case!
raise VersionStringException('Invalid version specification in "(%s" - invalid or missing operator' % i)
lastcmp = (i or "")
i = ""
i.strip()
@@ -569,7 +543,7 @@ def filter_environment(good_vars):
os.unsetenv(key)
del os.environ[key]

if removed_vars:
if len(removed_vars):
logger.debug(1, "Removed the following variables from the environment: %s", ", ".join(removed_vars.keys()))

return removed_vars
@@ -929,17 +903,11 @@ def cpu_count():
def nonblockingfd(fd):
fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)

def process_profilelog(fn, pout = None):
# Either call with a list of filenames and set pout, or a filename and optionally pout.
if not pout:
pout = fn + '.processed'
pout = open(pout, 'w')
def process_profilelog(fn):
pout = open(fn + '.processed', 'w')

import pstats
if isinstance(fn, list):
p = pstats.Stats(*fn, stream=pout)
else:
p = pstats.Stats(fn, stream=pout)
p = pstats.Stats(fn, stream=pout)
p.sort_stats('time')
p.print_stats()
p.print_callers()
@@ -967,306 +935,3 @@ def multiprocessingpool(*args, **kwargs):

return multiprocessing.Pool(*args, **kwargs)

def exec_flat_python_func(func, *args, **kwargs):
"""Execute a flat python function (defined with def funcname(args):...)"""
# Prepare a small piece of python code which calls the requested function
# To do this we need to prepare two things - a set of variables we can use to pass
# the values of arguments into the calling function, and the list of arguments for
# the function being called
context = {}
funcargs = []
# Handle unnamed arguments
aidx = 1
for arg in args:
argname = 'arg_%s' % aidx
context[argname] = arg
funcargs.append(argname)
aidx += 1
# Handle keyword arguments
context.update(kwargs)
funcargs.extend(['%s=%s' % (arg, arg) for arg in kwargs.iterkeys()])
code = 'retval = %s(%s)' % (func, ', '.join(funcargs))
comp = bb.utils.better_compile(code, '<string>', '<string>')
bb.utils.better_exec(comp, context, code, '<string>')
return context['retval']
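
# For illustration only: exec_flat_python_func resolves the given name inside
# the better_exec context, so a dotted name that is visible there is the safe
# way to use it. This particular call is a hypothetical example, assuming the
# bb module is resolvable in that context:
#
#   bb.utils.exec_flat_python_func('bb.utils.vercmp_string', '1.0', '2.0')  # -> -1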

def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
"""Edit lines from a recipe or config file and modify one or more
specified variable values set in the file using a specified callback
function. Lines are expected to have trailing newlines.
Parameters:
meta_lines: lines from the file; can be a list or an iterable
(e.g. file pointer)
variables: a list of variable names to look for. Functions
may also be specified, but must be specified with '()' at
the end of the name. Note that the function doesn't have
any intrinsic understanding of _append, _prepend, _remove,
or overrides, so these are considered as part of the name.
These values go into a regular expression, so regular
expression syntax is allowed.
varfunc: callback function called for every variable matching
one of the entries in the variables parameter. The function
should take four arguments:
varname: name of variable matched
origvalue: current value in file
op: the operator (e.g. '+=')
newlines: list of lines up to this point. You can use
this to prepend lines before this variable setting
if you wish.
and should return a four-element tuple:
newvalue: new value to substitute in, or None to drop
the variable setting entirely. (If the removal
results in two consecutive blank lines, one of the
blank lines will also be dropped).
newop: the operator to use - if you specify None here,
the original operation will be used.
indent: number of spaces to indent multi-line entries,
or -1 to indent up to the level of the assignment
and opening quote, or a string to use as the indent.
minbreak: True to allow the first element of a
multi-line value to continue on the same line as
the assignment, False to indent before the first
element.
match_overrides: True to match items with _overrides on the end,
False otherwise
Returns a tuple:
updated:
True if changes were made, False otherwise.
newlines:
Lines after processing
"""

var_res = {}
if match_overrides:
override_re = '(_[a-zA-Z0-9-_$(){}]+)?'
else:
override_re = ''
for var in variables:
if var.endswith('()'):
var_res[var] = re.compile('^(%s%s)[ \\t]*\([ \\t]*\)[ \\t]*{' % (var[:-2].rstrip(), override_re))
else:
var_res[var] = re.compile('^(%s%s)[ \\t]*[?+:.]*=[+.]*[ \\t]*(["\'])' % (var, override_re))

updated = False
varset_start = ''
varlines = []
newlines = []
in_var = None
full_value = ''
var_end = ''

def handle_var_end():
prerun_newlines = newlines[:]
op = varset_start[len(in_var):].strip()
(newvalue, newop, indent, minbreak) = varfunc(in_var, full_value, op, newlines)
changed = (prerun_newlines != newlines)

if newvalue is None:
# Drop the value
return True
elif newvalue != full_value or (newop not in [None, op]):
if newop not in [None, op]:
# Callback changed the operator
varset_new = "%s %s" % (in_var, newop)
else:
varset_new = varset_start

if isinstance(indent, (int, long)):
if indent == -1:
indentspc = ' ' * (len(varset_new) + 2)
else:
indentspc = ' ' * indent
else:
indentspc = indent
if in_var.endswith('()'):
# A function definition
if isinstance(newvalue, list):
newlines.append('%s {\n%s%s\n}\n' % (varset_new, indentspc, ('\n%s' % indentspc).join(newvalue)))
else:
if not newvalue.startswith('\n'):
newvalue = '\n' + newvalue
if not newvalue.endswith('\n'):
newvalue = newvalue + '\n'
newlines.append('%s {%s}\n' % (varset_new, newvalue))
else:
# Normal variable
if isinstance(newvalue, list):
if not newvalue:
# Empty list -> empty string
newlines.append('%s ""\n' % varset_new)
elif minbreak:
# First item on first line
if len(newvalue) == 1:
newlines.append('%s "%s"\n' % (varset_new, newvalue[0]))
else:
newlines.append('%s "%s \\\n' % (varset_new, newvalue[0]))
for item in newvalue[1:]:
newlines.append('%s%s \\\n' % (indentspc, item))
newlines.append('%s"\n' % indentspc)
else:
# No item on first line
newlines.append('%s " \\\n' % varset_new)
for item in newvalue:
newlines.append('%s%s \\\n' % (indentspc, item))
newlines.append('%s"\n' % indentspc)
else:
newlines.append('%s "%s"\n' % (varset_new, newvalue))
return True
else:
# Put the old lines back where they were
newlines.extend(varlines)
# If newlines was touched by the function, we'll need to return True
return changed

checkspc = False

for line in meta_lines:
if in_var:
value = line.rstrip()
varlines.append(line)
if in_var.endswith('()'):
full_value += '\n' + value
else:
full_value += value[:-1]
if value.endswith(var_end):
if in_var.endswith('()'):
if full_value.count('{') - full_value.count('}') >= 0:
continue
full_value = full_value[:-1]
if handle_var_end():
updated = True
checkspc = True
in_var = None
else:
skip = False
for (varname, var_re) in var_res.iteritems():
res = var_re.match(line)
if res:
isfunc = varname.endswith('()')
if isfunc:
splitvalue = line.split('{', 1)
var_end = '}'
else:
var_end = res.groups()[-1]
splitvalue = line.split(var_end, 1)
varset_start = splitvalue[0].rstrip()
value = splitvalue[1].rstrip()
if not isfunc and value.endswith('\\'):
value = value[:-1]
full_value = value
varlines = [line]
in_var = res.group(1)
if isfunc:
in_var += '()'
if value.endswith(var_end):
full_value = full_value[:-1]
if handle_var_end():
updated = True
checkspc = True
in_var = None
skip = True
break
if not skip:
if checkspc:
checkspc = False
if newlines[-1] == '\n' and line == '\n':
|
||||
# Squash blank line if there are two consecutive blanks after a removal
|
||||
continue
|
||||
newlines.append(line)
|
||||
return (updated, newlines)
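
# Editor's illustrative sketch (not part of the original file): using
# edit_metadata() to force PV in some recipe lines to a new value. The
# callback returns the documented (newvalue, newop, indent, minbreak) tuple;
# newop=None keeps the original '=' operator.
def _editor_example_bump_pv():
    lines = ['SUMMARY = "example"\n', 'PV = "1.0"\n']

    def bump_pv(varname, origvalue, op, newlines):
        return ('2.0', None, 4, True)

    updated, newlines = edit_metadata(lines, ['PV'], bump_pv)
    assert updated
    assert newlines[1] == 'PV = "2.0"\n'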


def edit_metadata_file(meta_file, variables, varfunc):
    """Edit a recipe or config file and modify one or more specified
    variable values set in the file using a specified callback function.
    The file is only written to if the value(s) actually change.
    This is basically the file version of edit_metadata(), see that
    function's description for parameter/usage information.
    Returns True if the file was written to, False otherwise.
    """
    with open(meta_file, 'r') as f:
        (updated, newlines) = edit_metadata(f, variables, varfunc)
    if updated:
        with open(meta_file, 'w') as f:
            f.writelines(newlines)
    return updated
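
# Editor's illustrative sketch (not part of the original file): the file
# variant in action. The path and callback here are made up.
def _editor_example_set_dl_dir(conf_path):
    # conf_path: e.g. 'conf/local.conf' (illustrative)
    def set_dl_dir(varname, origvalue, op, newlines):
        # indent=-1 aligns any continuation lines with the opening quote
        return ('${TOPDIR}/downloads', None, -1, True)
    return edit_metadata_file(conf_path, ['DL_DIR'], set_dl_dir)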


def edit_bblayers_conf(bblayers_conf, add, remove):
    """Edit bblayers.conf, adding and/or removing layers"""

    import fnmatch

    def remove_trailing_sep(pth):
        if pth and pth[-1] == os.sep:
            pth = pth[:-1]
        return pth

    def layerlist_param(value):
        if not value:
            return []
        elif isinstance(value, list):
            return [remove_trailing_sep(x) for x in value]
        else:
            return [remove_trailing_sep(value)]

    notadded = []
    notremoved = []

    addlayers = layerlist_param(add)
    removelayers = layerlist_param(remove)

    # Need to use a list here because we can't set non-local variables from a callback in python 2.x
    bblayercalls = []

    def handle_bblayers(varname, origvalue, op, newlines):
        bblayercalls.append(varname)
        updated = False
        bblayers = [remove_trailing_sep(x) for x in origvalue.split()]
        if removelayers:
            for removelayer in removelayers:
                matched = False
                for layer in bblayers:
                    if fnmatch.fnmatch(layer, removelayer):
                        updated = True
                        matched = True
                        bblayers.remove(layer)
                        break
                if not matched:
                    notremoved.append(removelayer)
        if addlayers:
            for addlayer in addlayers:
                if addlayer not in bblayers:
                    updated = True
                    bblayers.append(addlayer)
                else:
                    notadded.append(addlayer)

        if updated:
            return (bblayers, None, 2, False)
        else:
            return (origvalue, None, 2, False)

    edit_metadata_file(bblayers_conf, ['BBLAYERS'], handle_bblayers)

    if not bblayercalls:
        raise Exception('Unable to find BBLAYERS in %s' % bblayers_conf)

    return (notadded, notremoved)
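
# Editor's illustrative sketch (not part of the original file): the paths
# are made up; removal entries may use fnmatch-style wildcards, as the
# code above shows.
def _editor_example_adjust_layers():
    notadded, notremoved = edit_bblayers_conf(
        'conf/bblayers.conf',            # illustrative path
        add='/srv/layers/meta-custom',
        remove='*/meta-obsolete')        # fnmatch-style pattern
    # notadded lists layers that were already present; notremoved lists
    # removal patterns that matched nothing.
    return notadded, notremoved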


def get_file_layer(filename, d):
    """Determine the collection (as defined by a layer's layer.conf file) containing the specified file"""
    collections = (d.getVar('BBFILE_COLLECTIONS', True) or '').split()
    collection_res = {}
    for collection in collections:
        collection_res[collection] = d.getVar('BBFILE_PATTERN_%s' % collection, True) or ''

    # Use longest path so we handle nested layers
    matchlen = 0
    match = None
    for collection, regex in collection_res.iteritems():
        if len(regex) > matchlen and re.match(regex, filename):
            matchlen = len(regex)
            match = collection
    return match
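
# Editor's illustrative sketch (not part of the original file): 'd' is
# assumed to be a BitBake datastore, e.g. from an event handler.
def _editor_example_lookup(d):
    # Returns the BBFILE_COLLECTIONS entry whose BBFILE_PATTERN_<collection>
    # regex matches the path, preferring the longest (most nested) match.
    return get_file_layer(
        '/srv/layers/meta-custom/recipes-foo/foo/foo_1.0.bb', d)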

@@ -1,43 +0,0 @@
Behold, mortal, the origins of Beautiful Soup...
================================================

Leonard Richardson is the primary programmer.

Aaron DeVore is awesome.

Mark Pilgrim provided the encoding detection code that forms the base
of UnicodeDammit.

Thomas Kluyver and Ezio Melotti finished the work of getting Beautiful
Soup 4 working under Python 3.

Simon Willison wrote soupselect, which was used to make Beautiful Soup
support CSS selectors.

Sam Ruby helped with a lot of edge cases.

Jonathan Ellis was awarded the prestigious Beau Potage D'Or for his
work in solving the nestable tags conundrum.

An incomplete list of people who have contributed patches to Beautiful
Soup:

Istvan Albert, Andrew Lin, Anthony Baxter, Andrew Boyko, Tony Chang,
Zephyr Fang, Fuzzy, Roman Gaufman, Yoni Gilad, Richie Hindle, Peteris
Krumins, Kent Johnson, Ben Last, Robert Leftwich, Staffan Malmgren,
Ksenia Marasanova, JP Moins, Adam Monsen, John Nagle, "Jon", Ed
Oskiewicz, Greg Phillips, Giles Radford, Arthur Rudolph, Marko
Samastur, Jouni Seppänen, Alexander Schmolck, Andy Theyers, Glyn
Webster, Paul Wright, Danny Yoo

An incomplete list of people who made suggestions or found bugs or
found ways to break Beautiful Soup:

Hanno Böck, Matteo Bertini, Chris Curvey, Simon Cusack, Bruce Eckel,
Matt Ernst, Michael Foord, Tom Harris, Bill de hOra, Donald Howes,
Matt Patterson, Scott Roberts, Steve Strassmann, Mike Williams,
warchild at redho dot com, Sami Kuisma, Carlos Rocha, Bob Hutchison,
Joren Mc, Michal Migurski, John Kleven, Tim Heaney, Tripp Lilley, Ed
Summers, Dennis Sutch, Chris Smith, Aaron Sweep^W Swartz, Stuart
Turner, Greg Edwards, Kevin J Kalupson, Nikos Kouremenos, Artur de
Sousa Rocha, Yichun Wei, Per Vognsen
@@ -1,26 +0,0 @@
Beautiful Soup is made available under the MIT license:

Copyright (c) 2004-2012 Leonard Richardson

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE, DAMMIT.

Beautiful Soup incorporates code from the html5lib library, which is
also made available under the MIT license.

File diff suppressed because it is too large
@@ -1,406 +0,0 @@
"""Beautiful Soup
Elixir and Tonic
"The Screen-Scraper's Friend"
http://www.crummy.com/software/BeautifulSoup/

Beautiful Soup uses a pluggable XML or HTML parser to parse a
(possibly invalid) document into a tree representation. Beautiful Soup
provides methods and Pythonic idioms that make it easy to
navigate, search, and modify the parse tree.

Beautiful Soup works with Python 2.6 and up. It works better if lxml
and/or html5lib is installed.

For more than you ever wanted to know about Beautiful Soup, see the
documentation:
http://www.crummy.com/software/BeautifulSoup/bs4/doc/
"""

__author__ = "Leonard Richardson (leonardr@segfault.org)"
__version__ = "4.3.2"
__copyright__ = "Copyright (c) 2004-2013 Leonard Richardson"
__license__ = "MIT"

__all__ = ['BeautifulSoup']

import os
import re
import warnings

from .builder import builder_registry, ParserRejectedMarkup
from .dammit import UnicodeDammit
from .element import (
    CData,
    Comment,
    DEFAULT_OUTPUT_ENCODING,
    Declaration,
    Doctype,
    NavigableString,
    PageElement,
    ProcessingInstruction,
    ResultSet,
    SoupStrainer,
    Tag,
    )

# The very first thing we do is give a useful error if someone is
# running this code under Python 3 without converting it.
syntax_error = u'You are trying to run the Python 2 version of Beautiful Soup under Python 3. This will not work. You need to convert the code, either by installing it (`python setup.py install`) or by running 2to3 (`2to3 -w bs4`).'

class BeautifulSoup(Tag):
    """
    This class defines the basic interface called by the tree builders.

    These methods will be called by the parser:
      reset()
      feed(markup)

    The tree builder may call these methods from its feed() implementation:
      handle_starttag(name, attrs) # See note about return value
      handle_endtag(name)
      handle_data(data) # Appends to the current data node
      endData(containerClass=NavigableString) # Ends the current data node

    No matter how complicated the underlying parser is, you should be
    able to build a tree using 'start tag' events, 'end tag' events,
    'data' events, and "done with data" events.

    If you encounter an empty-element tag (aka a self-closing tag,
    like HTML's <br> tag), call handle_starttag and then
    handle_endtag.
    """
    ROOT_TAG_NAME = u'[document]'

    # If the end-user gives no indication which tree builder they
    # want, look for one with these features.
    DEFAULT_BUILDER_FEATURES = ['html', 'fast']

    ASCII_SPACES = '\x20\x0a\x09\x0c\x0d'

    def __init__(self, markup="", features=None, builder=None,
                 parse_only=None, from_encoding=None, **kwargs):
        """The Soup object is initialized as the 'root tag', and the
        provided markup (which can be a string or a file-like object)
        is fed into the underlying parser."""

        if 'convertEntities' in kwargs:
            warnings.warn(
                "BS4 does not respect the convertEntities argument to the "
                "BeautifulSoup constructor. Entities are always converted "
                "to Unicode characters.")

        if 'markupMassage' in kwargs:
            del kwargs['markupMassage']
            warnings.warn(
                "BS4 does not respect the markupMassage argument to the "
                "BeautifulSoup constructor. The tree builder is responsible "
                "for any necessary markup massage.")

        if 'smartQuotesTo' in kwargs:
            del kwargs['smartQuotesTo']
            warnings.warn(
                "BS4 does not respect the smartQuotesTo argument to the "
                "BeautifulSoup constructor. Smart quotes are always converted "
                "to Unicode characters.")

        if 'selfClosingTags' in kwargs:
            del kwargs['selfClosingTags']
            warnings.warn(
                "BS4 does not respect the selfClosingTags argument to the "
                "BeautifulSoup constructor. The tree builder is responsible "
                "for understanding self-closing tags.")

        if 'isHTML' in kwargs:
            del kwargs['isHTML']
            warnings.warn(
                "BS4 does not respect the isHTML argument to the "
                "BeautifulSoup constructor. You can pass in features='html' "
                "or features='xml' to get a builder capable of handling "
                "one or the other.")

        def deprecated_argument(old_name, new_name):
            if old_name in kwargs:
                warnings.warn(
                    'The "%s" argument to the BeautifulSoup constructor '
                    'has been renamed to "%s."' % (old_name, new_name))
                value = kwargs[old_name]
                del kwargs[old_name]
                return value
            return None

        parse_only = parse_only or deprecated_argument(
            "parseOnlyThese", "parse_only")

        from_encoding = from_encoding or deprecated_argument(
            "fromEncoding", "from_encoding")

        if len(kwargs) > 0:
            arg = kwargs.keys().pop()
            raise TypeError(
                "__init__() got an unexpected keyword argument '%s'" % arg)

        if builder is None:
            if isinstance(features, basestring):
                features = [features]
            if features is None or len(features) == 0:
                features = self.DEFAULT_BUILDER_FEATURES
            builder_class = builder_registry.lookup(*features)
            if builder_class is None:
                raise FeatureNotFound(
                    "Couldn't find a tree builder with the features you "
                    "requested: %s. Do you need to install a parser library?"
                    % ",".join(features))
            builder = builder_class()
        self.builder = builder
        self.is_xml = builder.is_xml
        self.builder.soup = self

        self.parse_only = parse_only

        if hasattr(markup, 'read'):        # It's a file-type object.
            markup = markup.read()
        elif len(markup) <= 256:
            # Print out warnings for a couple beginner problems
            # involving passing non-markup to Beautiful Soup.
            # Beautiful Soup will still parse the input as markup,
            # just in case that's what the user really wants.
            if (isinstance(markup, unicode)
                and not os.path.supports_unicode_filenames):
                possible_filename = markup.encode("utf8")
            else:
                possible_filename = markup
            is_file = False
            try:
                is_file = os.path.exists(possible_filename)
            except Exception, e:
                # This is almost certainly a problem involving
                # characters not valid in filenames on this
                # system. Just let it go.
                pass
            if is_file:
                warnings.warn(
                    '"%s" looks like a filename, not markup. You should probably open this file and pass the filehandle into Beautiful Soup.' % markup)
            if markup[:5] == "http:" or markup[:6] == "https:":
                # TODO: This is ugly but I couldn't get it to work in
                # Python 3 otherwise.
                if ((isinstance(markup, bytes) and not b' ' in markup)
                    or (isinstance(markup, unicode) and not u' ' in markup)):
                    warnings.warn(
                        '"%s" looks like a URL. Beautiful Soup is not an HTTP client. You should probably use an HTTP client to get the document behind the URL, and feed that document to Beautiful Soup.' % markup)

        for (self.markup, self.original_encoding, self.declared_html_encoding,
             self.contains_replacement_characters) in (
            self.builder.prepare_markup(markup, from_encoding)):
            self.reset()
            try:
                self._feed()
                break
            except ParserRejectedMarkup:
                pass

        # Clear out the markup and remove the builder's circular
        # reference to this object.
        self.markup = None
        self.builder.soup = None

    def _feed(self):
        # Convert the document to Unicode.
        self.builder.reset()

        self.builder.feed(self.markup)
        # Close out any unfinished strings and close all the open tags.
        self.endData()
        while self.currentTag.name != self.ROOT_TAG_NAME:
            self.popTag()

    def reset(self):
        Tag.__init__(self, self, self.builder, self.ROOT_TAG_NAME)
        self.hidden = 1
        self.builder.reset()
        self.current_data = []
        self.currentTag = None
        self.tagStack = []
        self.preserve_whitespace_tag_stack = []
        self.pushTag(self)

    def new_tag(self, name, namespace=None, nsprefix=None, **attrs):
        """Create a new tag associated with this soup."""
        return Tag(None, self.builder, name, namespace, nsprefix, attrs)

    def new_string(self, s, subclass=NavigableString):
        """Create a new NavigableString associated with this soup."""
        navigable = subclass(s)
        navigable.setup()
        return navigable

    def insert_before(self, successor):
        raise NotImplementedError("BeautifulSoup objects don't support insert_before().")

    def insert_after(self, successor):
        raise NotImplementedError("BeautifulSoup objects don't support insert_after().")

    def popTag(self):
        tag = self.tagStack.pop()
        if self.preserve_whitespace_tag_stack and tag == self.preserve_whitespace_tag_stack[-1]:
            self.preserve_whitespace_tag_stack.pop()
        #print "Pop", tag.name
        if self.tagStack:
            self.currentTag = self.tagStack[-1]
        return self.currentTag

    def pushTag(self, tag):
        #print "Push", tag.name
        if self.currentTag:
            self.currentTag.contents.append(tag)
        self.tagStack.append(tag)
        self.currentTag = self.tagStack[-1]
        if tag.name in self.builder.preserve_whitespace_tags:
            self.preserve_whitespace_tag_stack.append(tag)

    def endData(self, containerClass=NavigableString):
        if self.current_data:
            current_data = u''.join(self.current_data)
            # If whitespace is not preserved, and this string contains
            # nothing but ASCII spaces, replace it with a single space
            # or newline.
            if not self.preserve_whitespace_tag_stack:
                strippable = True
                for i in current_data:
                    if i not in self.ASCII_SPACES:
                        strippable = False
                        break
                if strippable:
                    if '\n' in current_data:
                        current_data = '\n'
                    else:
                        current_data = ' '

            # Reset the data collector.
            self.current_data = []

            # Should we add this string to the tree at all?
            if self.parse_only and len(self.tagStack) <= 1 and \
                   (not self.parse_only.text or \
                    not self.parse_only.search(current_data)):
                return

            o = containerClass(current_data)
            self.object_was_parsed(o)

    def object_was_parsed(self, o, parent=None, most_recent_element=None):
        """Add an object to the parse tree."""
        parent = parent or self.currentTag
        most_recent_element = most_recent_element or self._most_recent_element
        o.setup(parent, most_recent_element)

        if most_recent_element is not None:
            most_recent_element.next_element = o
        self._most_recent_element = o
        parent.contents.append(o)

    def _popToTag(self, name, nsprefix=None, inclusivePop=True):
        """Pops the tag stack up to and including the most recent
        instance of the given tag. If inclusivePop is false, pops the tag
        stack up to but *not* including the most recent instance of
        the given tag."""
        #print "Popping to %s" % name
        if name == self.ROOT_TAG_NAME:
            # The BeautifulSoup object itself can never be popped.
            return

        most_recently_popped = None

        stack_size = len(self.tagStack)
        for i in range(stack_size - 1, 0, -1):
            t = self.tagStack[i]
            if (name == t.name and nsprefix == t.prefix):
                if inclusivePop:
                    most_recently_popped = self.popTag()
                break
            most_recently_popped = self.popTag()

        return most_recently_popped

    def handle_starttag(self, name, namespace, nsprefix, attrs):
        """Push a start tag on to the stack.

        If this method returns None, the tag was rejected by the
        SoupStrainer. You should proceed as if the tag had not occurred
        in the document. For instance, if this was a self-closing tag,
        don't call handle_endtag.
        """

        # print "Start tag %s: %s" % (name, attrs)
        self.endData()

        if (self.parse_only and len(self.tagStack) <= 1
            and (self.parse_only.text
                 or not self.parse_only.search_tag(name, attrs))):
            return None

        tag = Tag(self, self.builder, name, namespace, nsprefix, attrs,
                  self.currentTag, self._most_recent_element)
        if tag is None:
            return tag
        if self._most_recent_element:
            self._most_recent_element.next_element = tag
        self._most_recent_element = tag
        self.pushTag(tag)
        return tag

    def handle_endtag(self, name, nsprefix=None):
        #print "End tag: " + name
        self.endData()
        self._popToTag(name, nsprefix)

    def handle_data(self, data):
        self.current_data.append(data)

    def decode(self, pretty_print=False,
               eventual_encoding=DEFAULT_OUTPUT_ENCODING,
               formatter="minimal"):
        """Returns a string or Unicode representation of this document.
        To get Unicode, pass None for encoding."""

        if self.is_xml:
            # Print the XML declaration
            encoding_part = ''
            if eventual_encoding != None:
                encoding_part = ' encoding="%s"' % eventual_encoding
            prefix = u'<?xml version="1.0"%s?>\n' % encoding_part
        else:
            prefix = u''
        if not pretty_print:
            indent_level = None
        else:
            indent_level = 0
        return prefix + super(BeautifulSoup, self).decode(
            indent_level, eventual_encoding, formatter)

# Alias to make it easier to type import: 'from bs4 import _soup'
_s = BeautifulSoup
_soup = BeautifulSoup

class BeautifulStoneSoup(BeautifulSoup):
    """Deprecated interface to an XML parser."""

    def __init__(self, *args, **kwargs):
        kwargs['features'] = 'xml'
        warnings.warn(
            'The BeautifulStoneSoup class is deprecated. Instead of using '
            'it, pass features="xml" into the BeautifulSoup constructor.')
        super(BeautifulStoneSoup, self).__init__(*args, **kwargs)


class StopParsing(Exception):
    pass

class FeatureNotFound(ValueError):
    pass


#By default, act as an HTML pretty-printer.
if __name__ == '__main__':
    import sys
    soup = BeautifulSoup(sys.stdin)
    print soup.prettify()
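
# Editor's illustrative sketch (not part of the original file): basic usage
# of the module above (bs4 4.3.2 era), letting the registry pick a builder.
from bs4 import BeautifulSoup

soup = BeautifulSoup('<html><body><p class="intro">Hello</p></body></html>')
assert soup.p.string == u'Hello'
assert soup.p['class'] == ['intro']  # a cdata-list attribute; see the
                                     # builder module below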
@@ -1,321 +0,0 @@
from collections import defaultdict
import itertools
import sys
from bs4.element import (
    CharsetMetaAttributeValue,
    ContentMetaAttributeValue,
    whitespace_re
    )

__all__ = [
    'HTMLTreeBuilder',
    'SAXTreeBuilder',
    'TreeBuilder',
    'TreeBuilderRegistry',
    ]

# Some useful features for a TreeBuilder to have.
FAST = 'fast'
PERMISSIVE = 'permissive'
STRICT = 'strict'
XML = 'xml'
HTML = 'html'
HTML_5 = 'html5'


class TreeBuilderRegistry(object):

    def __init__(self):
        self.builders_for_feature = defaultdict(list)
        self.builders = []

    def register(self, treebuilder_class):
        """Register a treebuilder based on its advertised features."""
        for feature in treebuilder_class.features:
            self.builders_for_feature[feature].insert(0, treebuilder_class)
        self.builders.insert(0, treebuilder_class)

    def lookup(self, *features):
        if len(self.builders) == 0:
            # There are no builders at all.
            return None

        if len(features) == 0:
            # They didn't ask for any features. Give them the most
            # recently registered builder.
            return self.builders[0]

        # Go down the list of features in order, and eliminate any builders
        # that don't match every feature.
        features = list(features)
        features.reverse()
        candidates = None
        candidate_set = None
        while len(features) > 0:
            feature = features.pop()
            we_have_the_feature = self.builders_for_feature.get(feature, [])
            if len(we_have_the_feature) > 0:
                if candidates is None:
                    candidates = we_have_the_feature
                    candidate_set = set(candidates)
                else:
                    # Eliminate any candidates that don't have this feature.
                    candidate_set = candidate_set.intersection(
                        set(we_have_the_feature))

        # The only valid candidates are the ones in candidate_set.
        # Go through the original list of candidates and pick the first one
        # that's in candidate_set.
        if candidate_set is None:
            return None
        for candidate in candidates:
            if candidate in candidate_set:
                return candidate
        return None

# The BeautifulSoup class will take feature lists from developers and use them
# to look up builders in this registry.
builder_registry = TreeBuilderRegistry()
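
# Editor's illustrative sketch (not part of the original file): how the
# registry comment above plays out in practice. Which class comes back
# depends on what parser libraries happen to be installed.
def _editor_example_pick_builder():
    builder_class = builder_registry.lookup('html', 'fast')
    if builder_class is not None:
        return builder_class()
    return None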

class TreeBuilder(object):
    """Turn a document into a Beautiful Soup object tree."""

    features = []

    is_xml = False
    preserve_whitespace_tags = set()
    empty_element_tags = None # A tag will be considered an empty-element
                              # tag when and only when it has no contents.

    # A value for these tag/attribute combinations is a space- or
    # comma-separated list of CDATA, rather than a single CDATA.
    cdata_list_attributes = {}


    def __init__(self):
        self.soup = None

    def reset(self):
        pass

    def can_be_empty_element(self, tag_name):
        """Might a tag with this name be an empty-element tag?

        The final markup may or may not actually present this tag as
        self-closing.

        For instance: an HTMLBuilder does not consider a <p> tag to be
        an empty-element tag (it's not in
        HTMLBuilder.empty_element_tags). This means an empty <p> tag
        will be presented as "<p></p>", not "<p />".

        The default implementation has no opinion about which tags are
        empty-element tags, so a tag will be presented as an
        empty-element tag if and only if it has no contents.
        "<foo></foo>" will become "<foo />", and "<foo>bar</foo>" will
        be left alone.
        """
        if self.empty_element_tags is None:
            return True
        return tag_name in self.empty_element_tags

    def feed(self, markup):
        raise NotImplementedError()

    def prepare_markup(self, markup, user_specified_encoding=None,
                       document_declared_encoding=None):
        return markup, None, None, False

    def test_fragment_to_document(self, fragment):
        """Wrap an HTML fragment to make it look like a document.

        Different parsers do this differently. For instance, lxml
        introduces an empty <head> tag, and html5lib
        doesn't. Abstracting this away lets us write simple tests
        which run HTML fragments through the parser and compare the
        results against other HTML fragments.

        This method should not be used outside of tests.
        """
        return fragment

    def set_up_substitutions(self, tag):
        return False

    def _replace_cdata_list_attribute_values(self, tag_name, attrs):
        """Replaces class="foo bar" with class=["foo", "bar"]

        Modifies its input in place.
        """
        if not attrs:
            return attrs
        if self.cdata_list_attributes:
            universal = self.cdata_list_attributes.get('*', [])
            tag_specific = self.cdata_list_attributes.get(
                tag_name.lower(), None)
            for attr in attrs.keys():
                if attr in universal or (tag_specific and attr in tag_specific):
                    # We have a "class"-type attribute whose string
                    # value is a whitespace-separated list of
                    # values. Split it into a list.
                    value = attrs[attr]
                    if isinstance(value, basestring):
                        values = whitespace_re.split(value)
                    else:
                        # html5lib sometimes calls setAttributes twice
                        # for the same tag when rearranging the parse
                        # tree. On the second call the attribute value
                        # here is already a list. If this happens,
                        # leave the value alone rather than trying to
                        # split it again.
                        values = value
                    attrs[attr] = values
        return attrs

class SAXTreeBuilder(TreeBuilder):
    """A Beautiful Soup treebuilder that listens for SAX events."""

    def feed(self, markup):
        raise NotImplementedError()

    def close(self):
        pass

    def startElement(self, name, attrs):
        attrs = dict((key[1], value) for key, value in list(attrs.items()))
        #print "Start %s, %r" % (name, attrs)
        self.soup.handle_starttag(name, attrs)

    def endElement(self, name):
        #print "End %s" % name
        self.soup.handle_endtag(name)

    def startElementNS(self, nsTuple, nodeName, attrs):
        # Throw away (ns, nodeName) for now.
        self.startElement(nodeName, attrs)

    def endElementNS(self, nsTuple, nodeName):
        # Throw away (ns, nodeName) for now.
        self.endElement(nodeName)
        #handler.endElementNS((ns, node.nodeName), node.nodeName)

    def startPrefixMapping(self, prefix, nodeValue):
        # Ignore the prefix for now.
        pass

    def endPrefixMapping(self, prefix):
        # Ignore the prefix for now.
        # handler.endPrefixMapping(prefix)
        pass

    def characters(self, content):
        self.soup.handle_data(content)

    def startDocument(self):
        pass

    def endDocument(self):
        pass


class HTMLTreeBuilder(TreeBuilder):
    """This TreeBuilder knows facts about HTML.

    Such as which tags are empty-element tags.
    """

    preserve_whitespace_tags = set(['pre', 'textarea'])
    empty_element_tags = set(['br' , 'hr', 'input', 'img', 'meta',
                              'spacer', 'link', 'frame', 'base'])

    # The HTML standard defines these attributes as containing a
    # space-separated list of values, not a single value. That is,
    # class="foo bar" means that the 'class' attribute has two values,
    # 'foo' and 'bar', not the single value 'foo bar'. When we
    # encounter one of these attributes, we will parse its value into
    # a list of values if possible. Upon output, the list will be
    # converted back into a string.
    cdata_list_attributes = {
        "*" : ['class', 'accesskey', 'dropzone'],
        "a" : ['rel', 'rev'],
        "link" : ['rel', 'rev'],
        "td" : ["headers"],
        "th" : ["headers"],
        "form" : ["accept-charset"],
        "object" : ["archive"],

        # These are HTML5 specific, as are *.accesskey and *.dropzone above.
        "area" : ["rel"],
        "icon" : ["sizes"],
        "iframe" : ["sandbox"],
        "output" : ["for"],
        }

    def set_up_substitutions(self, tag):
        # We are only interested in <meta> tags
        if tag.name != 'meta':
            return False

        http_equiv = tag.get('http-equiv')
        content = tag.get('content')
        charset = tag.get('charset')

        # We are interested in <meta> tags that say what encoding the
        # document was originally in. This means HTML 5-style <meta>
        # tags that provide the "charset" attribute. It also means
        # HTML 4-style <meta> tags that provide the "content"
        # attribute and have "http-equiv" set to "content-type".
        #
        # In both cases we will replace the value of the appropriate
        # attribute with a standin object that can take on any
        # encoding.
        meta_encoding = None
        if charset is not None:
            # HTML 5 style:
            # <meta charset="utf8">
            meta_encoding = charset
            tag['charset'] = CharsetMetaAttributeValue(charset)

        elif (content is not None and http_equiv is not None
              and http_equiv.lower() == 'content-type'):
            # HTML 4 style:
            # <meta http-equiv="content-type" content="text/html; charset=utf8">
            tag['content'] = ContentMetaAttributeValue(content)

        return (meta_encoding is not None)

def register_treebuilders_from(module):
    """Copy TreeBuilders from the given module into this module."""
    # I'm fairly sure this is not the best way to do this.
    this_module = sys.modules['bs4.builder']
    for name in module.__all__:
        obj = getattr(module, name)

        if issubclass(obj, TreeBuilder):
            setattr(this_module, name, obj)
            this_module.__all__.append(name)
            # Register the builder while we're at it.
            this_module.builder_registry.register(obj)

class ParserRejectedMarkup(Exception):
    pass

# Builders are registered in reverse order of priority, so that custom
# builder registrations will take precedence. In general, we want lxml
# to take precedence over html5lib, because it's faster. And we only
# want to use HTMLParser as a last resort.
from . import _htmlparser
register_treebuilders_from(_htmlparser)
try:
    from . import _html5lib
    register_treebuilders_from(_html5lib)
except ImportError:
    # They don't have html5lib installed.
    pass
try:
    from . import _lxml
    register_treebuilders_from(_lxml)
except ImportError:
    # They don't have lxml installed.
    pass
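
# Editor's illustrative sketch (not part of the original file): the effect
# of the cdata_list_attributes table defined in HTMLTreeBuilder above.
from bs4 import BeautifulSoup

soup = BeautifulSoup('<p class="foo bar" id="x">hi</p>')
assert soup.p['class'] == ['foo', 'bar']  # multi-valued per the HTML standard
assert soup.p['id'] == 'x'                # still a plain string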
@@ -1,285 +0,0 @@
__all__ = [
    'HTML5TreeBuilder',
    ]

import warnings
from bs4.builder import (
    PERMISSIVE,
    HTML,
    HTML_5,
    HTMLTreeBuilder,
    )
from bs4.element import NamespacedAttribute
import html5lib
from html5lib.constants import namespaces
from bs4.element import (
    Comment,
    Doctype,
    NavigableString,
    Tag,
    )

class HTML5TreeBuilder(HTMLTreeBuilder):
    """Use html5lib to build a tree."""

    features = ['html5lib', PERMISSIVE, HTML_5, HTML]

    def prepare_markup(self, markup, user_specified_encoding):
        # Store the user-specified encoding for use later on.
        self.user_specified_encoding = user_specified_encoding
        yield (markup, None, None, False)

    # These methods are defined by Beautiful Soup.
    def feed(self, markup):
        if self.soup.parse_only is not None:
            warnings.warn("You provided a value for parse_only, but the html5lib tree builder doesn't support parse_only. The entire document will be parsed.")
        parser = html5lib.HTMLParser(tree=self.create_treebuilder)
        doc = parser.parse(markup, encoding=self.user_specified_encoding)

        # Set the character encoding detected by the tokenizer.
        if isinstance(markup, unicode):
            # We need to special-case this because html5lib sets
            # charEncoding to UTF-8 if it gets Unicode input.
            doc.original_encoding = None
        else:
            doc.original_encoding = parser.tokenizer.stream.charEncoding[0]

    def create_treebuilder(self, namespaceHTMLElements):
        self.underlying_builder = TreeBuilderForHtml5lib(
            self.soup, namespaceHTMLElements)
        return self.underlying_builder

    def test_fragment_to_document(self, fragment):
        """See `TreeBuilder`."""
        return u'<html><head></head><body>%s</body></html>' % fragment


class TreeBuilderForHtml5lib(html5lib.treebuilders._base.TreeBuilder):

    def __init__(self, soup, namespaceHTMLElements):
        self.soup = soup
        super(TreeBuilderForHtml5lib, self).__init__(namespaceHTMLElements)

    def documentClass(self):
        self.soup.reset()
        return Element(self.soup, self.soup, None)

    def insertDoctype(self, token):
        name = token["name"]
        publicId = token["publicId"]
        systemId = token["systemId"]

        doctype = Doctype.for_name_and_ids(name, publicId, systemId)
        self.soup.object_was_parsed(doctype)

    def elementClass(self, name, namespace):
        tag = self.soup.new_tag(name, namespace)
        return Element(tag, self.soup, namespace)

    def commentClass(self, data):
        return TextNode(Comment(data), self.soup)

    def fragmentClass(self):
        self.soup = BeautifulSoup("")
        self.soup.name = "[document_fragment]"
        return Element(self.soup, self.soup, None)

    def appendChild(self, node):
        # XXX This code is not covered by the BS4 tests.
        self.soup.append(node.element)

    def getDocument(self):
        return self.soup

    def getFragment(self):
        return html5lib.treebuilders._base.TreeBuilder.getFragment(self).element

class AttrList(object):
    def __init__(self, element):
        self.element = element
        self.attrs = dict(self.element.attrs)
    def __iter__(self):
        return list(self.attrs.items()).__iter__()
    def __setitem__(self, name, value):
        "set attr", name, value
        self.element[name] = value
    def items(self):
        return list(self.attrs.items())
    def keys(self):
        return list(self.attrs.keys())
    def __len__(self):
        return len(self.attrs)
    def __getitem__(self, name):
        return self.attrs[name]
    def __contains__(self, name):
        return name in list(self.attrs.keys())


class Element(html5lib.treebuilders._base.Node):
    def __init__(self, element, soup, namespace):
        html5lib.treebuilders._base.Node.__init__(self, element.name)
        self.element = element
        self.soup = soup
        self.namespace = namespace

    def appendChild(self, node):
        string_child = child = None
        if isinstance(node, basestring):
            # Some other piece of code decided to pass in a string
            # instead of creating a TextElement object to contain the
            # string.
            string_child = child = node
        elif isinstance(node, Tag):
            # Some other piece of code decided to pass in a Tag
            # instead of creating an Element object to contain the
            # Tag.
            child = node
        elif node.element.__class__ == NavigableString:
            string_child = child = node.element
        else:
            child = node.element

        if not isinstance(child, basestring) and child.parent is not None:
            node.element.extract()

        if (string_child and self.element.contents
            and self.element.contents[-1].__class__ == NavigableString):
            # We are appending a string onto another string.
            # TODO This has O(n^2) performance, for input like
            # "a</a>a</a>a</a>..."
            old_element = self.element.contents[-1]
            new_element = self.soup.new_string(old_element + string_child)
            old_element.replace_with(new_element)
            self.soup._most_recent_element = new_element
        else:
            if isinstance(node, basestring):
                # Create a brand new NavigableString from this string.
                child = self.soup.new_string(node)

            # Tell Beautiful Soup to act as if it parsed this element
            # immediately after the parent's last descendant. (Or
            # immediately after the parent, if it has no children.)
            if self.element.contents:
                most_recent_element = self.element._last_descendant(False)
            else:
                most_recent_element = self.element

            self.soup.object_was_parsed(
                child, parent=self.element,
                most_recent_element=most_recent_element)

    def getAttributes(self):
        return AttrList(self.element)

    def setAttributes(self, attributes):
        if attributes is not None and len(attributes) > 0:

            converted_attributes = []
            for name, value in list(attributes.items()):
                if isinstance(name, tuple):
                    new_name = NamespacedAttribute(*name)
                    del attributes[name]
                    attributes[new_name] = value

            self.soup.builder._replace_cdata_list_attribute_values(
                self.name, attributes)
            for name, value in attributes.items():
                self.element[name] = value

            # The attributes may contain variables that need substitution.
            # Call set_up_substitutions manually.
            #
            # The Tag constructor called this method when the Tag was created,
            # but we just set/changed the attributes, so call it again.
            self.soup.builder.set_up_substitutions(self.element)
    attributes = property(getAttributes, setAttributes)

    def insertText(self, data, insertBefore=None):
        if insertBefore:
            text = TextNode(self.soup.new_string(data), self.soup)
            self.insertBefore(data, insertBefore)
        else:
            self.appendChild(data)

    def insertBefore(self, node, refNode):
        index = self.element.index(refNode.element)
        if (node.element.__class__ == NavigableString and self.element.contents
            and self.element.contents[index-1].__class__ == NavigableString):
            # (See comments in appendChild)
            old_node = self.element.contents[index-1]
            new_str = self.soup.new_string(old_node + node.element)
            old_node.replace_with(new_str)
        else:
            self.element.insert(index, node.element)
            node.parent = self

    def removeChild(self, node):
        node.element.extract()

    def reparentChildren(self, new_parent):
        """Move all of this tag's children into another tag."""
        element = self.element
        new_parent_element = new_parent.element
        # Determine what this tag's next_element will be once all the children
        # are removed.
        final_next_element = element.next_sibling

        new_parents_last_descendant = new_parent_element._last_descendant(False, False)
        if len(new_parent_element.contents) > 0:
            # The new parent already contains children. We will be
            # appending this tag's children to the end.
            new_parents_last_child = new_parent_element.contents[-1]
            new_parents_last_descendant_next_element = new_parents_last_descendant.next_element
        else:
            # The new parent contains no children.
            new_parents_last_child = None
            new_parents_last_descendant_next_element = new_parent_element.next_element

        to_append = element.contents
        append_after = new_parent.element.contents
        if len(to_append) > 0:
            # Set the first child's previous_element and previous_sibling
            # to elements within the new parent
            first_child = to_append[0]
            first_child.previous_element = new_parents_last_descendant
            first_child.previous_sibling = new_parents_last_child

            # Fix the last child's next_element and next_sibling
            last_child = to_append[-1]
            last_child.next_element = new_parents_last_descendant_next_element
            last_child.next_sibling = None

        for child in to_append:
            child.parent = new_parent_element
            new_parent_element.contents.append(child)

        # Now that this element has no children, change its .next_element.
        element.contents = []
        element.next_element = final_next_element

    def cloneNode(self):
        tag = self.soup.new_tag(self.element.name, self.namespace)
        node = Element(tag, self.soup, self.namespace)
        for key,value in self.attributes:
            node.attributes[key] = value
        return node

    def hasContent(self):
        return self.element.contents

    def getNameTuple(self):
        if self.namespace == None:
            return namespaces["html"], self.name
        else:
            return self.namespace, self.name

    nameTuple = property(getNameTuple)

class TextNode(Element):
    def __init__(self, element, soup):
        html5lib.treebuilders._base.Node.__init__(self, None)
        self.element = element
        self.soup = soup

    def cloneNode(self):
        raise NotImplementedError
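
# Editor's illustrative sketch (not part of the original file): selecting
# the builder above explicitly via its 'html5lib' feature string (requires
# html5lib to be installed).
#
#   from bs4 import BeautifulSoup
#   soup = BeautifulSoup('<p>unclosed <b>markup', 'html5lib')
#   # html5lib repairs the document the way a browser would, wrapping it
#   # in full <html><head></head><body>...</body></html> structure.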
@@ -1,258 +0,0 @@
"""Use the HTMLParser library to parse HTML files that aren't too bad."""

__all__ = [
    'HTMLParserTreeBuilder',
    ]

from HTMLParser import (
    HTMLParser,
    HTMLParseError,
    )
import sys
import warnings

# Starting in Python 3.2, the HTMLParser constructor takes a 'strict'
# argument, which we'd like to set to False. Unfortunately,
# http://bugs.python.org/issue13273 makes strict=True a better bet
# before Python 3.2.3.
#
# At the end of this file, we monkeypatch HTMLParser so that
# strict=True works well on Python 3.2.2.
major, minor, release = sys.version_info[:3]
CONSTRUCTOR_TAKES_STRICT = (
    major > 3
    or (major == 3 and minor > 2)
    or (major == 3 and minor == 2 and release >= 3))

from bs4.element import (
    CData,
    Comment,
    Declaration,
    Doctype,
    ProcessingInstruction,
    )
from bs4.dammit import EntitySubstitution, UnicodeDammit

from bs4.builder import (
    HTML,
    HTMLTreeBuilder,
    STRICT,
    )


HTMLPARSER = 'html.parser'

class BeautifulSoupHTMLParser(HTMLParser):
    def handle_starttag(self, name, attrs):
        # XXX namespace
        attr_dict = {}
        for key, value in attrs:
            # Change None attribute values to the empty string
            # for consistency with the other tree builders.
            if value is None:
                value = ''
            attr_dict[key] = value
            attrvalue = '""'
        self.soup.handle_starttag(name, None, None, attr_dict)

    def handle_endtag(self, name):
        self.soup.handle_endtag(name)

    def handle_data(self, data):
        self.soup.handle_data(data)

    def handle_charref(self, name):
        # XXX workaround for a bug in HTMLParser. Remove this once
        # it's fixed.
        if name.startswith('x'):
            real_name = int(name.lstrip('x'), 16)
        elif name.startswith('X'):
            real_name = int(name.lstrip('X'), 16)
        else:
            real_name = int(name)

        try:
            data = unichr(real_name)
        except (ValueError, OverflowError), e:
            data = u"\N{REPLACEMENT CHARACTER}"

        self.handle_data(data)

    def handle_entityref(self, name):
        character = EntitySubstitution.HTML_ENTITY_TO_CHARACTER.get(name)
        if character is not None:
            data = character
        else:
            data = "&%s;" % name
        self.handle_data(data)

    def handle_comment(self, data):
        self.soup.endData()
        self.soup.handle_data(data)
        self.soup.endData(Comment)

    def handle_decl(self, data):
        self.soup.endData()
        if data.startswith("DOCTYPE "):
            data = data[len("DOCTYPE "):]
        elif data == 'DOCTYPE':
            # i.e. "<!DOCTYPE>"
            data = ''
        self.soup.handle_data(data)
        self.soup.endData(Doctype)

    def unknown_decl(self, data):
        if data.upper().startswith('CDATA['):
            cls = CData
            data = data[len('CDATA['):]
        else:
            cls = Declaration
        self.soup.endData()
        self.soup.handle_data(data)
        self.soup.endData(cls)

    def handle_pi(self, data):
        self.soup.endData()
        if data.endswith("?") and data.lower().startswith("xml"):
            # "An XHTML processing instruction using the trailing '?'
            # will cause the '?' to be included in data." - HTMLParser
            # docs.
            #
            # Strip the question mark so we don't end up with two
            # question marks.
            data = data[:-1]
        self.soup.handle_data(data)
        self.soup.endData(ProcessingInstruction)


class HTMLParserTreeBuilder(HTMLTreeBuilder):

    is_xml = False
    features = [HTML, STRICT, HTMLPARSER]

    def __init__(self, *args, **kwargs):
        if CONSTRUCTOR_TAKES_STRICT:
            kwargs['strict'] = False
        self.parser_args = (args, kwargs)

    def prepare_markup(self, markup, user_specified_encoding=None,
                       document_declared_encoding=None):
        """
        :return: A 4-tuple (markup, original encoding, encoding
           declared within markup, whether any characters had to be
           replaced with REPLACEMENT CHARACTER).
        """
        if isinstance(markup, unicode):
            yield (markup, None, None, False)
            return

        try_encodings = [user_specified_encoding, document_declared_encoding]
        dammit = UnicodeDammit(markup, try_encodings, is_html=True)
        yield (dammit.markup, dammit.original_encoding,
               dammit.declared_html_encoding,
               dammit.contains_replacement_characters)

    def feed(self, markup):
        args, kwargs = self.parser_args
        parser = BeautifulSoupHTMLParser(*args, **kwargs)
        parser.soup = self.soup
        try:
            parser.feed(markup)
        except HTMLParseError, e:
            warnings.warn(RuntimeWarning(
                "Python's built-in HTMLParser cannot parse the given document. This is not a bug in Beautiful Soup. The best solution is to install an external parser (lxml or html5lib), and use Beautiful Soup with that parser. See http://www.crummy.com/software/BeautifulSoup/bs4/doc/#installing-a-parser for help."))
            raise e

# Patch 3.2 versions of HTMLParser earlier than 3.2.3 to use some
# 3.2.3 code. This ensures they don't treat markup like <p></p> as a
# string.
#
# XXX This code can be removed once most Python 3 users are on 3.2.3.
if major == 3 and minor == 2 and not CONSTRUCTOR_TAKES_STRICT:
    import re
    attrfind_tolerant = re.compile(
        r'\s*((?<=[\'"\s])[^\s/>][^\s/=>]*)(\s*=+\s*'
        r'(\'[^\']*\'|"[^"]*"|(?![\'"])[^>\s]*))?')
    HTMLParserTreeBuilder.attrfind_tolerant = attrfind_tolerant

    locatestarttagend = re.compile(r"""
  <[a-zA-Z][-.a-zA-Z0-9:_]*          # tag name
  (?:\s+                             # whitespace before attribute name
    (?:[a-zA-Z_][-.:a-zA-Z0-9_]*     # attribute name
      (?:\s*=\s*                     # value indicator
        (?:'[^']*'                   # LITA-enclosed value
          |\"[^\"]*\"                # LIT-enclosed value
          |[^'\">\s]+                # bare value
         )
       )?
     )
   )*
  \s*                                # trailing whitespace
""", re.VERBOSE)
    BeautifulSoupHTMLParser.locatestarttagend = locatestarttagend

    from html.parser import tagfind, attrfind

    def parse_starttag(self, i):
        self.__starttag_text = None
        endpos = self.check_for_whole_start_tag(i)
        if endpos < 0:
            return endpos
        rawdata = self.rawdata
        self.__starttag_text = rawdata[i:endpos]

        # Now parse the data between i+1 and j into a tag and attrs
        attrs = []
        match = tagfind.match(rawdata, i+1)
        assert match, 'unexpected call to parse_starttag()'
        k = match.end()
        self.lasttag = tag = rawdata[i+1:k].lower()
        while k < endpos:
            if self.strict:
                m = attrfind.match(rawdata, k)
            else:
                m = attrfind_tolerant.match(rawdata, k)
            if not m:
                break
            attrname, rest, attrvalue = m.group(1, 2, 3)
            if not rest:
                attrvalue = None
            elif attrvalue[:1] == '\'' == attrvalue[-1:] or \
                 attrvalue[:1] == '"' == attrvalue[-1:]:
                attrvalue = attrvalue[1:-1]
                if attrvalue:
                    attrvalue = self.unescape(attrvalue)
            attrs.append((attrname.lower(), attrvalue))
            k = m.end()

        end = rawdata[k:endpos].strip()
        if end not in (">", "/>"):
            lineno, offset = self.getpos()
            if "\n" in self.__starttag_text:
                lineno = lineno + self.__starttag_text.count("\n")
                offset = len(self.__starttag_text) \
                         - self.__starttag_text.rfind("\n")
            else:
                offset = offset + len(self.__starttag_text)
            if self.strict:
                self.error("junk characters in start tag: %r"
                           % (rawdata[k:endpos][:20],))
            self.handle_data(rawdata[i:endpos])
            return endpos
        if end.endswith('/>'):
            # XHTML-style empty tag: <span attr="value" />
            self.handle_startendtag(tag, attrs)
        else:
            self.handle_starttag(tag, attrs)
            if tag in self.CDATA_CONTENT_ELEMENTS:
                self.set_cdata_mode(tag)
        return endpos

    def set_cdata_mode(self, elem):
        self.cdata_elem = elem.lower()
        self.interesting = re.compile(r'</\s*%s\s*>' % self.cdata_elem, re.I)

    BeautifulSoupHTMLParser.parse_starttag = parse_starttag
    BeautifulSoupHTMLParser.set_cdata_mode = set_cdata_mode

    CONSTRUCTOR_TAKES_STRICT = True
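
# Editor's illustrative sketch (not part of the original file): selecting
# the stdlib builder above via its 'html.parser' feature string.
#
#   from bs4 import BeautifulSoup
#   soup = BeautifulSoup('<p>Hello &amp; goodbye</p>', 'html.parser')
#   soup.p.string  # -> u'Hello & goodbye' (entity reference resolved by
#                  #    handle_entityref above)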
|
||||
@@ -1,233 +0,0 @@
__all__ = [
    'LXMLTreeBuilderForXML',
    'LXMLTreeBuilder',
    ]

from io import BytesIO
from StringIO import StringIO
import collections
from lxml import etree
from bs4.element import Comment, Doctype, NamespacedAttribute
from bs4.builder import (
    FAST,
    HTML,
    HTMLTreeBuilder,
    PERMISSIVE,
    ParserRejectedMarkup,
    TreeBuilder,
    XML)
from bs4.dammit import EncodingDetector

LXML = 'lxml'

class LXMLTreeBuilderForXML(TreeBuilder):
    DEFAULT_PARSER_CLASS = etree.XMLParser

    is_xml = True

    # Well, it's permissive by XML parser standards.
    features = [LXML, XML, FAST, PERMISSIVE]

    CHUNK_SIZE = 512

    # This namespace mapping is specified in the XML Namespace
    # standard.
    DEFAULT_NSMAPS = {'http://www.w3.org/XML/1998/namespace' : "xml"}

    def default_parser(self, encoding):
        # This can either return a parser object or a class, which
        # will be instantiated with default arguments.
        if self._default_parser is not None:
            return self._default_parser
        return etree.XMLParser(
            target=self, strip_cdata=False, recover=True, encoding=encoding)

    def parser_for(self, encoding):
        # Use the default parser.
        parser = self.default_parser(encoding)

        if isinstance(parser, collections.Callable):
            # Instantiate the parser with default arguments
            parser = parser(target=self, strip_cdata=False, encoding=encoding)
        return parser

    def __init__(self, parser=None, empty_element_tags=None):
        # TODO: Issue a warning if parser is present but not a
        # callable, since that means there's no way to create new
        # parsers for different encodings.
        self._default_parser = parser
        if empty_element_tags is not None:
            self.empty_element_tags = set(empty_element_tags)
        self.soup = None
        self.nsmaps = [self.DEFAULT_NSMAPS]

    def _getNsTag(self, tag):
        # Split the namespace URL out of a fully-qualified lxml tag
        # name. Copied from lxml's src/lxml/sax.py.
        if tag[0] == '{':
            return tuple(tag[1:].split('}', 1))
        else:
            return (None, tag)
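For illustration only: lxml reports fully-qualified names in Clark notation, which _getNsTag splits apart. A standalone sketch of the same logic:

def get_ns_tag(tag):
    # '{namespace-url}local' -> ('namespace-url', 'local')
    if tag[0] == '{':
        return tuple(tag[1:].split('}', 1))
    return (None, tag)

print(get_ns_tag('{http://www.w3.org/XML/1998/namespace}lang'))
# ('http://www.w3.org/XML/1998/namespace', 'lang')
print(get_ns_tag('title'))  # (None, 'title')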
    def prepare_markup(self, markup, user_specified_encoding=None,
                       document_declared_encoding=None):
        """
        :yield: A series of 4-tuples.
         (markup, encoding, declared encoding,
          has undergone character replacement)

        Each 4-tuple represents a strategy for parsing the document.
        """
        if isinstance(markup, unicode):
            # We were given Unicode. Maybe lxml can parse Unicode on
            # this system?
            yield markup, None, document_declared_encoding, False

        if isinstance(markup, unicode):
            # No, apparently not. Convert the Unicode to UTF-8 and
            # tell lxml to parse it as UTF-8.
            yield (markup.encode("utf8"), "utf8",
                   document_declared_encoding, False)

        # Instead of using UnicodeDammit to convert the bytestring to
        # Unicode using different encodings, use EncodingDetector to
        # iterate over the encodings, and tell lxml to try to parse
        # the document as each one in turn.
        is_html = not self.is_xml
        try_encodings = [user_specified_encoding, document_declared_encoding]
        detector = EncodingDetector(markup, try_encodings, is_html)
        for encoding in detector.encodings:
            yield (detector.markup, encoding, document_declared_encoding, False)

    def feed(self, markup):
        if isinstance(markup, bytes):
            markup = BytesIO(markup)
        elif isinstance(markup, unicode):
            markup = StringIO(markup)

        # Call feed() at least once, even if the markup is empty,
        # or the parser won't be initialized.
        data = markup.read(self.CHUNK_SIZE)
        try:
            self.parser = self.parser_for(self.soup.original_encoding)
            self.parser.feed(data)
            while len(data) != 0:
                # Now call feed() on the rest of the data, chunk by chunk.
                data = markup.read(self.CHUNK_SIZE)
                if len(data) != 0:
                    self.parser.feed(data)
            self.parser.close()
        except (UnicodeDecodeError, LookupError, etree.ParserError), e:
            raise ParserRejectedMarkup(str(e))

    def close(self):
        self.nsmaps = [self.DEFAULT_NSMAPS]

    def start(self, name, attrs, nsmap={}):
        # Make sure attrs is a mutable dict--lxml may send an immutable dictproxy.
        attrs = dict(attrs)
        nsprefix = None
        # Invert each namespace map as it comes in.
        if len(self.nsmaps) > 1:
            # There are no new namespaces for this tag, but
            # non-default namespaces are in play, so we need a
            # separate tag stack to know when they end.
            self.nsmaps.append(None)
        elif len(nsmap) > 0:
            # A new namespace mapping has come into play.
            inverted_nsmap = dict((value, key) for key, value in nsmap.items())
            self.nsmaps.append(inverted_nsmap)
            # Also treat the namespace mapping as a set of attributes on the
            # tag, so we can recreate it later.
            attrs = attrs.copy()
            for prefix, namespace in nsmap.items():
                attribute = NamespacedAttribute(
                    "xmlns", prefix, "http://www.w3.org/2000/xmlns/")
                attrs[attribute] = namespace

        # Namespaces are in play. Find any attributes that came in
        # from lxml with namespaces attached to their names, and
        # turn them into NamespacedAttribute objects.
        new_attrs = {}
        for attr, value in attrs.items():
            namespace, attr = self._getNsTag(attr)
            if namespace is None:
                new_attrs[attr] = value
            else:
                nsprefix = self._prefix_for_namespace(namespace)
                attr = NamespacedAttribute(nsprefix, attr, namespace)
                new_attrs[attr] = value
        attrs = new_attrs

        namespace, name = self._getNsTag(name)
        nsprefix = self._prefix_for_namespace(namespace)
        self.soup.handle_starttag(name, namespace, nsprefix, attrs)

    def _prefix_for_namespace(self, namespace):
        """Find the currently active prefix for the given namespace."""
        if namespace is None:
            return None
        for inverted_nsmap in reversed(self.nsmaps):
            if inverted_nsmap is not None and namespace in inverted_nsmap:
                return inverted_nsmap[namespace]
        return None

    def end(self, name):
        self.soup.endData()
        completed_tag = self.soup.tagStack[-1]
        namespace, name = self._getNsTag(name)
        nsprefix = None
        if namespace is not None:
            for inverted_nsmap in reversed(self.nsmaps):
                if inverted_nsmap is not None and namespace in inverted_nsmap:
                    nsprefix = inverted_nsmap[namespace]
                    break
        self.soup.handle_endtag(name, nsprefix)
        if len(self.nsmaps) > 1:
            # This tag, or one of its parents, introduced a namespace
            # mapping, so pop it off the stack.
            self.nsmaps.pop()

    def pi(self, target, data):
        pass

    def data(self, content):
        self.soup.handle_data(content)

    def doctype(self, name, pubid, system):
        self.soup.endData()
        doctype = Doctype.for_name_and_ids(name, pubid, system)
        self.soup.object_was_parsed(doctype)

    def comment(self, content):
        "Handle comments as Comment objects."
        self.soup.endData()
        self.soup.handle_data(content)
        self.soup.endData(Comment)

    def test_fragment_to_document(self, fragment):
        """See `TreeBuilder`."""
        return u'<?xml version="1.0" encoding="utf-8"?>\n%s' % fragment


class LXMLTreeBuilder(HTMLTreeBuilder, LXMLTreeBuilderForXML):

    features = [LXML, HTML, FAST, PERMISSIVE]
    is_xml = False

    def default_parser(self, encoding):
        return etree.HTMLParser

    def feed(self, markup):
        encoding = self.soup.original_encoding
        try:
            self.parser = self.parser_for(encoding)
            self.parser.feed(markup)
            self.parser.close()
        except (UnicodeDecodeError, LookupError, etree.ParserError), e:
            raise ParserRejectedMarkup(str(e))

    def test_fragment_to_document(self, fragment):
        """See `TreeBuilder`."""
        return u'<html><body>%s</body></html>' % fragment
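A hedged usage sketch (assumes bs4 with lxml installed): the feature strings above are what let BeautifulSoup pick these builders by name.

from bs4 import BeautifulSoup
# "xml" selects LXMLTreeBuilderForXML; "lxml" selects LXMLTreeBuilder.
xml_soup = BeautifulSoup('<root xmlns:a="http://example.com/"><a:b/></root>', 'xml')
print(xml_soup.find('b').prefix)  # 'a' -- the namespace prefix is preserved
html_soup = BeautifulSoup('<p>unclosed', 'lxml')
print(html_soup.p)                # <p>unclosed</p> -- lxml recovers the tag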
@@ -1,829 +0,0 @@
# -*- coding: utf-8 -*-
"""Beautiful Soup bonus library: Unicode, Dammit

This library converts a bytestream to Unicode through any means
necessary. It is heavily based on code from Mark Pilgrim's Universal
Feed Parser. It works best on XML and HTML, but it does not rewrite the
XML or HTML to reflect a new encoding; that's the tree builder's job.
"""

import codecs
from htmlentitydefs import codepoint2name
import re
import logging
import string

# Import a library to autodetect character encodings.
chardet_type = None
try:
    # First try the fast C implementation.
    # PyPI package: cchardet
    import cchardet
    def chardet_dammit(s):
        return cchardet.detect(s)['encoding']
except ImportError:
    try:
        # Fall back to the pure Python implementation
        # Debian package: python-chardet
        # PyPI package: chardet
        import chardet
        def chardet_dammit(s):
            return chardet.detect(s)['encoding']
        #import chardet.constants
        #chardet.constants._debug = 1
    except ImportError:
        # No chardet available.
        def chardet_dammit(s):
            return None

# Available from http://cjkpython.i18n.org/.
try:
    import iconv_codec
except ImportError:
    pass

xml_encoding_re = re.compile(
    '^<\?.*encoding=[\'"](.*?)[\'"].*\?>'.encode(), re.I)
html_meta_re = re.compile(
    '<\s*meta[^>]+charset\s*=\s*["\']?([^>]*?)[ /;\'">]'.encode(), re.I)

class EntitySubstitution(object):

    """Substitute XML or HTML entities for the corresponding characters."""

    def _populate_class_variables():
        lookup = {}
        reverse_lookup = {}
        characters_for_re = []
        for codepoint, name in list(codepoint2name.items()):
            character = unichr(codepoint)
            if codepoint != 34:
                # There's no point in turning the quotation mark into
                # &quot;, unless it happens within an attribute value, which
                # is handled elsewhere.
                characters_for_re.append(character)
                lookup[character] = name
            # But we do want to turn &quot; into the quotation mark.
            reverse_lookup[name] = character
        re_definition = "[%s]" % "".join(characters_for_re)
        return lookup, reverse_lookup, re.compile(re_definition)
    (CHARACTER_TO_HTML_ENTITY, HTML_ENTITY_TO_CHARACTER,
     CHARACTER_TO_HTML_ENTITY_RE) = _populate_class_variables()

    CHARACTER_TO_XML_ENTITY = {
        "'": "apos",
        '"': "quot",
        "&": "amp",
        "<": "lt",
        ">": "gt",
        }

    BARE_AMPERSAND_OR_BRACKET = re.compile("([<>]|"
                                           "&(?!#\d+;|#x[0-9a-fA-F]+;|\w+;)"
                                           ")")

    AMPERSAND_OR_BRACKET = re.compile("([<>&])")

    @classmethod
    def _substitute_html_entity(cls, matchobj):
        entity = cls.CHARACTER_TO_HTML_ENTITY.get(matchobj.group(0))
        return "&%s;" % entity

    @classmethod
    def _substitute_xml_entity(cls, matchobj):
        """Used with a regular expression to substitute the
        appropriate XML entity for an XML special character."""
        entity = cls.CHARACTER_TO_XML_ENTITY[matchobj.group(0)]
        return "&%s;" % entity

    @classmethod
    def quoted_attribute_value(cls, value):
        """Make a value into a quoted XML attribute, possibly escaping it.

        Most strings will be quoted using double quotes.

         Bob's Bar -> "Bob's Bar"

        If a string contains double quotes, it will be quoted using
        single quotes.

         Welcome to "my bar" -> 'Welcome to "my bar"'

        If a string contains both single and double quotes, the
        double quotes will be escaped, and the string will be quoted
        using double quotes.

         Welcome to "Bob's Bar" -> "Welcome to &quot;Bob's bar&quot;"
        """
        quote_with = '"'
        if '"' in value:
            if "'" in value:
                # The string contains both single and double
                # quotes.  Turn the double quotes into
                # entities. We quote the double quotes rather than
                # the single quotes because the entity name is
                # "&quot;" whether this is HTML or XML.  If we
                # quoted the single quotes, we'd have to decide
                # between &apos; and &squot;.
                replace_with = "&quot;"
                value = value.replace('"', replace_with)
            else:
                # There are double quotes but no single quotes.
                # We can use single quotes to quote the attribute.
                quote_with = "'"
        return quote_with + value + quote_with
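A brief usage sketch of the three quoting cases (outputs match the docstring above):

from bs4.dammit import EntitySubstitution
print(EntitySubstitution.quoted_attribute_value("Bob's Bar"))
# "Bob's Bar"
print(EntitySubstitution.quoted_attribute_value('Welcome to "my bar"'))
# 'Welcome to "my bar"'
print(EntitySubstitution.quoted_attribute_value('Welcome to "Bob\'s Bar"'))
# "Welcome to &quot;Bob's Bar&quot;"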
    @classmethod
    def substitute_xml(cls, value, make_quoted_attribute=False):
        """Substitute XML entities for special XML characters.

        :param value: A string to be substituted. The less-than sign
          will become &lt;, the greater-than sign will become &gt;,
          and any ampersands will become &amp;. If you want ampersands
          that appear to be part of an entity definition to be left
          alone, use substitute_xml_containing_entities() instead.

        :param make_quoted_attribute: If True, then the string will be
         quoted, as befits an attribute value.
        """
        # Escape angle brackets and ampersands.
        value = cls.AMPERSAND_OR_BRACKET.sub(
            cls._substitute_xml_entity, value)

        if make_quoted_attribute:
            value = cls.quoted_attribute_value(value)
        return value

    @classmethod
    def substitute_xml_containing_entities(
        cls, value, make_quoted_attribute=False):
        """Substitute XML entities for special XML characters.

        :param value: A string to be substituted. The less-than sign will
          become &lt;, the greater-than sign will become &gt;, and any
          ampersands that are not part of an entity definition will
          become &amp;.

        :param make_quoted_attribute: If True, then the string will be
         quoted, as befits an attribute value.
        """
        # Escape angle brackets, and ampersands that aren't part of
        # entities.
        value = cls.BARE_AMPERSAND_OR_BRACKET.sub(
            cls._substitute_xml_entity, value)

        if make_quoted_attribute:
            value = cls.quoted_attribute_value(value)
        return value
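To make the difference between the two substitution methods concrete, a short hedged sketch:

from bs4.dammit import EntitySubstitution
s = "AT&amp;T is <large>"
print(EntitySubstitution.substitute_xml(s))
# AT&amp;amp;T is &lt;large&gt;   -- every ampersand is escaped
print(EntitySubstitution.substitute_xml_containing_entities(s))
# AT&amp;T is &lt;large&gt;       -- the existing entity is left alone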
    @classmethod
    def substitute_html(cls, s):
        """Replace certain Unicode characters with named HTML entities.

        This differs from data.encode(encoding, 'xmlcharrefreplace')
        in that the goal is to make the result more readable (to those
        with ASCII displays) rather than to recover from
        errors. There's absolutely nothing wrong with a UTF-8 string
        containing a LATIN SMALL LETTER E WITH ACUTE, but replacing that
        character with "&eacute;" will make it more readable to some
        people.
        """
        return cls.CHARACTER_TO_HTML_ENTITY_RE.sub(
            cls._substitute_html_entity, s)
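For example (illustrative only):

from bs4.dammit import EntitySubstitution
print(EntitySubstitution.substitute_html(u"caf\xe9 \u2014 50\xa2"))
# caf&eacute; &mdash; 50&cent;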
class EncodingDetector:
    """Suggests a number of possible encodings for a bytestring.

    Order of precedence:

    1. Encodings you specifically tell EncodingDetector to try first
    (the override_encodings argument to the constructor).

    2. An encoding declared within the bytestring itself, either in an
    XML declaration (if the bytestring is to be interpreted as an XML
    document), or in a <meta> tag (if the bytestring is to be
    interpreted as an HTML document.)

    3. An encoding detected through textual analysis by chardet,
    cchardet, or a similar external library.

    4. UTF-8.

    5. Windows-1252.
    """
    def __init__(self, markup, override_encodings=None, is_html=False):
        self.override_encodings = override_encodings or []
        self.chardet_encoding = None
        self.is_html = is_html
        self.declared_encoding = None

        # First order of business: strip a byte-order mark.
        self.markup, self.sniffed_encoding = self.strip_byte_order_mark(markup)

    def _usable(self, encoding, tried):
        if encoding is not None:
            encoding = encoding.lower()
            if encoding not in tried:
                tried.add(encoding)
                return True
        return False

    @property
    def encodings(self):
        """Yield a number of encodings that might work for this markup."""
        tried = set()
        for e in self.override_encodings:
            if self._usable(e, tried):
                yield e

        # Did the document originally start with a byte-order mark
        # that indicated its encoding?
        if self._usable(self.sniffed_encoding, tried):
            yield self.sniffed_encoding

        # Look within the document for an XML or HTML encoding
        # declaration.
        if self.declared_encoding is None:
            self.declared_encoding = self.find_declared_encoding(
                self.markup, self.is_html)
        if self._usable(self.declared_encoding, tried):
            yield self.declared_encoding

        # Use third-party character set detection to guess at the
        # encoding.
        if self.chardet_encoding is None:
            self.chardet_encoding = chardet_dammit(self.markup)
        if self._usable(self.chardet_encoding, tried):
            yield self.chardet_encoding

        # As a last-ditch effort, try utf-8 and windows-1252.
        for e in ('utf-8', 'windows-1252'):
            if self._usable(e, tried):
                yield e

    @classmethod
    def strip_byte_order_mark(cls, data):
        """If a byte-order mark is present, strip it and return the encoding it implies."""
        encoding = None
        if (len(data) >= 4) and (data[:2] == b'\xfe\xff') \
               and (data[2:4] != b'\x00\x00'):
            encoding = 'utf-16be'
            data = data[2:]
        elif (len(data) >= 4) and (data[:2] == b'\xff\xfe') \
                 and (data[2:4] != b'\x00\x00'):
            encoding = 'utf-16le'
            data = data[2:]
        elif data[:3] == b'\xef\xbb\xbf':
            encoding = 'utf-8'
            data = data[3:]
        elif data[:4] == b'\x00\x00\xfe\xff':
            encoding = 'utf-32be'
            data = data[4:]
        elif data[:4] == b'\xff\xfe\x00\x00':
            encoding = 'utf-32le'
            data = data[4:]
        return data, encoding
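A hedged usage sketch of the detector's precedence order (the trailing entries depend on whether chardet is installed):

from bs4.dammit import EncodingDetector
data = b'\xef\xbb\xbf<?xml version="1.0" encoding="latin-1"?><root/>'
detector = EncodingDetector(data, override_encodings=["shift-jis"])
print(list(detector.encodings))
# ['shift-jis', 'utf-8', 'latin-1', ...] -- override first, then the
# BOM-sniffed encoding, then the declared one, then the fallbacks.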
    @classmethod
    def find_declared_encoding(cls, markup, is_html=False, search_entire_document=False):
        """Given a document, tries to find its declared encoding.

        An XML encoding is declared at the beginning of the document.

        An HTML encoding is declared in a <meta> tag, hopefully near the
        beginning of the document.
        """
        if search_entire_document:
            xml_endpos = html_endpos = len(markup)
        else:
            xml_endpos = 1024
            html_endpos = max(2048, int(len(markup) * 0.05))

        declared_encoding = None
        declared_encoding_match = xml_encoding_re.search(markup, endpos=xml_endpos)
        if not declared_encoding_match and is_html:
            declared_encoding_match = html_meta_re.search(markup, endpos=html_endpos)
        if declared_encoding_match is not None:
            declared_encoding = declared_encoding_match.groups()[0].decode(
                'ascii')
        if declared_encoding:
            return declared_encoding.lower()
        return None
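For instance (illustrative):

from bs4.dammit import EncodingDetector
print(EncodingDetector.find_declared_encoding(
    b'<?xml version="1.0" encoding="UTF-8"?><root/>'))    # utf-8
print(EncodingDetector.find_declared_encoding(
    b'<meta charset="ISO-8859-1">', is_html=True))        # iso-8859-1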
class UnicodeDammit:
    """A class for detecting the encoding of a *ML document and
    converting it to a Unicode string. If the source encoding is
    windows-1252, can replace MS smart quotes with their HTML or XML
    equivalents."""

    # This dictionary maps commonly seen values for "charset" in HTML
    # meta tags to the corresponding Python codec names. It only covers
    # values that aren't in Python's aliases and can't be determined
    # by the heuristics in find_codec.
    CHARSET_ALIASES = {"macintosh": "mac-roman",
                       "x-sjis": "shift-jis"}

    ENCODINGS_WITH_SMART_QUOTES = [
        "windows-1252",
        "iso-8859-1",
        "iso-8859-2",
        ]

    def __init__(self, markup, override_encodings=[],
                 smart_quotes_to=None, is_html=False):
        self.smart_quotes_to = smart_quotes_to
        self.tried_encodings = []
        self.contains_replacement_characters = False
        self.is_html = is_html

        self.detector = EncodingDetector(markup, override_encodings, is_html)

        # Short-circuit if the data is in Unicode to begin with.
        if isinstance(markup, unicode) or markup == '':
            self.markup = markup
            self.unicode_markup = unicode(markup)
            self.original_encoding = None
            return

        # The encoding detector may have stripped a byte-order mark.
        # Use the stripped markup from this point on.
        self.markup = self.detector.markup

        u = None
        for encoding in self.detector.encodings:
            markup = self.detector.markup
            u = self._convert_from(encoding)
            if u is not None:
                break

        if not u:
            # None of the encodings worked. As an absolute last resort,
            # try them again with character replacement.
            for encoding in self.detector.encodings:
                if encoding != "ascii":
                    u = self._convert_from(encoding, "replace")
                if u is not None:
                    logging.warning(
                        "Some characters could not be decoded, and were "
                        "replaced with REPLACEMENT CHARACTER.")
                    self.contains_replacement_characters = True
                    break

        # If none of that worked, we could at this point force it to
        # ASCII, but that would destroy so much data that I think
        # giving up is better.
        self.unicode_markup = u
        if not u:
            self.original_encoding = None
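A hedged usage sketch, following the standard Unicode, Dammit examples:

from bs4 import UnicodeDammit
dammit = UnicodeDammit(b"Sacr\xe9 bleu!", ["latin-1", "iso-8859-1"])
print(dammit.unicode_markup)      # Sacré bleu!
print(dammit.original_encoding)   # latin-1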
    def _sub_ms_char(self, match):
        """Changes a MS smart quote character to an XML or HTML
        entity, or an ASCII character."""
        orig = match.group(1)
        if self.smart_quotes_to == 'ascii':
            sub = self.MS_CHARS_TO_ASCII.get(orig).encode()
        else:
            sub = self.MS_CHARS.get(orig)
            if type(sub) == tuple:
                if self.smart_quotes_to == 'xml':
                    sub = '&#x'.encode() + sub[1].encode() + ';'.encode()
                else:
                    sub = '&'.encode() + sub[0].encode() + ';'.encode()
            else:
                sub = sub.encode()
        return sub

    def _convert_from(self, proposed, errors="strict"):
        proposed = self.find_codec(proposed)
        if not proposed or (proposed, errors) in self.tried_encodings:
            return None
        self.tried_encodings.append((proposed, errors))
        markup = self.markup
        # Convert smart quotes to HTML if coming from an encoding
        # that might have them.
        if (self.smart_quotes_to is not None
            and proposed in self.ENCODINGS_WITH_SMART_QUOTES):
            smart_quotes_re = b"([\x80-\x9f])"
            smart_quotes_compiled = re.compile(smart_quotes_re)
            markup = smart_quotes_compiled.sub(self._sub_ms_char, markup)

        try:
            #print "Trying to convert document to %s (errors=%s)" % (
            #    proposed, errors)
            u = self._to_unicode(markup, proposed, errors)
            self.markup = u
            self.original_encoding = proposed
        except Exception as e:
            #print "That didn't work!"
            #print e
            return None
        #print "Correct encoding: %s" % proposed
        return self.markup

    def _to_unicode(self, data, encoding, errors="strict"):
        '''Given a string and its encoding, decodes the string into Unicode.
        %encoding is a string recognized by encodings.aliases'''
        return unicode(data, encoding, errors)

    @property
    def declared_html_encoding(self):
        if not self.is_html:
            return None
        return self.detector.declared_encoding

    def find_codec(self, charset):
        value = (self._codec(self.CHARSET_ALIASES.get(charset, charset))
                 or (charset and self._codec(charset.replace("-", "")))
                 or (charset and self._codec(charset.replace("-", "_")))
                 or (charset and charset.lower())
                 or charset
                 )
        if value:
            return value.lower()
        return None

    def _codec(self, charset):
        if not charset:
            return charset
        codec = None
        try:
            codecs.lookup(charset)
            codec = charset
        except (LookupError, ValueError):
            pass
        return codec


    # A partial mapping of ISO-Latin-1 to HTML entities/XML numeric entities.
    MS_CHARS = {b'\x80': ('euro', '20AC'),
                b'\x81': ' ',
                b'\x82': ('sbquo', '201A'),
                b'\x83': ('fnof', '192'),
                b'\x84': ('bdquo', '201E'),
                b'\x85': ('hellip', '2026'),
                b'\x86': ('dagger', '2020'),
                b'\x87': ('Dagger', '2021'),
                b'\x88': ('circ', '2C6'),
                b'\x89': ('permil', '2030'),
                b'\x8A': ('Scaron', '160'),
                b'\x8B': ('lsaquo', '2039'),
                b'\x8C': ('OElig', '152'),
                b'\x8D': '?',
                b'\x8E': ('#x17D', '17D'),
                b'\x8F': '?',
                b'\x90': '?',
                b'\x91': ('lsquo', '2018'),
                b'\x92': ('rsquo', '2019'),
                b'\x93': ('ldquo', '201C'),
                b'\x94': ('rdquo', '201D'),
                b'\x95': ('bull', '2022'),
                b'\x96': ('ndash', '2013'),
                b'\x97': ('mdash', '2014'),
                b'\x98': ('tilde', '2DC'),
                b'\x99': ('trade', '2122'),
                b'\x9a': ('scaron', '161'),
                b'\x9b': ('rsaquo', '203A'),
                b'\x9c': ('oelig', '153'),
                b'\x9d': '?',
                b'\x9e': ('#x17E', '17E'),
                b'\x9f': ('Yuml', ''),}

    # A parochial partial mapping of ISO-Latin-1 to ASCII. Contains
    # horrors like stripping diacritical marks to turn á into a, but also
    # contains non-horrors like turning “ into ".
    MS_CHARS_TO_ASCII = {
        b'\x80' : 'EUR',
        b'\x81' : ' ',
        b'\x82' : ',',
        b'\x83' : 'f',
        b'\x84' : ',,',
        b'\x85' : '...',
        b'\x86' : '+',
        b'\x87' : '++',
        b'\x88' : '^',
        b'\x89' : '%',
        b'\x8a' : 'S',
        b'\x8b' : '<',
        b'\x8c' : 'OE',
        b'\x8d' : '?',
        b'\x8e' : 'Z',
        b'\x8f' : '?',
        b'\x90' : '?',
        b'\x91' : "'",
        b'\x92' : "'",
        b'\x93' : '"',
        b'\x94' : '"',
        b'\x95' : '*',
        b'\x96' : '-',
        b'\x97' : '--',
        b'\x98' : '~',
        b'\x99' : '(TM)',
        b'\x9a' : 's',
        b'\x9b' : '>',
        b'\x9c' : 'oe',
        b'\x9d' : '?',
        b'\x9e' : 'z',
        b'\x9f' : 'Y',
        b'\xa0' : ' ',
        b'\xa1' : '!',
        b'\xa2' : 'c',
        b'\xa3' : 'GBP',
        b'\xa4' : '$', #This approximation is especially parochial--this is the
                       #generic currency symbol.
        b'\xa5' : 'YEN',
        b'\xa6' : '|',
        b'\xa7' : 'S',
        b'\xa8' : '..',
        b'\xa9' : '',
        b'\xaa' : '(th)',
        b'\xab' : '<<',
        b'\xac' : '!',
        b'\xad' : ' ',
        b'\xae' : '(R)',
        b'\xaf' : '-',
        b'\xb0' : 'o',
        b'\xb1' : '+-',
        b'\xb2' : '2',
        b'\xb3' : '3',
        b'\xb4' : ("'", 'acute'),
        b'\xb5' : 'u',
        b'\xb6' : 'P',
        b'\xb7' : '*',
        b'\xb8' : ',',
        b'\xb9' : '1',
        b'\xba' : '(th)',
        b'\xbb' : '>>',
        b'\xbc' : '1/4',
        b'\xbd' : '1/2',
        b'\xbe' : '3/4',
        b'\xbf' : '?',
        b'\xc0' : 'A',
        b'\xc1' : 'A',
        b'\xc2' : 'A',
        b'\xc3' : 'A',
        b'\xc4' : 'A',
        b'\xc5' : 'A',
        b'\xc6' : 'AE',
        b'\xc7' : 'C',
        b'\xc8' : 'E',
        b'\xc9' : 'E',
        b'\xca' : 'E',
        b'\xcb' : 'E',
        b'\xcc' : 'I',
        b'\xcd' : 'I',
        b'\xce' : 'I',
        b'\xcf' : 'I',
        b'\xd0' : 'D',
        b'\xd1' : 'N',
        b'\xd2' : 'O',
        b'\xd3' : 'O',
        b'\xd4' : 'O',
        b'\xd5' : 'O',
        b'\xd6' : 'O',
        b'\xd7' : '*',
        b'\xd8' : 'O',
        b'\xd9' : 'U',
        b'\xda' : 'U',
        b'\xdb' : 'U',
        b'\xdc' : 'U',
        b'\xdd' : 'Y',
        b'\xde' : 'b',
        b'\xdf' : 'B',
        b'\xe0' : 'a',
        b'\xe1' : 'a',
        b'\xe2' : 'a',
        b'\xe3' : 'a',
        b'\xe4' : 'a',
        b'\xe5' : 'a',
        b'\xe6' : 'ae',
        b'\xe7' : 'c',
        b'\xe8' : 'e',
        b'\xe9' : 'e',
        b'\xea' : 'e',
        b'\xeb' : 'e',
        b'\xec' : 'i',
        b'\xed' : 'i',
        b'\xee' : 'i',
        b'\xef' : 'i',
        b'\xf0' : 'o',
        b'\xf1' : 'n',
        b'\xf2' : 'o',
        b'\xf3' : 'o',
        b'\xf4' : 'o',
        b'\xf5' : 'o',
        b'\xf6' : 'o',
        b'\xf7' : '/',
        b'\xf8' : 'o',
        b'\xf9' : 'u',
        b'\xfa' : 'u',
        b'\xfb' : 'u',
        b'\xfc' : 'u',
        b'\xfd' : 'y',
        b'\xfe' : 'b',
        b'\xff' : 'y',
        }

    # A map used when removing rogue Windows-1252/ISO-8859-1
    # characters in otherwise UTF-8 documents.
    #
    # Note that \x81, \x8d, \x8f, \x90, and \x9d are undefined in
    # Windows-1252.
    WINDOWS_1252_TO_UTF8 = {
        0x80 : b'\xe2\x82\xac', # €
        0x82 : b'\xe2\x80\x9a', # ‚
        0x83 : b'\xc6\x92',     # ƒ
        0x84 : b'\xe2\x80\x9e', # „
        0x85 : b'\xe2\x80\xa6', # …
        0x86 : b'\xe2\x80\xa0', # †
        0x87 : b'\xe2\x80\xa1', # ‡
        0x88 : b'\xcb\x86',     # ˆ
        0x89 : b'\xe2\x80\xb0', # ‰
        0x8a : b'\xc5\xa0',     # Š
        0x8b : b'\xe2\x80\xb9', # ‹
        0x8c : b'\xc5\x92',     # Œ
        0x8e : b'\xc5\xbd',     # Ž
        0x91 : b'\xe2\x80\x98', # ‘
        0x92 : b'\xe2\x80\x99', # ’
        0x93 : b'\xe2\x80\x9c', # “
        0x94 : b'\xe2\x80\x9d', # ”
        0x95 : b'\xe2\x80\xa2', # •
        0x96 : b'\xe2\x80\x93', # –
        0x97 : b'\xe2\x80\x94', # —
        0x98 : b'\xcb\x9c',     # ˜
        0x99 : b'\xe2\x84\xa2', # ™
        0x9a : b'\xc5\xa1',     # š
        0x9b : b'\xe2\x80\xba', # ›
        0x9c : b'\xc5\x93',     # œ
        0x9e : b'\xc5\xbe',     # ž
        0x9f : b'\xc5\xb8',     # Ÿ
        0xa0 : b'\xc2\xa0',     # (non-breaking space)
        0xa1 : b'\xc2\xa1',     # ¡
        0xa2 : b'\xc2\xa2',     # ¢
        0xa3 : b'\xc2\xa3',     # £
        0xa4 : b'\xc2\xa4',     # ¤
        0xa5 : b'\xc2\xa5',     # ¥
        0xa6 : b'\xc2\xa6',     # ¦
        0xa7 : b'\xc2\xa7',     # §
        0xa8 : b'\xc2\xa8',     # ¨
        0xa9 : b'\xc2\xa9',     # ©
        0xaa : b'\xc2\xaa',     # ª
        0xab : b'\xc2\xab',     # «
        0xac : b'\xc2\xac',     # ¬
        0xad : b'\xc2\xad',     # (soft hyphen)
        0xae : b'\xc2\xae',     # ®
        0xaf : b'\xc2\xaf',     # ¯
        0xb0 : b'\xc2\xb0',     # °
        0xb1 : b'\xc2\xb1',     # ±
        0xb2 : b'\xc2\xb2',     # ²
        0xb3 : b'\xc2\xb3',     # ³
        0xb4 : b'\xc2\xb4',     # ´
        0xb5 : b'\xc2\xb5',     # µ
        0xb6 : b'\xc2\xb6',     # ¶
        0xb7 : b'\xc2\xb7',     # ·
        0xb8 : b'\xc2\xb8',     # ¸
        0xb9 : b'\xc2\xb9',     # ¹
        0xba : b'\xc2\xba',     # º
        0xbb : b'\xc2\xbb',     # »
        0xbc : b'\xc2\xbc',     # ¼
        0xbd : b'\xc2\xbd',     # ½
        0xbe : b'\xc2\xbe',     # ¾
        0xbf : b'\xc2\xbf',     # ¿
        0xc0 : b'\xc3\x80',     # À
        0xc1 : b'\xc3\x81',     # Á
        0xc2 : b'\xc3\x82',     # Â
        0xc3 : b'\xc3\x83',     # Ã
        0xc4 : b'\xc3\x84',     # Ä
        0xc5 : b'\xc3\x85',     # Å
        0xc6 : b'\xc3\x86',     # Æ
        0xc7 : b'\xc3\x87',     # Ç
        0xc8 : b'\xc3\x88',     # È
        0xc9 : b'\xc3\x89',     # É
        0xca : b'\xc3\x8a',     # Ê
        0xcb : b'\xc3\x8b',     # Ë
        0xcc : b'\xc3\x8c',     # Ì
        0xcd : b'\xc3\x8d',     # Í
        0xce : b'\xc3\x8e',     # Î
        0xcf : b'\xc3\x8f',     # Ï
        0xd0 : b'\xc3\x90',     # Ð
        0xd1 : b'\xc3\x91',     # Ñ
        0xd2 : b'\xc3\x92',     # Ò
        0xd3 : b'\xc3\x93',     # Ó
        0xd4 : b'\xc3\x94',     # Ô
        0xd5 : b'\xc3\x95',     # Õ
        0xd6 : b'\xc3\x96',     # Ö
        0xd7 : b'\xc3\x97',     # ×
        0xd8 : b'\xc3\x98',     # Ø
        0xd9 : b'\xc3\x99',     # Ù
        0xda : b'\xc3\x9a',     # Ú
        0xdb : b'\xc3\x9b',     # Û
        0xdc : b'\xc3\x9c',     # Ü
        0xdd : b'\xc3\x9d',     # Ý
        0xde : b'\xc3\x9e',     # Þ
        0xdf : b'\xc3\x9f',     # ß
        0xe0 : b'\xc3\xa0',     # à
        0xe1 : b'\xc3\xa1',     # á (fixed: the original read b'\xa1', which is not valid UTF-8 for á)
        0xe2 : b'\xc3\xa2',     # â
        0xe3 : b'\xc3\xa3',     # ã
        0xe4 : b'\xc3\xa4',     # ä
        0xe5 : b'\xc3\xa5',     # å
        0xe6 : b'\xc3\xa6',     # æ
        0xe7 : b'\xc3\xa7',     # ç
        0xe8 : b'\xc3\xa8',     # è
        0xe9 : b'\xc3\xa9',     # é
        0xea : b'\xc3\xaa',     # ê
        0xeb : b'\xc3\xab',     # ë
        0xec : b'\xc3\xac',     # ì
        0xed : b'\xc3\xad',     # í
        0xee : b'\xc3\xae',     # î
        0xef : b'\xc3\xaf',     # ï
        0xf0 : b'\xc3\xb0',     # ð
        0xf1 : b'\xc3\xb1',     # ñ
        0xf2 : b'\xc3\xb2',     # ò
        0xf3 : b'\xc3\xb3',     # ó
        0xf4 : b'\xc3\xb4',     # ô
        0xf5 : b'\xc3\xb5',     # õ
        0xf6 : b'\xc3\xb6',     # ö
        0xf7 : b'\xc3\xb7',     # ÷
        0xf8 : b'\xc3\xb8',     # ø
        0xf9 : b'\xc3\xb9',     # ù
        0xfa : b'\xc3\xba',     # ú
        0xfb : b'\xc3\xbb',     # û
        0xfc : b'\xc3\xbc',     # ü
        0xfd : b'\xc3\xbd',     # ý
        0xfe : b'\xc3\xbe',     # þ
        }

    MULTIBYTE_MARKERS_AND_SIZES = [
        (0xc2, 0xdf, 2), # 2-byte characters start with a byte C2-DF
        (0xe0, 0xef, 3), # 3-byte characters start with E0-EF
        (0xf0, 0xf4, 4), # 4-byte characters start with F0-F4
        ]

    FIRST_MULTIBYTE_MARKER = MULTIBYTE_MARKERS_AND_SIZES[0][0]
    LAST_MULTIBYTE_MARKER = MULTIBYTE_MARKERS_AND_SIZES[-1][1]

    @classmethod
    def detwingle(cls, in_bytes, main_encoding="utf8",
                  embedded_encoding="windows-1252"):
        """Fix characters from one encoding embedded in some other encoding.

        Currently the only situation supported is Windows-1252 (or its
        subset ISO-8859-1), embedded in UTF-8.

        The input must be a bytestring. If you've already converted
        the document to Unicode, you're too late.

        The output is a bytestring in which `embedded_encoding`
        characters have been converted to their `main_encoding`
        equivalents.
        """
        if embedded_encoding.replace('_', '-').lower() not in (
            'windows-1252', 'windows_1252'):
            raise NotImplementedError(
                "Windows-1252 and ISO-8859-1 are the only currently supported "
                "embedded encodings.")

        if main_encoding.lower() not in ('utf8', 'utf-8'):
            raise NotImplementedError(
                "UTF-8 is the only currently supported main encoding.")

        byte_chunks = []

        chunk_start = 0
        pos = 0
        while pos < len(in_bytes):
            byte = in_bytes[pos]
            if not isinstance(byte, int):
                # Python 2.x
                byte = ord(byte)
            if (byte >= cls.FIRST_MULTIBYTE_MARKER
                and byte <= cls.LAST_MULTIBYTE_MARKER):
                # This is the start of a UTF-8 multibyte character. Skip
                # to the end.
                for start, end, size in cls.MULTIBYTE_MARKERS_AND_SIZES:
                    if byte >= start and byte <= end:
                        pos += size
                        break
            elif byte >= 0x80 and byte in cls.WINDOWS_1252_TO_UTF8:
                # We found a Windows-1252 character!
                # Save the string up to this point as a chunk.
                byte_chunks.append(in_bytes[chunk_start:pos])

                # Now translate the Windows-1252 character into UTF-8
                # and add it as another, one-byte chunk.
                byte_chunks.append(cls.WINDOWS_1252_TO_UTF8[byte])
                pos += 1
                chunk_start = pos
            else:
                # Go on to the next character.
                pos += 1
        if chunk_start == 0:
            # The string is unchanged.
            return in_bytes
        else:
            # Store the final chunk.
            byte_chunks.append(in_bytes[chunk_start:])
        return b''.join(byte_chunks)
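A hedged usage sketch, mirroring the standard detwingle() example from the Beautiful Soup documentation: it rescues Windows-1252 smart quotes pasted into an otherwise UTF-8 bytestring.

from bs4 import UnicodeDammit
snowmen = (u"\N{SNOWMAN}" * 3).encode("utf8")   # pure UTF-8 bytes
quote = (u"\N{LEFT DOUBLE QUOTATION MARK}I like snowmen!"
         u"\N{RIGHT DOUBLE QUOTATION MARK}").encode("windows_1252")
doc = snowmen + quote                            # mixed encodings
fixed = UnicodeDammit.detwingle(doc)             # now decodable as UTF-8
print(fixed.decode("utf8"))                      # ☃☃☃“I like snowmen!”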
@@ -1,204 +0,0 @@
"""Diagnostic functions, mainly for use when doing tech support."""
import cProfile
from StringIO import StringIO
from HTMLParser import HTMLParser
import bs4
from bs4 import BeautifulSoup, __version__
from bs4.builder import builder_registry

import os
import pstats
import random
import tempfile
import time
import traceback
import sys

def diagnose(data):
    """Diagnostic suite for isolating common problems."""
    print "Diagnostic running on Beautiful Soup %s" % __version__
    print "Python version %s" % sys.version

    basic_parsers = ["html.parser", "html5lib", "lxml"]
    # Iterate over a copy: removing from the list being looped over
    # would otherwise skip elements.
    for name in list(basic_parsers):
        for builder in builder_registry.builders:
            if name in builder.features:
                break
        else:
            basic_parsers.remove(name)
            print (
                "I noticed that %s is not installed. Installing it may help." %
                name)

    if 'lxml' in basic_parsers:
        basic_parsers.append(["lxml", "xml"])
        from lxml import etree
        print "Found lxml version %s" % ".".join(map(str,etree.LXML_VERSION))

    if 'html5lib' in basic_parsers:
        import html5lib
        print "Found html5lib version %s" % html5lib.__version__

    if hasattr(data, 'read'):
        data = data.read()
    elif os.path.exists(data):
        print '"%s" looks like a filename. Reading data from the file.' % data
        data = open(data).read()
    elif data.startswith("http:") or data.startswith("https:"):
        print '"%s" looks like a URL. Beautiful Soup is not an HTTP client.' % data
        print "You need to use some other library to get the document behind the URL, and feed that document to Beautiful Soup."
        return
    print

    for parser in basic_parsers:
        print "Trying to parse your markup with %s" % parser
        success = False
        try:
            soup = BeautifulSoup(data, parser)
            success = True
        except Exception, e:
            print "%s could not parse the markup." % parser
            traceback.print_exc()
        if success:
            print "Here's what %s did with the markup:" % parser
            print soup.prettify()

        print "-" * 80

def lxml_trace(data, html=True, **kwargs):
    """Print out the lxml events that occur during parsing.

    This lets you see how lxml parses a document when no Beautiful
    Soup code is running.
    """
    from lxml import etree
    for event, element in etree.iterparse(StringIO(data), html=html, **kwargs):
        print("%s, %4s, %s" % (event, element.tag, element.text))

class AnnouncingParser(HTMLParser):
    """Announces HTMLParser parse events, without doing anything else."""

    def _p(self, s):
        print(s)

    def handle_starttag(self, name, attrs):
        self._p("%s START" % name)

    def handle_endtag(self, name):
        self._p("%s END" % name)

    def handle_data(self, data):
        self._p("%s DATA" % data)

    def handle_charref(self, name):
        self._p("%s CHARREF" % name)

    def handle_entityref(self, name):
        self._p("%s ENTITYREF" % name)

    def handle_comment(self, data):
        self._p("%s COMMENT" % data)

    def handle_decl(self, data):
        self._p("%s DECL" % data)

    def unknown_decl(self, data):
        self._p("%s UNKNOWN-DECL" % data)

    def handle_pi(self, data):
        self._p("%s PI" % data)

def htmlparser_trace(data):
    """Print out the HTMLParser events that occur during parsing.

    This lets you see how HTMLParser parses a document when no
    Beautiful Soup code is running.
    """
    parser = AnnouncingParser()
    parser.feed(data)
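A quick illustration (hypothetical input) of what htmlparser_trace prints:

htmlparser_trace("<p>foo<b>bar</b></p>")
# p START
# foo DATA
# b START
# bar DATA
# b END
# p END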
_vowels = "aeiou"
_consonants = "bcdfghjklmnpqrstvwxyz"

def rword(length=5):
    "Generate a random word-like string."
    s = ''
    for i in range(length):
        if i % 2 == 0:
            t = _consonants
        else:
            t = _vowels
        s += random.choice(t)
    return s

def rsentence(length=4):
    "Generate a random sentence-like string."
    return " ".join(rword(random.randint(4,9)) for i in range(length))

def rdoc(num_elements=1000):
    """Randomly generate an invalid HTML document."""
    tag_names = ['p', 'div', 'span', 'i', 'b', 'script', 'table']
    elements = []
    for i in range(num_elements):
        choice = random.randint(0,3)
        if choice == 0:
            # New tag.
            tag_name = random.choice(tag_names)
            elements.append("<%s>" % tag_name)
        elif choice == 1:
            elements.append(rsentence(random.randint(1,4)))
        elif choice == 2:
            # Close a tag.
            tag_name = random.choice(tag_names)
            elements.append("</%s>" % tag_name)
    return "<html>" + "\n".join(elements) + "</html>"

def benchmark_parsers(num_elements=100000):
    """Very basic head-to-head performance benchmark."""
    print "Comparative parser benchmark on Beautiful Soup %s" % __version__
    data = rdoc(num_elements)
    print "Generated a large invalid HTML document (%d bytes)." % len(data)

    for parser in ["lxml", ["lxml", "html"], "html5lib", "html.parser"]:
        success = False
        try:
            a = time.time()
            soup = BeautifulSoup(data, parser)
            b = time.time()
            success = True
        except Exception, e:
            print "%s could not parse the markup." % parser
            traceback.print_exc()
        if success:
            print "BS4+%s parsed the markup in %.2fs." % (parser, b-a)

    from lxml import etree
    a = time.time()
    etree.HTML(data)
    b = time.time()
    print "Raw lxml parsed the markup in %.2fs." % (b-a)

    import html5lib
    parser = html5lib.HTMLParser()
    a = time.time()
    parser.parse(data)
    b = time.time()
    print "Raw html5lib parsed the markup in %.2fs." % (b-a)

def profile(num_elements=100000, parser="lxml"):

    filehandle = tempfile.NamedTemporaryFile()
    filename = filehandle.name

    data = rdoc(num_elements)
    vars = dict(bs4=bs4, data=data, parser=parser)
    cProfile.runctx('bs4.BeautifulSoup(data, parser)' , vars, vars, filename)

    stats = pstats.Stats(filename)
    # stats.strip_dirs()
    stats.sort_stats("cumulative")
    stats.print_stats('_html5lib|bs4', 50)

if __name__ == '__main__':
    diagnose(sys.stdin.read())
File diff suppressed because it is too large
@@ -1,592 +0,0 @@
"""Helper classes for tests."""

import copy
import functools
import unittest
from unittest import TestCase
from bs4 import BeautifulSoup
from bs4.element import (
    CharsetMetaAttributeValue,
    Comment,
    ContentMetaAttributeValue,
    Doctype,
    SoupStrainer,
)

from bs4.builder import HTMLParserTreeBuilder
default_builder = HTMLParserTreeBuilder


class SoupTest(unittest.TestCase):

    @property
    def default_builder(self):
        return default_builder()

    def soup(self, markup, **kwargs):
        """Build a Beautiful Soup object from markup."""
        builder = kwargs.pop('builder', self.default_builder)
        return BeautifulSoup(markup, builder=builder, **kwargs)

    def document_for(self, markup):
        """Turn an HTML fragment into a document.

        The details depend on the builder.
        """
        return self.default_builder.test_fragment_to_document(markup)

    def assertSoupEquals(self, to_parse, compare_parsed_to=None):
        builder = self.default_builder
        obj = BeautifulSoup(to_parse, builder=builder)
        if compare_parsed_to is None:
            compare_parsed_to = to_parse

        self.assertEqual(obj.decode(), self.document_for(compare_parsed_to))
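For orientation, a hypothetical subclass (not part of the file) showing how these helpers are used:

class MyParserTest(SoupTest):
    def test_unclosed_paragraph(self):
        # Parses "<p>foo" and asserts the builder closes the tag.
        self.assertSoupEquals("<p>foo", "<p>foo</p>")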
||||
class HTMLTreeBuilderSmokeTest(object):
|
||||
|
||||
"""A basic test of a treebuilder's competence.
|
||||
|
||||
Any HTML treebuilder, present or future, should be able to pass
|
||||
these tests. With invalid markup, there's room for interpretation,
|
||||
and different parsers can handle it differently. But with the
|
||||
markup in these tests, there's not much room for interpretation.
|
||||
"""
|
||||
|
||||
def assertDoctypeHandled(self, doctype_fragment):
|
||||
"""Assert that a given doctype string is handled correctly."""
|
||||
doctype_str, soup = self._document_with_doctype(doctype_fragment)
|
||||
|
||||
# Make sure a Doctype object was created.
|
||||
doctype = soup.contents[0]
|
||||
self.assertEqual(doctype.__class__, Doctype)
|
||||
self.assertEqual(doctype, doctype_fragment)
|
||||
self.assertEqual(str(soup)[:len(doctype_str)], doctype_str)
|
||||
|
||||
# Make sure that the doctype was correctly associated with the
|
||||
# parse tree and that the rest of the document parsed.
|
||||
self.assertEqual(soup.p.contents[0], 'foo')
|
||||
|
||||
def _document_with_doctype(self, doctype_fragment):
|
||||
"""Generate and parse a document with the given doctype."""
|
||||
doctype = '<!DOCTYPE %s>' % doctype_fragment
|
||||
markup = doctype + '\n<p>foo</p>'
|
||||
soup = self.soup(markup)
|
||||
return doctype, soup
|
||||
|
||||
def test_normal_doctypes(self):
|
||||
"""Make sure normal, everyday HTML doctypes are handled correctly."""
|
||||
self.assertDoctypeHandled("html")
|
||||
self.assertDoctypeHandled(
|
||||
'html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"')
|
||||
|
||||
def test_empty_doctype(self):
|
||||
soup = self.soup("<!DOCTYPE>")
|
||||
doctype = soup.contents[0]
|
||||
self.assertEqual("", doctype.strip())
|
||||
|
||||
def test_public_doctype_with_url(self):
|
||||
doctype = 'html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"'
|
||||
self.assertDoctypeHandled(doctype)
|
||||
|
||||
def test_system_doctype(self):
|
||||
self.assertDoctypeHandled('foo SYSTEM "http://www.example.com/"')
|
||||
|
||||
def test_namespaced_system_doctype(self):
|
||||
# We can handle a namespaced doctype with a system ID.
|
||||
self.assertDoctypeHandled('xsl:stylesheet SYSTEM "htmlent.dtd"')
|
||||
|
||||
def test_namespaced_public_doctype(self):
|
||||
# Test a namespaced doctype with a public id.
|
||||
self.assertDoctypeHandled('xsl:stylesheet PUBLIC "htmlent.dtd"')
|
||||
|
||||
def test_real_xhtml_document(self):
|
||||
"""A real XHTML document should come out more or less the same as it went in."""
|
||||
markup = b"""<?xml version="1.0" encoding="utf-8"?>
|
||||
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN">
|
||||
<html xmlns="http://www.w3.org/1999/xhtml">
|
||||
<head><title>Hello.</title></head>
|
||||
<body>Goodbye.</body>
|
||||
</html>"""
|
||||
soup = self.soup(markup)
|
||||
self.assertEqual(
|
||||
soup.encode("utf-8").replace(b"\n", b""),
|
||||
markup.replace(b"\n", b""))
|
||||
|
||||
def test_deepcopy(self):
|
||||
"""Make sure you can copy the tree builder.
|
||||
|
||||
This is important because the builder is part of a
|
||||
BeautifulSoup object, and we want to be able to copy that.
|
||||
"""
|
||||
copy.deepcopy(self.default_builder)
|
||||
|
||||
def test_p_tag_is_never_empty_element(self):
|
||||
"""A <p> tag is never designated as an empty-element tag.
|
||||
|
||||
Even if the markup shows it as an empty-element tag, it
|
||||
shouldn't be presented that way.
|
||||
"""
|
||||
soup = self.soup("<p/>")
|
||||
self.assertFalse(soup.p.is_empty_element)
|
||||
self.assertEqual(str(soup.p), "<p></p>")
|
||||
|
||||
def test_unclosed_tags_get_closed(self):
|
||||
"""A tag that's not closed by the end of the document should be closed.
|
||||
|
||||
This applies to all tags except empty-element tags.
|
||||
"""
|
||||
self.assertSoupEquals("<p>", "<p></p>")
|
||||
self.assertSoupEquals("<b>", "<b></b>")
|
||||
|
||||
self.assertSoupEquals("<br>", "<br/>")
|
||||
|
||||
def test_br_is_always_empty_element_tag(self):
|
||||
"""A <br> tag is designated as an empty-element tag.
|
||||
|
||||
Some parsers treat <br></br> as one <br/> tag, some parsers as
|
||||
two tags, but it should always be an empty-element tag.
|
||||
"""
|
||||
soup = self.soup("<br></br>")
|
||||
self.assertTrue(soup.br.is_empty_element)
|
||||
self.assertEqual(str(soup.br), "<br/>")
|
||||
|
||||
def test_nested_formatting_elements(self):
|
||||
self.assertSoupEquals("<em><em></em></em>")
|
||||
|
||||
def test_comment(self):
|
||||
# Comments are represented as Comment objects.
|
||||
markup = "<p>foo<!--foobar-->baz</p>"
|
||||
self.assertSoupEquals(markup)
|
||||
|
||||
soup = self.soup(markup)
|
||||
comment = soup.find(text="foobar")
|
||||
self.assertEqual(comment.__class__, Comment)
|
||||
|
||||
# The comment is properly integrated into the tree.
|
||||
foo = soup.find(text="foo")
|
||||
self.assertEqual(comment, foo.next_element)
|
||||
baz = soup.find(text="baz")
|
||||
self.assertEqual(comment, baz.previous_element)
|
||||
|
||||
def test_preserved_whitespace_in_pre_and_textarea(self):
|
||||
"""Whitespace must be preserved in <pre> and <textarea> tags."""
|
||||
self.assertSoupEquals("<pre> </pre>")
|
||||
self.assertSoupEquals("<textarea> woo </textarea>")
|
||||
|
||||
def test_nested_inline_elements(self):
|
||||
"""Inline elements can be nested indefinitely."""
|
||||
b_tag = "<b>Inside a B tag</b>"
|
||||
self.assertSoupEquals(b_tag)
|
||||
|
||||
nested_b_tag = "<p>A <i>nested <b>tag</b></i></p>"
|
||||
self.assertSoupEquals(nested_b_tag)
|
||||
|
||||
double_nested_b_tag = "<p>A <a>doubly <i>nested <b>tag</b></i></a></p>"
|
||||
self.assertSoupEquals(nested_b_tag)
|
||||
|
||||
def test_nested_block_level_elements(self):
|
||||
"""Block elements can be nested."""
|
||||
soup = self.soup('<blockquote><p><b>Foo</b></p></blockquote>')
|
||||
blockquote = soup.blockquote
|
||||
self.assertEqual(blockquote.p.b.string, 'Foo')
|
||||
self.assertEqual(blockquote.b.string, 'Foo')
|
||||
|
||||
def test_correctly_nested_tables(self):
|
||||
"""One table can go inside another one."""
|
||||
markup = ('<table id="1">'
|
||||
'<tr>'
|
||||
"<td>Here's another table:"
|
||||
'<table id="2">'
|
||||
'<tr><td>foo</td></tr>'
|
||||
'</table></td>')
|
||||
|
||||
self.assertSoupEquals(
|
||||
markup,
|
||||
'<table id="1"><tr><td>Here\'s another table:'
|
||||
'<table id="2"><tr><td>foo</td></tr></table>'
|
||||
'</td></tr></table>')
|
||||
|
||||
self.assertSoupEquals(
|
||||
"<table><thead><tr><td>Foo</td></tr></thead>"
|
||||
"<tbody><tr><td>Bar</td></tr></tbody>"
|
||||
"<tfoot><tr><td>Baz</td></tr></tfoot></table>")
|
||||
|
||||
def test_deeply_nested_multivalued_attribute(self):
|
||||
# html5lib can set the attributes of the same tag many times
|
||||
# as it rearranges the tree. This has caused problems with
|
||||
# multivalued attributes.
|
||||
markup = '<table><div><div class="css"></div></div></table>'
|
||||
soup = self.soup(markup)
|
||||
self.assertEqual(["css"], soup.div.div['class'])
|
||||
|
||||
def test_angle_brackets_in_attribute_values_are_escaped(self):
|
||||
self.assertSoupEquals('<a b="<a>"></a>', '<a b="<a>"></a>')
|
||||
|
||||
def test_entities_in_attributes_converted_to_unicode(self):
|
||||
expect = u'<p id="pi\N{LATIN SMALL LETTER N WITH TILDE}ata"></p>'
|
||||
self.assertSoupEquals('<p id="piñata"></p>', expect)
|
||||
self.assertSoupEquals('<p id="piñata"></p>', expect)
|
||||
self.assertSoupEquals('<p id="piñata"></p>', expect)
|
||||
self.assertSoupEquals('<p id="piñata"></p>', expect)
|
||||
|
||||
def test_entities_in_text_converted_to_unicode(self):
|
||||
expect = u'<p>pi\N{LATIN SMALL LETTER N WITH TILDE}ata</p>'
|
||||
self.assertSoupEquals("<p>piñata</p>", expect)
|
||||
self.assertSoupEquals("<p>piñata</p>", expect)
|
||||
self.assertSoupEquals("<p>piñata</p>", expect)
|
||||
self.assertSoupEquals("<p>piñata</p>", expect)
|
||||
|
||||
def test_quot_entity_converted_to_quotation_mark(self):
|
||||
self.assertSoupEquals("<p>I said "good day!"</p>",
|
||||
'<p>I said "good day!"</p>')
|
||||
|
||||
def test_out_of_range_entity(self):
|
||||
expect = u"\N{REPLACEMENT CHARACTER}"
|
||||
self.assertSoupEquals("�", expect)
|
||||
self.assertSoupEquals("�", expect)
|
||||
self.assertSoupEquals("�", expect)
|
||||
|
||||
def test_multipart_strings(self):
|
||||
"Mostly to prevent a recurrence of a bug in the html5lib treebuilder."
|
||||
soup = self.soup("<html><h2>\nfoo</h2><p></p></html>")
|
||||
self.assertEqual("p", soup.h2.string.next_element.name)
|
||||
self.assertEqual("p", soup.p.name)
|
||||
|
||||
def test_basic_namespaces(self):
|
||||
"""Parsers don't need to *understand* namespaces, but at the
|
||||
very least they should not choke on namespaces or lose
|
||||
data."""
|
||||
|
||||
markup = b'<html xmlns="http://www.w3.org/1999/xhtml" xmlns:mathml="http://www.w3.org/1998/Math/MathML" xmlns:svg="http://www.w3.org/2000/svg"><head></head><body><mathml:msqrt>4</mathml:msqrt><b svg:fill="red"></b></body></html>'
|
||||
soup = self.soup(markup)
|
||||
self.assertEqual(markup, soup.encode())
|
||||
html = soup.html
|
||||
self.assertEqual('http://www.w3.org/1999/xhtml', soup.html['xmlns'])
|
||||
self.assertEqual(
|
||||
'http://www.w3.org/1998/Math/MathML', soup.html['xmlns:mathml'])
|
||||
self.assertEqual(
|
||||
'http://www.w3.org/2000/svg', soup.html['xmlns:svg'])
|
||||
|
||||
def test_multivalued_attribute_value_becomes_list(self):
|
||||
markup = b'<a class="foo bar">'
|
||||
soup = self.soup(markup)
|
||||
self.assertEqual(['foo', 'bar'], soup.a['class'])
|
||||
|
||||
#
|
||||
# Generally speaking, tests below this point are more tests of
|
||||
# Beautiful Soup than tests of the tree builders. But parsers are
|
||||
# weird, so we run these tests separately for every tree builder
|
||||
# to detect any differences between them.
|
||||
#
|
||||
|
||||
def test_can_parse_unicode_document(self):
|
||||
# A seemingly innocuous document... but it's in Unicode! And
|
||||
# it contains characters that can't be represented in the
|
||||
# encoding found in the declaration! The horror!
|
||||
markup = u'<html><head><meta encoding="euc-jp"></head><body>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</body>'
|
||||
soup = self.soup(markup)
|
||||
self.assertEqual(u'Sacr\xe9 bleu!', soup.body.string)
|
||||
|
||||
def test_soupstrainer(self):
|
||||
"""Parsers should be able to work with SoupStrainers."""
|
||||
strainer = SoupStrainer("b")
|
||||
soup = self.soup("A <b>bold</b> <meta/> <i>statement</i>",
|
||||
parse_only=strainer)
|
||||
self.assertEqual(soup.decode(), "<b>bold</b>")
|
||||
|
||||
def test_single_quote_attribute_values_become_double_quotes(self):
|
||||
self.assertSoupEquals("<foo attr='bar'></foo>",
|
||||
'<foo attr="bar"></foo>')
|
||||
|
||||
def test_attribute_values_with_nested_quotes_are_left_alone(self):
|
||||
text = """<foo attr='bar "brawls" happen'>a</foo>"""
|
||||
self.assertSoupEquals(text)
|
||||
|
||||
def test_attribute_values_with_double_nested_quotes_get_quoted(self):
|
||||
text = """<foo attr='bar "brawls" happen'>a</foo>"""
|
||||
soup = self.soup(text)
|
||||
soup.foo['attr'] = 'Brawls happen at "Bob\'s Bar"'
|
||||
self.assertSoupEquals(
|
||||
soup.foo.decode(),
|
||||
"""<foo attr="Brawls happen at "Bob\'s Bar"">a</foo>""")
|
||||
|
||||
def test_ampersand_in_attribute_value_gets_escaped(self):
|
||||
self.assertSoupEquals('<this is="really messed up & stuff"></this>',
|
||||
'<this is="really messed up & stuff"></this>')
|
||||
|
||||
self.assertSoupEquals(
|
||||
'<a href="http://example.org?a=1&b=2;3">foo</a>',
|
||||
'<a href="http://example.org?a=1&b=2;3">foo</a>')
|
||||
|
||||
def test_escaped_ampersand_in_attribute_value_is_left_alone(self):
|
||||
self.assertSoupEquals('<a href="http://example.org?a=1&b=2;3"></a>')
|
||||
|
||||
def test_entities_in_strings_converted_during_parsing(self):
|
||||
# Both XML and HTML entities are converted to Unicode characters
|
||||
# during parsing.
|
||||
text = "<p><<sacré bleu!>></p>"
|
||||
expected = u"<p><<sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>></p>"
|
||||
self.assertSoupEquals(text, expected)
|
||||
|
||||
    def test_smart_quotes_converted_on_the_way_in(self):
        # Microsoft smart quotes are converted to Unicode characters during
        # parsing.
        quote = b"<p>\x91Foo\x92</p>"
        soup = self.soup(quote)
        self.assertEqual(
            soup.p.string,
            u"\N{LEFT SINGLE QUOTATION MARK}Foo\N{RIGHT SINGLE QUOTATION MARK}")

    def test_non_breaking_spaces_converted_on_the_way_in(self):
        soup = self.soup("<a>&nbsp;&nbsp;</a>")
        self.assertEqual(soup.a.string, u"\N{NO-BREAK SPACE}" * 2)

    def test_entities_converted_on_the_way_out(self):
        text = "<p>&lt;&lt;sacr&eacute; bleu!&gt;&gt;</p>"
        expected = u"<p>&lt;&lt;sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</p>".encode("utf-8")
        soup = self.soup(text)
        self.assertEqual(soup.p.encode("utf-8"), expected)

    def test_real_iso_latin_document(self):
        # Smoke test of interrelated functionality, using an
        # easy-to-understand document.

        # Here it is in Unicode. Note that it claims to be in ISO-Latin-1.
        unicode_html = u'<html><head><meta content="text/html; charset=ISO-Latin-1" http-equiv="Content-type"/></head><body><p>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</p></body></html>'

        # That's because we're going to encode it into ISO-Latin-1, and use
        # that to test.
        iso_latin_html = unicode_html.encode("iso-8859-1")

        # Parse the ISO-Latin-1 HTML.
        soup = self.soup(iso_latin_html)
        # Encode it to UTF-8.
        result = soup.encode("utf-8")

        # What do we expect the result to look like? Well, it would
        # look like unicode_html, except that the META tag would say
        # UTF-8 instead of ISO-Latin-1.
        expected = unicode_html.replace("ISO-Latin-1", "utf-8")

        # And, of course, it would be in UTF-8, not Unicode.
        expected = expected.encode("utf-8")

        # Ta-da!
        self.assertEqual(result, expected)

    def test_real_shift_jis_document(self):
        # Smoke test to make sure the parser can handle a document in
        # Shift-JIS encoding, without choking.
        shift_jis_html = (
            b'<html><head></head><body><pre>'
            b'\x82\xb1\x82\xea\x82\xcdShift-JIS\x82\xc5\x83R\x81[\x83f'
            b'\x83B\x83\x93\x83O\x82\xb3\x82\xea\x82\xbd\x93\xfa\x96{\x8c'
            b'\xea\x82\xcc\x83t\x83@\x83C\x83\x8b\x82\xc5\x82\xb7\x81B'
            b'</pre></body></html>')
        unicode_html = shift_jis_html.decode("shift-jis")
        soup = self.soup(unicode_html)

        # Make sure the parse tree is correctly encoded to various
        # encodings.
        self.assertEqual(soup.encode("utf-8"), unicode_html.encode("utf-8"))
        self.assertEqual(soup.encode("euc_jp"), unicode_html.encode("euc_jp"))

    def test_real_hebrew_document(self):
        # A real-world test to make sure we can convert ISO-8859-8 (a
        # Hebrew encoding) to UTF-8.
        hebrew_document = b'<html><head><title>Hebrew (ISO 8859-8) in Visual Directionality</title></head><body><h1>Hebrew (ISO 8859-8) in Visual Directionality</h1>\xed\xe5\xec\xf9</body></html>'
        soup = self.soup(
            hebrew_document, from_encoding="iso8859-8")
        self.assertEqual(soup.original_encoding, 'iso8859-8')
        self.assertEqual(
            soup.encode('utf-8'),
            hebrew_document.decode("iso8859-8").encode("utf-8"))

    def test_meta_tag_reflects_current_encoding(self):
        # Here's the <meta> tag saying that a document is
        # encoded in Shift-JIS.
        meta_tag = ('<meta content="text/html; charset=x-sjis" '
                    'http-equiv="Content-type"/>')

        # Here's a document incorporating that meta tag.
        shift_jis_html = (
            '<html><head>\n%s\n'
            '<meta http-equiv="Content-language" content="ja"/>'
            '</head><body>Shift-JIS markup goes here.') % meta_tag
        soup = self.soup(shift_jis_html)

        # Parse the document, and the charset is seemingly unaffected.
        parsed_meta = soup.find('meta', {'http-equiv': 'Content-type'})
        content = parsed_meta['content']
        self.assertEqual('text/html; charset=x-sjis', content)

        # But that value is actually a ContentMetaAttributeValue object.
        self.assertTrue(isinstance(content, ContentMetaAttributeValue))

        # And it will take on a value that reflects its current
        # encoding.
        self.assertEqual('text/html; charset=utf8', content.encode("utf8"))

        # For the rest of the story, see TestSubstitutions in
        # test_tree.py.

    def test_html5_style_meta_tag_reflects_current_encoding(self):
        # Here's the <meta> tag saying that a document is
        # encoded in Shift-JIS.
        meta_tag = ('<meta id="encoding" charset="x-sjis" />')

        # Here's a document incorporating that meta tag.
        shift_jis_html = (
            '<html><head>\n%s\n'
            '<meta http-equiv="Content-language" content="ja"/>'
            '</head><body>Shift-JIS markup goes here.') % meta_tag
        soup = self.soup(shift_jis_html)

        # Parse the document, and the charset is seemingly unaffected.
        parsed_meta = soup.find('meta', id="encoding")
        charset = parsed_meta['charset']
        self.assertEqual('x-sjis', charset)

        # But that value is actually a CharsetMetaAttributeValue object.
        self.assertTrue(isinstance(charset, CharsetMetaAttributeValue))

        # And it will take on a value that reflects its current
        # encoding.
        self.assertEqual('utf8', charset.encode("utf8"))

    def test_tag_with_no_attributes_can_have_attributes_added(self):
        data = self.soup("<a>text</a>")
        data.a['foo'] = 'bar'
        self.assertEqual('<a foo="bar">text</a>', data.a.decode())

class XMLTreeBuilderSmokeTest(object):

    def test_docstring_generated(self):
        soup = self.soup("<root/>")
        self.assertEqual(
            soup.encode(), b'<?xml version="1.0" encoding="utf-8"?>\n<root/>')

    def test_real_xhtml_document(self):
        """A real XHTML document should come out *exactly* the same as it went in."""
        markup = b"""<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN">
<html xmlns="http://www.w3.org/1999/xhtml">
<head><title>Hello.</title></head>
<body>Goodbye.</body>
</html>"""
        soup = self.soup(markup)
        self.assertEqual(
            soup.encode("utf-8"), markup)

    def test_formatter_processes_script_tag_for_xml_documents(self):
        doc = """
  <script type="text/javascript">
  </script>
"""
        soup = BeautifulSoup(doc, "xml")
        # lxml would have stripped this while parsing, but we can add
        # it later.
        soup.script.string = 'console.log("< < hey > > ");'
        encoded = soup.encode()
        self.assertTrue(b"&lt; &lt; hey &gt; &gt;" in encoded)

    def test_can_parse_unicode_document(self):
        markup = u'<?xml version="1.0" encoding="euc-jp"><root>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</root>'
        soup = self.soup(markup)
        self.assertEqual(u'Sacr\xe9 bleu!', soup.root.string)

    def test_popping_namespaced_tag(self):
        markup = '<rss xmlns:dc="foo"><dc:creator>b</dc:creator><dc:date>2012-07-02T20:33:42Z</dc:date><dc:rights>c</dc:rights><image>d</image></rss>'
        soup = self.soup(markup)
        self.assertEqual(
            unicode(soup.rss), markup)

    def test_docstring_includes_correct_encoding(self):
        soup = self.soup("<root/>")
        self.assertEqual(
            soup.encode("latin1"),
            b'<?xml version="1.0" encoding="latin1"?>\n<root/>')

    def test_large_xml_document(self):
        """A large XML document should come out the same as it went in."""
        markup = (b'<?xml version="1.0" encoding="utf-8"?>\n<root>'
                  + b'0' * (2**12)
                  + b'</root>')
        soup = self.soup(markup)
        self.assertEqual(soup.encode("utf-8"), markup)


    def test_tags_are_empty_element_if_and_only_if_they_are_empty(self):
        self.assertSoupEquals("<p>", "<p/>")
        self.assertSoupEquals("<p>foo</p>")

    def test_namespaces_are_preserved(self):
        markup = '<root xmlns:a="http://example.com/" xmlns:b="http://example.net/"><a:foo>This tag is in the a namespace</a:foo><b:foo>This tag is in the b namespace</b:foo></root>'
        soup = self.soup(markup)
        root = soup.root
        self.assertEqual("http://example.com/", root['xmlns:a'])
        self.assertEqual("http://example.net/", root['xmlns:b'])

    def test_closing_namespaced_tag(self):
        markup = '<p xmlns:dc="http://purl.org/dc/elements/1.1/"><dc:date>20010504</dc:date></p>'
        soup = self.soup(markup)
        self.assertEqual(unicode(soup.p), markup)

    def test_namespaced_attributes(self):
        markup = '<foo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><bar xsi:schemaLocation="http://www.example.com"/></foo>'
        soup = self.soup(markup)
        self.assertEqual(unicode(soup.foo), markup)

    def test_namespaced_attributes_xml_namespace(self):
        markup = '<foo xml:lang="fr">bar</foo>'
        soup = self.soup(markup)
        self.assertEqual(unicode(soup.foo), markup)

class HTML5TreeBuilderSmokeTest(HTMLTreeBuilderSmokeTest):
    """Smoke test for a tree builder that supports HTML5."""

    def test_real_xhtml_document(self):
        # Since XHTML is not HTML5, HTML5 parsers are not tested to handle
        # XHTML documents in any particular way.
        pass

    def test_html_tags_have_namespace(self):
        markup = "<a>"
        soup = self.soup(markup)
        self.assertEqual("http://www.w3.org/1999/xhtml", soup.a.namespace)

    def test_svg_tags_have_namespace(self):
        markup = '<svg><circle/></svg>'
        soup = self.soup(markup)
        namespace = "http://www.w3.org/2000/svg"
        self.assertEqual(namespace, soup.svg.namespace)
        self.assertEqual(namespace, soup.circle.namespace)


    def test_mathml_tags_have_namespace(self):
        markup = '<math><msqrt>5</msqrt></math>'
        soup = self.soup(markup)
        namespace = 'http://www.w3.org/1998/Math/MathML'
        self.assertEqual(namespace, soup.math.namespace)
        self.assertEqual(namespace, soup.msqrt.namespace)

    def test_xml_declaration_becomes_comment(self):
        markup = '<?xml version="1.0" encoding="utf-8"?><html></html>'
        soup = self.soup(markup)
        self.assertTrue(isinstance(soup.contents[0], Comment))
        self.assertEqual(soup.contents[0], '?xml version="1.0" encoding="utf-8"?')
        self.assertEqual("html", soup.contents[0].next_element.name)

def skipIf(condition, reason):
    def nothing(test, *args, **kwargs):
        return None

    def decorator(test_item):
        if condition:
            return nothing
        else:
            return test_item

    return decorator
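
# Illustrative usage sketch of the skipIf decorator above (the flag and
# test names here are hypothetical, not taken from the source): when the
# condition is true, the decorated test is swapped for the no-op `nothing`
# function, so the runner reports it without executing the body.
#
#   @skipIf(
#       not SOME_PARSER_PRESENT,
#       "some-parser seems not to be present, not testing it.")
#   def test_some_parser_behavior(self):
#       ...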
@@ -1 +0,0 @@
"The beautifulsoup tests."
@@ -1,141 +0,0 @@
"""Tests of the builder registry."""

import unittest

from bs4 import BeautifulSoup
from bs4.builder import (
    builder_registry as registry,
    HTMLParserTreeBuilder,
    TreeBuilderRegistry,
)

try:
    from bs4.builder import HTML5TreeBuilder
    HTML5LIB_PRESENT = True
except ImportError:
    HTML5LIB_PRESENT = False

try:
    from bs4.builder import (
        LXMLTreeBuilderForXML,
        LXMLTreeBuilder,
        )
    LXML_PRESENT = True
except ImportError:
    LXML_PRESENT = False


class BuiltInRegistryTest(unittest.TestCase):
    """Test the built-in registry with the default builders registered."""

    def test_combination(self):
        if LXML_PRESENT:
            self.assertEqual(registry.lookup('fast', 'html'),
                             LXMLTreeBuilder)

        if LXML_PRESENT:
            self.assertEqual(registry.lookup('permissive', 'xml'),
                             LXMLTreeBuilderForXML)
        self.assertEqual(registry.lookup('strict', 'html'),
                         HTMLParserTreeBuilder)
        if HTML5LIB_PRESENT:
            self.assertEqual(registry.lookup('html5lib', 'html'),
                             HTML5TreeBuilder)

    def test_lookup_by_markup_type(self):
        if LXML_PRESENT:
            self.assertEqual(registry.lookup('html'), LXMLTreeBuilder)
            self.assertEqual(registry.lookup('xml'), LXMLTreeBuilderForXML)
        else:
            self.assertEqual(registry.lookup('xml'), None)
            if HTML5LIB_PRESENT:
                self.assertEqual(registry.lookup('html'), HTML5TreeBuilder)
            else:
                self.assertEqual(registry.lookup('html'), HTMLParserTreeBuilder)

    def test_named_library(self):
        if LXML_PRESENT:
            self.assertEqual(registry.lookup('lxml', 'xml'),
                             LXMLTreeBuilderForXML)
            self.assertEqual(registry.lookup('lxml', 'html'),
                             LXMLTreeBuilder)
        if HTML5LIB_PRESENT:
            self.assertEqual(registry.lookup('html5lib'),
                             HTML5TreeBuilder)

        self.assertEqual(registry.lookup('html.parser'),
                         HTMLParserTreeBuilder)

    def test_beautifulsoup_constructor_does_lookup(self):
        # You can pass in a string.
        BeautifulSoup("", features="html")
        # Or a list of strings.
        BeautifulSoup("", features=["html", "fast"])

        # You'll get an exception if BS can't find an appropriate
        # builder.
        self.assertRaises(ValueError, BeautifulSoup,
                          "", features="no-such-feature")

class RegistryTest(unittest.TestCase):
    """Test the TreeBuilderRegistry class in general."""

    def setUp(self):
        self.registry = TreeBuilderRegistry()

    def builder_for_features(self, *feature_list):
        cls = type('Builder_' + '_'.join(feature_list),
                   (object,), {'features' : feature_list})

        self.registry.register(cls)
        return cls

    def test_register_with_no_features(self):
        builder = self.builder_for_features()

        # Since the builder advertises no features, you can't find it
        # by looking up features.
        self.assertEqual(self.registry.lookup('foo'), None)

        # But you can find it by doing a lookup with no features, if
        # this happens to be the only registered builder.
        self.assertEqual(self.registry.lookup(), builder)

    def test_register_with_features_makes_lookup_succeed(self):
        builder = self.builder_for_features('foo', 'bar')
        self.assertEqual(self.registry.lookup('foo'), builder)
        self.assertEqual(self.registry.lookup('bar'), builder)

    def test_lookup_fails_when_no_builder_implements_feature(self):
        builder = self.builder_for_features('foo', 'bar')
        self.assertEqual(self.registry.lookup('baz'), None)

    def test_lookup_gets_most_recent_registration_when_no_feature_specified(self):
        builder1 = self.builder_for_features('foo')
        builder2 = self.builder_for_features('bar')
        self.assertEqual(self.registry.lookup(), builder2)

    def test_lookup_fails_when_no_tree_builders_registered(self):
        self.assertEqual(self.registry.lookup(), None)

    def test_lookup_gets_most_recent_builder_supporting_all_features(self):
        has_one = self.builder_for_features('foo')
        has_the_other = self.builder_for_features('bar')
        has_both_early = self.builder_for_features('foo', 'bar', 'baz')
        has_both_late = self.builder_for_features('foo', 'bar', 'quux')
        lacks_one = self.builder_for_features('bar')
        has_the_other = self.builder_for_features('foo')

        # There are two builders featuring 'foo' and 'bar', but
        # the one that also features 'quux' was registered later.
        self.assertEqual(self.registry.lookup('foo', 'bar'),
                         has_both_late)

        # There is only one builder featuring 'foo', 'bar', and 'baz'.
        self.assertEqual(self.registry.lookup('foo', 'bar', 'baz'),
                         has_both_early)

    def test_lookup_fails_when_cannot_reconcile_requested_features(self):
        builder1 = self.builder_for_features('foo', 'bar')
        builder2 = self.builder_for_features('foo', 'baz')
        self.assertEqual(self.registry.lookup('bar', 'baz'), None)
@@ -1,36 +0,0 @@
"Test harness for doctests."

# pylint: disable-msg=E0611,W0142

__metaclass__ = type
__all__ = [
    'additional_tests',
    ]

import atexit
import doctest
import os
#from pkg_resources import (
#    resource_filename, resource_exists, resource_listdir, cleanup_resources)
import unittest

DOCTEST_FLAGS = (
    doctest.ELLIPSIS |
    doctest.NORMALIZE_WHITESPACE |
    doctest.REPORT_NDIFF)


# def additional_tests():
#     "Run the doc tests (README.txt and docs/*, if any exist)"
#     doctest_files = [
#         os.path.abspath(resource_filename('bs4', 'README.txt'))]
#     if resource_exists('bs4', 'docs'):
#         for name in resource_listdir('bs4', 'docs'):
#             if name.endswith('.txt'):
#                 doctest_files.append(
#                     os.path.abspath(
#                         resource_filename('bs4', 'docs/%s' % name)))
#     kwargs = dict(module_relative=False, optionflags=DOCTEST_FLAGS)
#     atexit.register(cleanup_resources)
#     return unittest.TestSuite((
#         doctest.DocFileSuite(*doctest_files, **kwargs)))
@@ -1,85 +0,0 @@
"""Tests to ensure that the html5lib tree builder generates good trees."""

import warnings

try:
    from bs4.builder import HTML5TreeBuilder
    HTML5LIB_PRESENT = True
except ImportError, e:
    HTML5LIB_PRESENT = False
from bs4.element import SoupStrainer
from bs4.testing import (
    HTML5TreeBuilderSmokeTest,
    SoupTest,
    skipIf,
)

@skipIf(
    not HTML5LIB_PRESENT,
    "html5lib seems not to be present, not testing its tree builder.")
class HTML5LibBuilderSmokeTest(SoupTest, HTML5TreeBuilderSmokeTest):
    """See ``HTML5TreeBuilderSmokeTest``."""

    @property
    def default_builder(self):
        return HTML5TreeBuilder()

    def test_soupstrainer(self):
        # The html5lib tree builder does not support SoupStrainers.
        strainer = SoupStrainer("b")
        markup = "<p>A <b>bold</b> statement.</p>"
        with warnings.catch_warnings(record=True) as w:
            soup = self.soup(markup, parse_only=strainer)
        self.assertEqual(
            soup.decode(), self.document_for(markup))

        self.assertTrue(
            "the html5lib tree builder doesn't support parse_only" in
            str(w[0].message))

    def test_correctly_nested_tables(self):
        """html5lib inserts <tbody> tags where other parsers don't."""
        markup = ('<table id="1">'
                  '<tr>'
                  "<td>Here's another table:"
                  '<table id="2">'
                  '<tr><td>foo</td></tr>'
                  '</table></td>')

        self.assertSoupEquals(
            markup,
            '<table id="1"><tbody><tr><td>Here\'s another table:'
            '<table id="2"><tbody><tr><td>foo</td></tr></tbody></table>'
            '</td></tr></tbody></table>')

        self.assertSoupEquals(
            "<table><thead><tr><td>Foo</td></tr></thead>"
            "<tbody><tr><td>Bar</td></tr></tbody>"
            "<tfoot><tr><td>Baz</td></tr></tfoot></table>")

    def test_xml_declaration_followed_by_doctype(self):
        markup = '''<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE html>
<html>
  <head>
  </head>
  <body>
   <p>foo</p>
  </body>
</html>'''
        soup = self.soup(markup)
        # Verify that we can reach the <p> tag; this means the tree is connected.
        self.assertEqual(b"<p>foo</p>", soup.p.encode())

    def test_reparented_markup(self):
        markup = '<p><em>foo</p>\n<p>bar<a></a></em></p>'
        soup = self.soup(markup)
        self.assertEqual(u"<body><p><em>foo</em></p><em>\n</em><p><em>bar<a></a></em></p></body>", soup.body.decode())
        self.assertEqual(2, len(soup.find_all('p')))


    def test_reparented_markup_ends_with_whitespace(self):
        markup = '<p><em>foo</p>\n<p>bar<a></a></em></p>\n'
        soup = self.soup(markup)
        self.assertEqual(u"<body><p><em>foo</em></p><em>\n</em><p><em>bar<a></a></em></p>\n</body>", soup.body.decode())
        self.assertEqual(2, len(soup.find_all('p')))
@@ -1,19 +0,0 @@
"""Tests to ensure that the html.parser tree builder generates good
trees."""

from bs4.testing import SoupTest, HTMLTreeBuilderSmokeTest
from bs4.builder import HTMLParserTreeBuilder

class HTMLParserTreeBuilderSmokeTest(SoupTest, HTMLTreeBuilderSmokeTest):

    @property
    def default_builder(self):
        return HTMLParserTreeBuilder()

    def test_namespaced_system_doctype(self):
        # html.parser can't handle namespaced doctypes, so skip this one.
        pass

    def test_namespaced_public_doctype(self):
        # html.parser can't handle namespaced doctypes, so skip this one.
        pass
@@ -1,91 +0,0 @@
"""Tests to ensure that the lxml tree builder generates good trees."""

import re
import warnings

try:
    import lxml.etree
    LXML_PRESENT = True
    LXML_VERSION = lxml.etree.LXML_VERSION
except ImportError, e:
    LXML_PRESENT = False
    LXML_VERSION = (0,)

if LXML_PRESENT:
    from bs4.builder import LXMLTreeBuilder, LXMLTreeBuilderForXML

from bs4 import (
    BeautifulSoup,
    BeautifulStoneSoup,
    )
from bs4.element import Comment, Doctype, SoupStrainer
from bs4.testing import skipIf
from bs4.tests import test_htmlparser
from bs4.testing import (
    HTMLTreeBuilderSmokeTest,
    XMLTreeBuilderSmokeTest,
    SoupTest,
    skipIf,
)

@skipIf(
    not LXML_PRESENT,
    "lxml seems not to be present, not testing its tree builder.")
class LXMLTreeBuilderSmokeTest(SoupTest, HTMLTreeBuilderSmokeTest):
    """See ``HTMLTreeBuilderSmokeTest``."""

    @property
    def default_builder(self):
        return LXMLTreeBuilder()

    def test_out_of_range_entity(self):
        self.assertSoupEquals(
            "<p>foo&#10000000000000;bar</p>", "<p>foobar</p>")
        self.assertSoupEquals(
            "<p>foo&#x10000000000000;bar</p>", "<p>foobar</p>")
        self.assertSoupEquals(
            "<p>foo&#1000000000;bar</p>", "<p>foobar</p>")

    # In lxml < 2.3.5, an empty doctype causes a segfault. Skip this
    # test if an old version of lxml is installed.

    @skipIf(
        not LXML_PRESENT or LXML_VERSION < (2,3,5,0),
        "Skipping doctype test for old version of lxml to avoid segfault.")
    def test_empty_doctype(self):
        soup = self.soup("<!DOCTYPE>")
        doctype = soup.contents[0]
        self.assertEqual("", doctype.strip())

    def test_beautifulstonesoup_is_xml_parser(self):
        # Make sure that the deprecated BSS class uses an xml builder
        # if one is installed.
        with warnings.catch_warnings(record=True) as w:
            soup = BeautifulStoneSoup("<b />")
        self.assertEqual(u"<b/>", unicode(soup.b))
        self.assertTrue("BeautifulStoneSoup class is deprecated" in str(w[0].message))

    def test_real_xhtml_document(self):
        """lxml strips the XML definition from an XHTML doc, which is fine."""
        markup = b"""<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN">
<html xmlns="http://www.w3.org/1999/xhtml">
<head><title>Hello.</title></head>
<body>Goodbye.</body>
</html>"""
        soup = self.soup(markup)
        self.assertEqual(
            soup.encode("utf-8").replace(b"\n", b''),
            markup.replace(b'\n', b'').replace(
                b'<?xml version="1.0" encoding="utf-8"?>', b''))


@skipIf(
    not LXML_PRESENT,
    "lxml seems not to be present, not testing its XML tree builder.")
class LXMLXMLTreeBuilderSmokeTest(SoupTest, XMLTreeBuilderSmokeTest):
    """See ``HTMLTreeBuilderSmokeTest``."""

    @property
    def default_builder(self):
        return LXMLTreeBuilderForXML()
@@ -1,434 +0,0 @@
# -*- coding: utf-8 -*-
"""Tests of Beautiful Soup as a whole."""

import logging
import unittest
import sys
import tempfile

from bs4 import (
    BeautifulSoup,
    BeautifulStoneSoup,
)
from bs4.element import (
    CharsetMetaAttributeValue,
    ContentMetaAttributeValue,
    SoupStrainer,
    NamespacedAttribute,
    )
import bs4.dammit
from bs4.dammit import (
    EntitySubstitution,
    UnicodeDammit,
)
from bs4.testing import (
    SoupTest,
    skipIf,
)
import warnings

try:
    from bs4.builder import LXMLTreeBuilder, LXMLTreeBuilderForXML
    LXML_PRESENT = True
except ImportError, e:
    LXML_PRESENT = False

PYTHON_2_PRE_2_7 = (sys.version_info < (2,7))
PYTHON_3_PRE_3_2 = (sys.version_info[0] == 3 and sys.version_info < (3,2))

class TestConstructor(SoupTest):

    def test_short_unicode_input(self):
        data = u"<h1>éé</h1>"
        soup = self.soup(data)
        self.assertEqual(u"éé", soup.h1.string)

    def test_embedded_null(self):
        data = u"<h1>foo\0bar</h1>"
        soup = self.soup(data)
        self.assertEqual(u"foo\0bar", soup.h1.string)


class TestDeprecatedConstructorArguments(SoupTest):

    def test_parseOnlyThese_renamed_to_parse_only(self):
        with warnings.catch_warnings(record=True) as w:
            soup = self.soup("<a><b></b></a>", parseOnlyThese=SoupStrainer("b"))
        msg = str(w[0].message)
        self.assertTrue("parseOnlyThese" in msg)
        self.assertTrue("parse_only" in msg)
        self.assertEqual(b"<b></b>", soup.encode())

    def test_fromEncoding_renamed_to_from_encoding(self):
        with warnings.catch_warnings(record=True) as w:
            utf8 = b"\xc3\xa9"
            soup = self.soup(utf8, fromEncoding="utf8")
        msg = str(w[0].message)
        self.assertTrue("fromEncoding" in msg)
        self.assertTrue("from_encoding" in msg)
        self.assertEqual("utf8", soup.original_encoding)

    def test_unrecognized_keyword_argument(self):
        self.assertRaises(
            TypeError, self.soup, "<a>", no_such_argument=True)

class TestWarnings(SoupTest):

    def test_disk_file_warning(self):
        filehandle = tempfile.NamedTemporaryFile()
        filename = filehandle.name
        try:
            with warnings.catch_warnings(record=True) as w:
                soup = self.soup(filename)
            msg = str(w[0].message)
            self.assertTrue("looks like a filename" in msg)
        finally:
            filehandle.close()

        # The file no longer exists, so Beautiful Soup will no longer issue the warning.
        with warnings.catch_warnings(record=True) as w:
            soup = self.soup(filename)
        self.assertEqual(0, len(w))

    def test_url_warning(self):
        with warnings.catch_warnings(record=True) as w:
            soup = self.soup("http://www.crummy.com/")
        msg = str(w[0].message)
        self.assertTrue("looks like a URL" in msg)

        with warnings.catch_warnings(record=True) as w:
            soup = self.soup("http://www.crummy.com/ is great")
        self.assertEqual(0, len(w))

class TestSelectiveParsing(SoupTest):

    def test_parse_with_soupstrainer(self):
        markup = "No<b>Yes</b><a>No<b>Yes <c>Yes</c></b>"
        strainer = SoupStrainer("b")
        soup = self.soup(markup, parse_only=strainer)
        self.assertEqual(soup.encode(), b"<b>Yes</b><b>Yes <c>Yes</c></b>")


class TestEntitySubstitution(unittest.TestCase):
    """Standalone tests of the EntitySubstitution class."""
    def setUp(self):
        self.sub = EntitySubstitution

    def test_simple_html_substitution(self):
        # Unicode characters corresponding to named HTML entities
        # are substituted, and no others.
        s = u"foo\u2200\N{SNOWMAN}\u00f5bar"
        self.assertEqual(self.sub.substitute_html(s),
                         u"foo&forall;\N{SNOWMAN}&otilde;bar")

    def test_smart_quote_substitution(self):
        # MS smart quotes are a common source of frustration, so we
        # give them a special test.
        quotes = b"\x91\x92foo\x93\x94"
        dammit = UnicodeDammit(quotes)
        self.assertEqual(self.sub.substitute_html(dammit.markup),
                         "&lsquo;&rsquo;foo&ldquo;&rdquo;")

    def test_xml_converstion_includes_no_quotes_if_make_quoted_attribute_is_false(self):
        s = 'Welcome to "my bar"'
        self.assertEqual(self.sub.substitute_xml(s, False), s)

    def test_xml_attribute_quoting_normally_uses_double_quotes(self):
        self.assertEqual(self.sub.substitute_xml("Welcome", True),
                         '"Welcome"')
        self.assertEqual(self.sub.substitute_xml("Bob's Bar", True),
                         '"Bob\'s Bar"')

    def test_xml_attribute_quoting_uses_single_quotes_when_value_contains_double_quotes(self):
        s = 'Welcome to "my bar"'
        self.assertEqual(self.sub.substitute_xml(s, True),
                         "'Welcome to \"my bar\"'")

    def test_xml_attribute_quoting_escapes_single_quotes_when_value_contains_both_single_and_double_quotes(self):
        s = 'Welcome to "Bob\'s Bar"'
        self.assertEqual(
            self.sub.substitute_xml(s, True),
            '"Welcome to &quot;Bob\'s Bar&quot;"')

    def test_xml_quotes_arent_escaped_when_value_is_not_being_quoted(self):
        quoted = 'Welcome to "Bob\'s Bar"'
        self.assertEqual(self.sub.substitute_xml(quoted), quoted)

    def test_xml_quoting_handles_angle_brackets(self):
        self.assertEqual(
            self.sub.substitute_xml("foo<bar>"),
            "foo&lt;bar&gt;")

    def test_xml_quoting_handles_ampersands(self):
        self.assertEqual(self.sub.substitute_xml("AT&T"), "AT&amp;T")

    def test_xml_quoting_including_ampersands_when_they_are_part_of_an_entity(self):
        self.assertEqual(
            self.sub.substitute_xml("&Aacute;T&T"),
            "&amp;Aacute;T&amp;T")

    def test_xml_quoting_ignoring_ampersands_when_they_are_part_of_an_entity(self):
        self.assertEqual(
            self.sub.substitute_xml_containing_entities("&Aacute;T&T"),
            "&Aacute;T&amp;T")

    def test_quotes_not_html_substituted(self):
        """There's no need to do this except inside attribute values."""
        text = 'Bob\'s "bar"'
        self.assertEqual(self.sub.substitute_html(text), text)


class TestEncodingConversion(SoupTest):
    # Test Beautiful Soup's ability to decode and encode from various
    # encodings.

    def setUp(self):
        super(TestEncodingConversion, self).setUp()
        self.unicode_data = u'<html><head><meta charset="utf-8"/></head><body><foo>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</foo></body></html>'
        self.utf8_data = self.unicode_data.encode("utf-8")
        # Just so you know what it looks like.
        self.assertEqual(
            self.utf8_data,
            b'<html><head><meta charset="utf-8"/></head><body><foo>Sacr\xc3\xa9 bleu!</foo></body></html>')

    def test_ascii_in_unicode_out(self):
        # ASCII input is converted to Unicode. The original_encoding
        # attribute is set to 'utf-8', a superset of ASCII.
        chardet = bs4.dammit.chardet_dammit
        logging.disable(logging.WARNING)
        try:
            def noop(str):
                return None
            # Disable chardet, which will realize that the ASCII is ASCII.
            bs4.dammit.chardet_dammit = noop
            ascii = b"<foo>a</foo>"
            soup_from_ascii = self.soup(ascii)
            unicode_output = soup_from_ascii.decode()
            self.assertTrue(isinstance(unicode_output, unicode))
            self.assertEqual(unicode_output, self.document_for(ascii.decode()))
            self.assertEqual(soup_from_ascii.original_encoding.lower(), "utf-8")
        finally:
            logging.disable(logging.NOTSET)
            bs4.dammit.chardet_dammit = chardet

    def test_unicode_in_unicode_out(self):
        # Unicode input is left alone. The original_encoding attribute
        # is not set.
        soup_from_unicode = self.soup(self.unicode_data)
        self.assertEqual(soup_from_unicode.decode(), self.unicode_data)
        self.assertEqual(soup_from_unicode.foo.string, u'Sacr\xe9 bleu!')
        self.assertEqual(soup_from_unicode.original_encoding, None)

    def test_utf8_in_unicode_out(self):
        # UTF-8 input is converted to Unicode. The original_encoding
        # attribute is set.
        soup_from_utf8 = self.soup(self.utf8_data)
        self.assertEqual(soup_from_utf8.decode(), self.unicode_data)
        self.assertEqual(soup_from_utf8.foo.string, u'Sacr\xe9 bleu!')

    def test_utf8_out(self):
        # The internal data structures can be encoded as UTF-8.
        soup_from_unicode = self.soup(self.unicode_data)
        self.assertEqual(soup_from_unicode.encode('utf-8'), self.utf8_data)

    @skipIf(
        PYTHON_2_PRE_2_7 or PYTHON_3_PRE_3_2,
        "Bad HTMLParser detected; skipping test of non-ASCII characters in attribute name.")
    def test_attribute_name_containing_unicode_characters(self):
        markup = u'<div><a \N{SNOWMAN}="snowman"></a></div>'
        self.assertEqual(self.soup(markup).div.encode("utf8"), markup.encode("utf8"))

class TestUnicodeDammit(unittest.TestCase):
    """Standalone tests of UnicodeDammit."""

    def test_unicode_input(self):
        markup = u"I'm already Unicode! \N{SNOWMAN}"
        dammit = UnicodeDammit(markup)
        self.assertEqual(dammit.unicode_markup, markup)

    def test_smart_quotes_to_unicode(self):
        markup = b"<foo>\x91\x92\x93\x94</foo>"
        dammit = UnicodeDammit(markup)
        self.assertEqual(
            dammit.unicode_markup, u"<foo>\u2018\u2019\u201c\u201d</foo>")

    def test_smart_quotes_to_xml_entities(self):
        markup = b"<foo>\x91\x92\x93\x94</foo>"
        dammit = UnicodeDammit(markup, smart_quotes_to="xml")
        self.assertEqual(
            dammit.unicode_markup, "<foo>&#x2018;&#x2019;&#x201C;&#x201D;</foo>")

    def test_smart_quotes_to_html_entities(self):
        markup = b"<foo>\x91\x92\x93\x94</foo>"
        dammit = UnicodeDammit(markup, smart_quotes_to="html")
        self.assertEqual(
            dammit.unicode_markup, "<foo>&lsquo;&rsquo;&ldquo;&rdquo;</foo>")

    def test_smart_quotes_to_ascii(self):
        markup = b"<foo>\x91\x92\x93\x94</foo>"
        dammit = UnicodeDammit(markup, smart_quotes_to="ascii")
        self.assertEqual(
            dammit.unicode_markup, """<foo>''""</foo>""")

    def test_detect_utf8(self):
        utf8 = b"\xc3\xa9"
        dammit = UnicodeDammit(utf8)
        self.assertEqual(dammit.unicode_markup, u'\xe9')
        self.assertEqual(dammit.original_encoding.lower(), 'utf-8')

    def test_convert_hebrew(self):
        hebrew = b"\xed\xe5\xec\xf9"
        dammit = UnicodeDammit(hebrew, ["iso-8859-8"])
        self.assertEqual(dammit.original_encoding.lower(), 'iso-8859-8')
        self.assertEqual(dammit.unicode_markup, u'\u05dd\u05d5\u05dc\u05e9')

    def test_dont_see_smart_quotes_where_there_are_none(self):
        utf_8 = b"\343\202\261\343\203\274\343\202\277\343\202\244 Watch"
        dammit = UnicodeDammit(utf_8)
        self.assertEqual(dammit.original_encoding.lower(), 'utf-8')
        self.assertEqual(dammit.unicode_markup.encode("utf-8"), utf_8)

    def test_ignore_inappropriate_codecs(self):
        utf8_data = u"Räksmörgås".encode("utf-8")
        dammit = UnicodeDammit(utf8_data, ["iso-8859-8"])
        self.assertEqual(dammit.original_encoding.lower(), 'utf-8')

    def test_ignore_invalid_codecs(self):
        utf8_data = u"Räksmörgås".encode("utf-8")
        for bad_encoding in ['.utf8', '...', 'utF---16.!']:
            dammit = UnicodeDammit(utf8_data, [bad_encoding])
            self.assertEqual(dammit.original_encoding.lower(), 'utf-8')

    def test_detect_html5_style_meta_tag(self):

        for data in (
            b'<html><meta charset="euc-jp" /></html>',
            b"<html><meta charset='euc-jp' /></html>",
            b"<html><meta charset=euc-jp /></html>",
            b"<html><meta charset=euc-jp/></html>"):
            dammit = UnicodeDammit(data, is_html=True)
            self.assertEqual(
                "euc-jp", dammit.original_encoding)

    def test_last_ditch_entity_replacement(self):
        # This is a UTF-8 document that contains bytestrings
        # completely incompatible with UTF-8 (ie. encoded with some other
        # encoding).
        #
        # Since there is no consistent encoding for the document,
        # Unicode, Dammit will eventually encode the document as UTF-8
        # and encode the incompatible characters as REPLACEMENT
        # CHARACTER.
        #
        # If chardet is installed, it will detect that the document
        # can be converted into ISO-8859-1 without errors. This happens
        # to be the wrong encoding, but it is a consistent encoding, so the
        # code we're testing here won't run.
        #
        # So we temporarily disable chardet if it's present.
        doc = b"""\357\273\277<?xml version="1.0" encoding="UTF-8"?>
<html><b>\330\250\330\252\330\261</b>
<i>\310\322\321\220\312\321\355\344</i></html>"""
        chardet = bs4.dammit.chardet_dammit
        logging.disable(logging.WARNING)
        try:
            def noop(str):
                return None
            bs4.dammit.chardet_dammit = noop
            dammit = UnicodeDammit(doc)
            self.assertEqual(True, dammit.contains_replacement_characters)
            self.assertTrue(u"\ufffd" in dammit.unicode_markup)

            soup = BeautifulSoup(doc, "html.parser")
            self.assertTrue(soup.contains_replacement_characters)
        finally:
            logging.disable(logging.NOTSET)
            bs4.dammit.chardet_dammit = chardet

    def test_byte_order_mark_removed(self):
        # A document written in UTF-16LE will have its byte order marker stripped.
        data = b'\xff\xfe<\x00a\x00>\x00\xe1\x00\xe9\x00<\x00/\x00a\x00>\x00'
        dammit = UnicodeDammit(data)
        self.assertEqual(u"<a>áé</a>", dammit.unicode_markup)
        self.assertEqual("utf-16le", dammit.original_encoding)

    def test_detwingle(self):
        # Here's a UTF8 document.
        utf8 = (u"\N{SNOWMAN}" * 3).encode("utf8")

        # Here's a Windows-1252 document.
        windows_1252 = (
            u"\N{LEFT DOUBLE QUOTATION MARK}Hi, I like Windows!"
            u"\N{RIGHT DOUBLE QUOTATION MARK}").encode("windows_1252")

        # Through some unholy alchemy, they've been stuck together.
        doc = utf8 + windows_1252 + utf8

        # The document can't be turned into UTF-8:
        self.assertRaises(UnicodeDecodeError, doc.decode, "utf8")

        # Unicode, Dammit thinks the whole document is Windows-1252,
        # and decodes it into "☃☃☃“Hi, I like Windows!”☃☃☃"

        # But if we run it through fix_embedded_windows_1252, it's fixed:

        fixed = UnicodeDammit.detwingle(doc)
        self.assertEqual(
            u"☃☃☃“Hi, I like Windows!”☃☃☃", fixed.decode("utf8"))

    def test_detwingle_ignores_multibyte_characters(self):
        # Each of these characters has a UTF-8 representation ending
        # in \x93. \x93 is a smart quote if interpreted as
        # Windows-1252. But our code knows to skip over multibyte
        # UTF-8 characters, so they'll survive the process unscathed.
        for tricky_unicode_char in (
            u"\N{LATIN SMALL LIGATURE OE}", # 2-byte char '\xc5\x93'
            u"\N{LATIN SUBSCRIPT SMALL LETTER X}", # 3-byte char '\xe2\x82\x93'
            u"\xf0\x90\x90\x93", # This is a CJK character, not sure which one.
            ):
            input = tricky_unicode_char.encode("utf8")
            self.assertTrue(input.endswith(b'\x93'))
            output = UnicodeDammit.detwingle(input)
            self.assertEqual(output, input)

class TestNamedspacedAttribute(SoupTest):

    def test_name_may_be_none(self):
        a = NamespacedAttribute("xmlns", None)
        self.assertEqual(a, "xmlns")

    def test_attribute_is_equivalent_to_colon_separated_string(self):
        a = NamespacedAttribute("a", "b")
        self.assertEqual("a:b", a)

    def test_attributes_are_equivalent_if_prefix_and_name_identical(self):
        a = NamespacedAttribute("a", "b", "c")
        b = NamespacedAttribute("a", "b", "c")
        self.assertEqual(a, b)

        # The actual namespace is not considered.
        c = NamespacedAttribute("a", "b", None)
        self.assertEqual(a, c)

        # But name and prefix are important.
        d = NamespacedAttribute("a", "z", "c")
        self.assertNotEqual(a, d)

        e = NamespacedAttribute("z", "b", "c")
        self.assertNotEqual(a, e)


class TestAttributeValueWithCharsetSubstitution(unittest.TestCase):

    def test_charset_meta_attribute_value(self):
        value = CharsetMetaAttributeValue("euc-jp")
        self.assertEqual("euc-jp", value)
        self.assertEqual("euc-jp", value.original_value)
        self.assertEqual("utf8", value.encode("utf8"))


    def test_content_meta_attribute_value(self):
        value = ContentMetaAttributeValue("text/html; charset=euc-jp")
        self.assertEqual("text/html; charset=euc-jp", value)
        self.assertEqual("text/html; charset=euc-jp", value.original_value)
        self.assertEqual("text/html; charset=utf8", value.encode("utf8"))
File diff suppressed because it is too large
@@ -15,16 +15,6 @@ sqlversion = sqlite3.sqlite_version_info
if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
    raise Exception("sqlite3 version 3.3.0 or later is required.")

#
# "No History" mode - for a given query tuple (version, pkgarch, checksum),
# the returned value will be the largest among all the values of the same
# (version, pkgarch). This means the PR value returned can NOT be decremented.
#
# "History" mode - Return a new higher value for previously unseen query
# tuple (version, pkgarch, checksum), otherwise return historical value.
# Value can decrement if returning to a previous build.
#
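
# Illustrative sketch of the two modes described above (editorially hedged:
# this is a simplified dict-based model, not the PRTable implementation):
#
#   history = {}   # (version, pkgarch, checksum) -> value handed out
#   maxima  = {}   # (version, pkgarch) -> largest value handed out so far
#
#   def getvalue(version, pkgarch, checksum, nohist):
#       key = (version, pkgarch, checksum)
#       if key not in history:
#           history[key] = maxima.get((version, pkgarch), -1) + 1
#           maxima[(version, pkgarch)] = history[key]
#       if nohist:
#           # "No History": never hand back less than the current maximum,
#           # so the value can only grow for a given (version, pkgarch).
#           return maxima[(version, pkgarch)]
#       # "History": a previously seen checksum gets its recorded value,
#       # which may be lower than the maximum (returning to an old build).
#       return history[key]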

class PRTable(object):
    def __init__(self, conn, table, nohist):
        self.conn = conn

@@ -1,6 +1,7 @@
import os,sys,logging
import signal, time
import signal, time, atexit, threading
from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
import xmlrpclib
import threading
import Queue


@@ -1,33 +0,0 @@
from django.contrib import admin
from django.contrib.admin.filters import RelatedFieldListFilter
from orm.models import BitbakeVersion, Release, LayerSource, ToasterSetting
from django.forms.widgets import Textarea
from django import forms
import django.db.models as models

from django.contrib.admin import widgets, helpers

class LayerSourceAdmin(admin.ModelAdmin):
    pass

class BitbakeVersionAdmin(admin.ModelAdmin):

    # we override the formfield for db URLField because of broken URL validation

    def formfield_for_dbfield(self, db_field, **kwargs):
        if isinstance(db_field, models.fields.URLField):
            return forms.fields.CharField()
        return super(BitbakeVersionAdmin, self).formfield_for_dbfield(db_field, **kwargs)



class ReleaseAdmin(admin.ModelAdmin):
    pass

class ToasterSettingAdmin(admin.ModelAdmin):
    pass

admin.site.register(LayerSource, LayerSourceAdmin)
admin.site.register(BitbakeVersion, BitbakeVersionAdmin)
admin.site.register(Release, ReleaseAdmin)
admin.site.register(ToasterSetting, ToasterSettingAdmin)
@@ -1,26 +0,0 @@
#
# BitBake Toaster Implementation
#
# Copyright (C) 2014        Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.


from django.conf.urls import patterns, include, url
from django.views.generic import RedirectView

urlpatterns = patterns('bldcollector.views',
        # landing point for pushing a bitbake_eventlog.json file to this toaster instance
        url(r'^eventfile$', 'eventfile', name='eventfile'),
        )
@@ -1,62 +0,0 @@
#
# BitBake Toaster Implementation
#
# Copyright (C) 2014        Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

from django.views.decorators.cache import cache_control
from django.core.urlresolvers import reverse
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.http import HttpResponseBadRequest, HttpResponse
from django.utils import timezone
from django.utils.html import escape
from datetime import timedelta
from django.utils import formats
from toastergui.templatetags.projecttags import json as jsonfilter
import json
import os
import tempfile
import subprocess
import toastermain
from django.views.decorators.csrf import csrf_exempt


@csrf_exempt
def eventfile(request):
    """ Receives a file by POST, and runs toaster-eventreplay on this file """
    if request.method != "POST":
        return HttpResponseBadRequest("This API only accepts POST requests. Post a file with:\n\ncurl -F eventlog=@bitbake_eventlog.json %s\n" % request.build_absolute_uri(reverse('eventfile')), content_type="text/plain;utf8")

    # write temporary file
    (handle, abstemppath) = tempfile.mkstemp(dir="/tmp/")
    with os.fdopen(handle, "w") as tmpfile:
        for chunk in request.FILES['eventlog'].chunks():
            tmpfile.write(chunk)
    tmpfile.close()

    # compute the path to "bitbake/bin/toaster-eventreplay"
    from os.path import dirname as DN
    import_script = os.path.join(DN(DN(DN(DN(os.path.abspath(__file__))))), "bin/toaster-eventreplay")
    if not os.path.exists(import_script):
        raise Exception("script missing %s" % import_script)
    scriptenv = os.environ.copy()
    scriptenv["DATABASE_URL"] = toastermain.settings.getDATABASE_URL()

    # run the data loading process and return the results
    importer = subprocess.Popen([import_script, abstemppath], stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=scriptenv)
    (out, err) = importer.communicate()
    if importer.returncode == 0:
        os.remove(abstemppath)
    return HttpResponse("== Retval %d\n== STDOUT\n%s\n\n== STDERR\n%s" % (importer.returncode, out, err), content_type="text/plain;utf8")
@@ -81,6 +81,14 @@ def getBuildEnvironmentController(**kwargs):
    raise Exception("FIXME: Implement BEC for type %s" % str(be.betype))


def _getgitcheckoutdirectoryname(url):
    """ Utility that returns the last component of a git path as directory
    """
    import re
    components = re.split(r'[:\.\/]', url)
    return components[-2] if components[-1] == "git" else components[-1]
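
# Worked example (added for illustration; the URLs are hypothetical):
# the function splits the URL on ':', '.' and '/', then drops a trailing
# "git" component, so:
#
#   _getgitcheckoutdirectoryname("git://git.yoctoproject.org/poky.git")
#       -> "poky"      (split ends in [..., "poky", "git"])
#   _getgitcheckoutdirectoryname("https://example.com/repos/meta-foo")
#       -> "meta-foo"  (no trailing "git" component)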


class BuildEnvironmentController(object):
    """ BuildEnvironmentController (BEC) is the abstract class that defines the operations that MUST
        or SHOULD be supported by a Build Environment. It is used to establish the framework, and must
@@ -109,43 +117,19 @@ class BuildEnvironmentController(object):
        self.be = be
        self.connection = None

    @staticmethod
    def _updateBBLayers(bblayerconf, layerlist):
        conflines = open(bblayerconf, "r").readlines()

        bblayerconffile = open(bblayerconf, "w")
        skip = 0
        for i in xrange(len(conflines)):
            if skip > 0:
                skip =- 1
                continue
            if conflines[i].startswith("# line added by toaster"):
                skip = 1
            else:
                bblayerconffile.write(conflines[i])

        bblayerconffile.write("# line added by toaster build control\nBBLAYERS = \"" + " ".join(layerlist) + "\"")
        bblayerconffile.close()
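
    # Illustration of the rewrite above (assumed example, not from the diff):
    # given a bblayers.conf whose tail was previously appended by toaster,
    #
    #   # line added by toaster build control
    #   BBLAYERS = "/path/poky/meta /path/poky/meta-yocto"
    #
    # the loop drops each marker line plus the line after it, preserves the
    # rest of the file, and then re-appends a fresh marker and a BBLAYERS
    # assignment built from layerlist.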


    def writeConfFile(self, variable_list = None, raw = None):
        """ Writes a configuration file in the build directory. Override with buildenv-specific implementation. """
        raise Exception("FIXME: Must override to actually write a configuration file")


    def startBBServer(self):
        """ Starts a BB server with Toaster toasterui set up to record the builds, and no controlling UI.
            After this method executes, self.be bbaddress/bbport MUST point to a running and free server,
            and the bbstate MUST be updated to "started".
        """
        raise Exception("FIXME: Must override in order to actually start the BB server")
        raise Exception("Must override in order to actually start the BB server")

    def stopBBServer(self):
        """ Stops the currently running BB server.
            The bbstate MUST be updated to "stopped".
            self.connection must be none.
        """
        raise Exception("FIXME: Must override stopBBServer")

    def setLayers(self, bbs, ls):
        """ Checks-out bitbake executor and layers from git repositories.
@@ -155,7 +139,7 @@ class BuildEnvironmentController(object):

            a word of attention: by convention, the first layer for any build will be poky!
        """
        raise Exception("FIXME: Must override setLayers")
        raise Exception("Must override setLayers")


    def getBBController(self):
@@ -190,9 +174,6 @@ class BuildEnvironmentController(object):
        """
        raise Exception("Must override BE release")

    def triggerBuild(self, bitbake, layers, variables, targets):
        raise Exception("Must override BE release")

class ShellCmdException(Exception):
    pass


@@ -30,12 +30,7 @@ import subprocess

from toastermain import settings

from bbcontroller import BuildEnvironmentController, ShellCmdException, BuildSetupException

import logging
logger = logging.getLogger("toaster")

from pprint import pprint, pformat
from bbcontroller import BuildEnvironmentController, ShellCmdException, BuildSetupException, _getgitcheckoutdirectoryname

class LocalhostBEController(BuildEnvironmentController):
    """ Implementation of the BuildEnvironmentController for the localhost;
@@ -54,148 +49,53 @@ class LocalhostBEController(BuildEnvironmentController):
        if cwd is None:
            cwd = self.be.sourcedir

        logger.debug("lbc_shellcmmd: (%s) %s" % (cwd, command))
        p = subprocess.Popen(command, cwd = cwd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        (out,err) = p.communicate()
        p.wait()
        if p.returncode:
            if len(err) == 0:
                err = "command: %s \n%s" % (command, out)
            else:
                err = "command: %s \n%s" % (command, err)
            logger.warn("localhostbecontroller: shellcmd error %s" % err)
            raise ShellCmdException(err)
        else:
            logger.debug("localhostbecontroller: shellcmd success")
            return out

    def _createdirpath(self, path):
        from os.path import dirname as DN
        if path == "":
            raise Exception("Invalid path creation specified.")
        if not os.path.exists(DN(path)):
            self._createdirpath(DN(path))
        if not os.path.exists(path):
            os.mkdir(path, 0755)
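
        # Illustration (assumed example, not from the diff): _createdirpath
        # recurses on the dirname first, so for "/a/b/c" it creates "/a",
        # then "/a/b", then "/a/b/c" -- the same effect as
        # os.makedirs("/a/b/c", 0755), except that directories which already
        # exist are tolerated at every level instead of raising OSError.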

    def _setupBE(self):
        assert self.pokydirname and os.path.exists(self.pokydirname)
        path = self.be.builddir
        if not path:
            raise Exception("Invalid path creation specified.")
        if not os.path.exists(path):
            os.makedirs(path, 0755)
        self._shellcmd("bash -c \"source %s/oe-init-build-env %s\"" % (self.pokydirname, path))
        # delete the templateconf.cfg; it may come from an unsupported layer configuration
        os.remove(os.path.join(path, "conf/templateconf.cfg"))


    def writeConfFile(self, file_name, variable_list = None, raw = None):
        filepath = os.path.join(self.be.builddir, file_name)
        with open(filepath, "w") as conffile:
            if variable_list is not None:
                for i in variable_list:
                    conffile.write("%s=\"%s\"\n" % (i.name, i.value))
            if raw is not None:
                conffile.write(raw)

        self._createdirpath(self.be.builddir)
        self._shellcmd("bash -c \"source %s/oe-init-build-env %s\"" % (self.pokydirname, self.be.builddir))

    def startBBServer(self):
        assert self.pokydirname and os.path.exists(self.pokydirname)
        assert self.islayerset

        # find our own toasterui listener/bitbake
        from toaster.bldcontrol.management.commands.loadconf import _reduce_canon_path

        own_bitbake = _reduce_canon_path(os.path.join(os.path.dirname(os.path.abspath(__file__)), "../../../bin/bitbake"))

        assert os.path.exists(own_bitbake) and os.path.isfile(own_bitbake)

        logger.debug("localhostbecontroller: running the listener at %s" % own_bitbake)

        toaster_ui_log_filepath = os.path.join(self.be.builddir, "toaster_ui.log")
        # get the file length; we need to detect the _last_ start of the toaster UI, not the first
        toaster_ui_log_filelength = 0
        if os.path.exists(toaster_ui_log_filepath):
            with open(toaster_ui_log_filepath, "w") as f:
                f.seek(0, 2)    # jump to the end
                toaster_ui_log_filelength = f.tell()

        cmd = "bash -c \"source %s/oe-init-build-env %s 2>&1 >toaster_server.log && bitbake --read %s/conf/toaster-pre.conf --postread %s/conf/toaster.conf --server-only -t xmlrpc -B 0.0.0.0:0 2>&1 >>toaster_server.log \"" % (self.pokydirname, self.be.builddir, self.be.builddir, self.be.builddir)

        port = "-1"
        logger.debug("localhostbecontroller: starting builder \n%s\n" % cmd)

        cmdoutput = self._shellcmd(cmd)
        with open(self.be.builddir + "/toaster_server.log", "r") as f:
            for i in f.readlines():
                if i.startswith("Bitbake server address"):
                    port = i.split(" ")[-1]
                    logger.debug("localhostbecontroller: Found bitbake server port %s" % port)

        cmd = "bash -c \"source %s/oe-init-build-env-memres -1 %s && DATABASE_URL=%s %s --observe-only -u toasterui --remote-server=0.0.0.0:-1 -t xmlrpc\"" % (self.pokydirname, self.be.builddir, self.dburl, own_bitbake)
        with open(toaster_ui_log_filepath, "a+") as f:
            p = subprocess.Popen(cmd, cwd = self.be.builddir, shell=True, stdout=f, stderr=f)

        def _toaster_ui_started(filepath, filepos = 0):
            if not os.path.exists(filepath):
                return False
            with open(filepath, "r") as f:
                f.seek(filepos)
                for line in f:
                    if line.startswith("NOTE: ToasterUI waiting for events"):
                        return True
            return False

        retries = 0
        started = False
        while not started and retries < 10:
            started = _toaster_ui_started(toaster_ui_log_filepath, toaster_ui_log_filelength)
            import time
            logger.debug("localhostbecontroller: Waiting bitbake server to start")
            time.sleep(0.5)
            retries += 1

        if not started:
            toaster_ui_log = open(os.path.join(self.be.builddir, "toaster_ui.log"), "r").read()
            toaster_server_log = open(os.path.join(self.be.builddir, "toaster_server.log"), "r").read()
            raise BuildSetupException("localhostbecontroller: Bitbake server did not start in 5 seconds, aborting (Error: '%s' '%s')" % (toaster_ui_log, toaster_server_log))

        logger.debug("localhostbecontroller: Started bitbake server")

        while port == "-1":
            # the port specification is "autodetect"; read the bitbake.lock file
            with open("%s/bitbake.lock" % self.be.builddir, "r") as f:
                for line in f.readlines():
                    if ":" in line:
                        port = line.split(":")[1].strip()
                        logger.debug("localhostbecontroller: Autodetected bitbake port %s", port)
                        break
|
||||
|
||||
print("DEBUG: executing ", "bash -c \"source %s/oe-init-build-env %s && DATABASE_URL=%s source toaster start noweb && sleep 1\"" % (self.pokydirname, self.be.builddir, self.dburl))
|
||||
print self._shellcmd("bash -c \"source %s/oe-init-build-env %s && DATABASE_URL=%s source toaster start noweb && sleep 1\"" % (self.pokydirname, self.be.builddir, self.dburl))
|
||||
# FIXME unfortunate sleep 1 - we need to make sure that bbserver is started and the toaster ui is connected
|
||||
# but since they start async without any return, we just wait a bit
|
||||
print "Started server"
|
||||
assert self.be.sourcedir and os.path.exists(self.be.builddir)
|
||||
self.be.bbaddress = "localhost"
|
||||
self.be.bbport = port
|
||||
self.be.bbport = "8200"
|
||||
self.be.bbstate = BuildEnvironment.SERVER_STARTED
|
||||
self.be.save()
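A minimal, self-contained sketch of the port autodetection above. The sample lock-file content is an assumption for illustration; the loop only relies on finding a line with a ":"-separated address in it:

    def detect_port(lockfile_lines, port="-1"):
        # mirror of the loop above: take the port from the first line with host:port in it
        for line in lockfile_lines:
            if ":" in line:
                port = line.split(":")[1].strip()
                break
        return port

    print(detect_port(["127.0.0.1:45321\n"]))   # -> "45321"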
    def stopBBServer(self):
        assert self.pokydirname and os.path.exists(self.pokydirname)
        assert self.islayerset
        self._shellcmd("bash -c \"source %s/oe-init-build-env %s && %s source toaster stop\"" %
            (self.pokydirname, self.be.builddir, (lambda: "" if self.be.bbtoken is None else "BBTOKEN=%s" % self.be.bbtoken)()))
        self.be.bbstate = BuildEnvironment.SERVER_STOPPED
        self.be.save()
        logger.debug("localhostbecontroller: Stopped bitbake server")
    def getGitCloneDirectory(self, url, branch):
        """ Utility that returns the last component of a git path as directory """
        import re
        components = re.split(r'[:\.\/]', url)
        base = components[-2] if components[-1] == "git" else components[-1]

        if branch != "HEAD":
            return "_%s_%s.toaster_cloned" % (base, branch)

        # Note: this is a localhost-specific special case; only on localhost do we
        # expect "HEAD" releases, which always mean the current poky checkout
        from os.path import dirname as DN
        local_checkout_path = DN(DN(DN(DN(DN(os.path.abspath(__file__))))))
        #logger.debug("localhostbecontroller: using HEAD checkout in %s" % local_checkout_path)
        return local_checkout_path
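A quick worked example of the naming scheme (the URL is illustrative):

    import re
    url = "git://git.yoctoproject.org/meta-intel"
    components = re.split(r'[:\.\/]', url)
    # -> ['git', '', '', 'git', 'yoctoproject', 'org', 'meta-intel']
    base = components[-2] if components[-1] == "git" else components[-1]
    print("_%s_%s.toaster_cloned" % (base, "daisy"))
    # -> _meta-intel_daisy.toaster_cloned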
    def setLayers(self, bitbakes, layers):
        """ Note: by convention, the first layer for any build will be poky! """
@@ -204,133 +104,88 @@ class LocalhostBEController(BuildEnvironmentController):
        assert len(bitbakes) == 1
        # set layers in the layersource

        # 1. get a list of repos with branches, and map dirpaths for each layer
        gitrepos = {}

        gitrepos[(bitbakes[0].giturl, bitbakes[0].commit)] = []
        gitrepos[(bitbakes[0].giturl, bitbakes[0].commit)].append( ("bitbake", bitbakes[0].dirpath) )

        for layer in layers:
            # we don't process local URLs
            if layer.giturl.startswith("file://"):
                continue
            if not (layer.giturl, layer.commit) in gitrepos:
                gitrepos[(layer.giturl, layer.commit)] = []
            gitrepos[(layer.giturl, layer.commit)].append( (layer.name, layer.dirpath) )

        logger.debug("localhostbecontroller, our git repos are %s" % pformat(gitrepos))

        # 2. find checked-out git repos in the sourcedir directory that may help with faster cloning
        cached_layers = {}
        for ldir in os.listdir(self.be.sourcedir):
            fldir = os.path.join(self.be.sourcedir, ldir)
            if os.path.isdir(fldir):
                try:
                    for line in self._shellcmd("git remote -v", fldir).split("\n"):
                        try:
                            remote = line.split("\t")[1].split(" ")[0]
                            if remote not in cached_layers:
                                cached_layers[remote] = fldir
                        except IndexError:
                            pass
                except ShellCmdException:
                    # ignore any errors in collecting git remotes
                    pass

        layerlist = []

        # 3. checkout the repositories
        for giturl, commit in gitrepos.keys():
            localdirname = os.path.join(self.be.sourcedir, self.getGitCloneDirectory(giturl, commit))
            logger.debug("localhostbecontroller: giturl %s:%s checking out in current directory %s" % (giturl, commit, localdirname))

            # make sure our directory is a git repository
            if os.path.exists(localdirname):
                localremotes = self._shellcmd("git remote -v", localdirname)
                if not giturl in localremotes:
                    raise BuildSetupException("Existing git repository at %s, but with different remotes ('%s', expected '%s'). Toaster will not continue out of fear of damaging something." % (localdirname, ", ".join(localremotes.split("\n")), giturl))
            else:
                if giturl in cached_layers:
                    logger.debug("localhostbecontroller git-copying %s to %s" % (cached_layers[giturl], localdirname))
                    self._shellcmd("git clone \"%s\" \"%s\"" % (cached_layers[giturl], localdirname))
                    self._shellcmd("git remote remove origin", localdirname)
                    self._shellcmd("git remote add origin \"%s\"" % giturl, localdirname)
                else:
                    logger.debug("localhostbecontroller: cloning %s:%s in %s" % (giturl, commit, localdirname))
                    self._shellcmd("git clone \"%s\" --single-branch --branch \"%s\" \"%s\"" % (giturl, commit, localdirname))

            # branch magic name "HEAD" will inhibit checkout
            if commit != "HEAD":
                logger.debug("localhostbecontroller: checking out commit %s to %s " % (commit, localdirname))
                self._shellcmd("git fetch --all && git checkout \"%s\" && git rebase \"origin/%s\"" % (commit, commit), localdirname)

            # take the localdirname as poky dir if we can find the oe-init-build-env
            if self.pokydirname is None and os.path.exists(os.path.join(localdirname, "oe-init-build-env")):
                logger.debug("localhostbecontroller: selected poky dir name %s" % localdirname)
                self.pokydirname = localdirname

                # make sure we have a working bitbake
                if not os.path.exists(os.path.join(self.pokydirname, 'bitbake')):
                    logger.debug("localhostbecontroller: checking bitbake into the poky dirname %s " % self.pokydirname)
                    self._shellcmd("git clone -b \"%s\" \"%s\" \"%s\" " % (bitbakes[0].commit, bitbakes[0].giturl, os.path.join(self.pokydirname, 'bitbake')))

            # verify our repositories
            for name, dirpath in gitrepos[(giturl, commit)]:
                localdirpath = os.path.join(localdirname, dirpath)
                logger.debug("localhostbecontroller: localdirpath expected '%s'" % localdirpath)
                if not os.path.exists(localdirpath):
                    raise BuildSetupException("Cannot find layer git path '%s' in checked out repository '%s:%s'. Aborting." % (localdirpath, giturl, commit))

                if name != "bitbake":
                    layerlist.append(localdirpath.rstrip("/"))

        logger.debug("localhostbecontroller: current layer list %s " % pformat(layerlist))

        # 4. configure the build environment, so we have a conf/bblayers.conf
        assert self.pokydirname is not None
        self._setupBE()

        # 5. update the bblayers.conf
        bblayerconf = os.path.join(self.be.builddir, "conf/bblayers.conf")
        if not os.path.exists(bblayerconf):
            raise BuildSetupException("BE is not consistent: bblayers.conf file missing at %s" % bblayerconf)

        BuildEnvironmentController._updateBBLayers(bblayerconf, layerlist)

        self.islayerset = True
        return True
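To make the data flow concrete, a sketch of the gitrepos map the method builds (repository URLs, commits and dirpaths are illustrative):

    gitrepos = {
        ("git://git.yoctoproject.org/poky", "dizzy"): [
            ("bitbake", "bitbake"),    # (layer name, dirpath inside the clone)
            ("meta", "meta"),
        ],
        ("git://git.yoctoproject.org/meta-intel", "dizzy"): [
            ("meta-intel", ""),
        ],
    }
    # each (giturl, commit) key is cloned/checked out exactly once, and every
    # (name, dirpath) entry is then verified to exist inside that checkout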
    def readServerLogFile(self):
        return open(os.path.join(self.be.builddir, "toaster_server.log"), "r").read()
    def release(self):
        assert self.be.sourcedir and os.path.exists(self.be.builddir)
        import shutil
        shutil.rmtree(os.path.join(self.be.sourcedir, "build"))
        assert not os.path.exists(self.be.builddir)
    def triggerBuild(self, bitbake, layers, variables, targets):
        # set up the build environment with the needed layers
        self.setLayers(bitbake, layers)
        self.writeConfFile("conf/toaster-pre.conf", variables)
        self.writeConfFile("conf/toaster.conf", raw = "INHERIT+=\"toaster buildhistory\"")

        # get the bb server running with the build req id and build env id
        bbctrl = self.getBBController()

        # trigger the build command
        task = reduce(lambda x, y: x if len(y) == 0 else y, map(lambda y: y.task, targets))
        if len(task) == 0:
            task = None

        bbctrl.build(list(map(lambda x: x.target, targets)), task)

        logger.debug("localhostbecontroller: Build launched, exiting. Follow build logs at %s/toaster_ui.log" % self.be.builddir)

        # disconnect from the server
        bbctrl.disconnect()
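The reduce() line deserves a worked example: it picks the task of the last target that actually specifies one, with the code above falling back to None when no target does (task values are stand-ins for target.task):

    tasks = ["", "populate_sdk", ""]
    task = reduce(lambda x, y: x if len(y) == 0 else y, tasks)
    # step 1: x="",             y="populate_sdk" -> "populate_sdk"
    # step 2: x="populate_sdk", y=""             -> "populate_sdk"
    print(task)        # -> populate_sdk
    task = reduce(lambda x, y: x if len(y) == 0 else y, ["", ""])
    print(len(task))   # -> 0, so the caller sets task = None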
@@ -1,8 +1,9 @@
from django.core.management.base import NoArgsCommand, CommandError
from django.db import transaction
from bldcontrol.bbcontroller import getBuildEnvironmentController, ShellCmdException
from bldcontrol.models import BuildRequest, BuildEnvironment, BRError
from orm.models import ToasterSetting, Build
import os

def DN(path):
@@ -16,6 +17,16 @@ class Command(NoArgsCommand):
    args = ""
    help = "Verifies that the configured settings are valid and usable, or prompts the user to fix the settings."

    def _reduce_canon_path(self, path):
        components = []
        for c in path.split("/"):
            if c == "..":
                del components[-1]
            elif c == ".":
                pass
            else:
                components.append(c)
        return "/".join(components)
    def _find_first_path_for_file(self, startdirectory, filename, level = 0):
        if level < 0:
@@ -34,22 +45,6 @@ class Command(NoArgsCommand):
            return ret
        return None
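The body of _find_first_path_for_file is elided by the hunk above; from its use below (e.g. _find_first_path_for_file(DN(self.guesspath), "bblayers.conf", 4)) it performs a depth-bounded search and returns the directory containing the file. A hypothetical, self-contained equivalent for reference:

    import os

    def find_first_path_for_file(startdirectory, filename, level=0):
        # depth-bounded search: give up below level 0
        if level < 0:
            return None
        dirs = []
        for entry in os.listdir(startdirectory):
            full = os.path.join(startdirectory, entry)
            if os.path.isfile(full) and entry == filename:
                return startdirectory          # the directory that holds the file
            if os.path.isdir(full):
                dirs.append(full)
        for d in dirs:
            ret = find_first_path_for_file(d, filename, level - 1)
            if ret is not None:
                return ret
        return None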
    def _recursive_list_directories(self, startdirectory, level = 0):
        if level < 0:
            return []
        dirs = []
        try:
            for i in os.listdir(startdirectory):
                j = os.path.join(startdirectory, i)
                if os.path.isdir(j):
                    dirs.append(j)
        except OSError:
            pass
        for j in dirs:
            dirs = dirs + self._recursive_list_directories(j, level - 1)
        return dirs
    def _get_suggested_sourcedir(self, be):
        if be.betype != BuildEnvironment.TYPE_LOCAL:
            return ""
@@ -58,181 +53,160 @@ class Command(NoArgsCommand):
    def _get_suggested_builddir(self, be):
        if be.betype != BuildEnvironment.TYPE_LOCAL:
            return ""
        return DN(self._find_first_path_for_file(DN(self.guesspath), "bblayers.conf", 4))
    def _verify_artifact_storage_dir(self):
        # verify that we have a setting for downloading artifacts
        while ToasterSetting.objects.filter(name="ARTIFACTS_STORAGE_DIR").count() == 0:
            guessedpath = os.getcwd() + "/toaster_build_artifacts/"
            print("Toaster needs to know in which directory it can download build log files and other artifacts.\n Toaster suggests \"%s\"." % guessedpath)
            artifacts_storage_dir = raw_input(" Press Enter to select \"%s\" or type the full path to a different directory: " % guessedpath)
            if len(artifacts_storage_dir) == 0:
                artifacts_storage_dir = guessedpath
            if len(artifacts_storage_dir) > 0 and artifacts_storage_dir.startswith("/"):
                try:
                    os.makedirs(artifacts_storage_dir)
                except OSError as ose:
                    if "File exists" in str(ose):
                        pass
                    else:
                        raise ose
                ToasterSetting.objects.create(name="ARTIFACTS_STORAGE_DIR", value=artifacts_storage_dir)
        return 0
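Matching the error text "File exists" works but is fragile; a sketch of the conventional errno-based equivalent of the makedirs guard above:

    import errno, os

    def makedirs_existing_ok(path):
        try:
            os.makedirs(path)
        except OSError as ose:
            # only swallow "already exists"; re-raise everything else
            if ose.errno != errno.EEXIST:
                raise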

    def _verify_build_environment(self):
        self.guesspath = DN(DN(DN(DN(DN(DN(DN(__file__)))))))
        # refuse to start if we have no build environments
        while BuildEnvironment.objects.count() == 0:
            print(" !! No build environments found. Toaster needs at least one build environment in order to be able to run builds.\n" +
                "You can manually define build environments in the database table bldcontrol_buildenvironment.\n" +
                "Or Toaster can define a simple localhost-based build environment for you.")

            i = raw_input(" -- Do you want to create a basic localhost build environment ? (Y/n) ")
            if not len(i) or i.startswith("y") or i.startswith("Y"):
                BuildEnvironment.objects.create(pk = 1, betype = 0)
            else:
                raise Exception("Toaster cannot start without build environments. Aborting.")
        # make sure we have builddir and sourcedir for all defined build environments
        for be in BuildEnvironment.objects.all():
            be.needs_import = False
            def _verify_be():
                is_changed = False
                print("Verifying the Build Environment. If the local Build Environment is not properly configured, you will be asked to configure it.")

                def _update_sourcedir():
                    suggesteddir = self._get_suggested_sourcedir(be)
                    if len(suggesteddir) > 0:
                        be.sourcedir = raw_input("Toaster needs to know in which directory it should check out the layers that will be needed for your builds.\n Toaster suggests \"%s\". If you select this directory, a layer like \"meta-intel\" will end up in \"%s/meta-intel\".\n Press Enter to select \"%s\" or type the full path to a different directory (must be a parent of current checkout directory): " % (suggesteddir, suggesteddir, suggesteddir))
                    else:
                        be.sourcedir = raw_input("Toaster needs to know in which directory it should check out the layers that will be needed for your builds. Type the full path to the directory (for example: \"%s\"): " % os.environ.get('HOME', '/tmp/'))
                    if len(be.sourcedir) == 0 and len(suggesteddir) > 0:
                        be.sourcedir = suggesteddir
                    return True

                if len(be.sourcedir) == 0:
                    print "\n -- Validation: The checkout directory must be set."
                    is_changed = _update_sourcedir()

                if not be.sourcedir.startswith("/"):
                    print "\n -- Validation: The checkout directory must be set to an absolute path."
                    is_changed = _update_sourcedir()

                if not be.sourcedir in DN(__file__):
                    print "\n -- Validation: The checkout directory must be a parent of the current checkout."
                    is_changed = _update_sourcedir()

                if is_changed:
                    if be.betype == BuildEnvironment.TYPE_LOCAL:
                        be.needs_import = True
                    return True

                def _update_builddir():
                    suggesteddir = self._get_suggested_builddir(be)
                    if len(suggesteddir) > 0:
                        be.builddir = raw_input("Toaster needs to know where your build directory is located.\n The build directory is where all the artifacts created by your builds will be stored. Toaster suggests \"%s\".\n Press Enter to select \"%s\" or type the full path to a different directory: " % (suggesteddir, suggesteddir))
                    else:
                        be.builddir = raw_input("Toaster needs to know where your build directory is.\n The build directory is where all the artifacts created by your builds will be stored. Type the full path to the directory (for example: \"%s/build\"): " % os.environ.get('HOME', '/tmp/'))
                    if len(be.builddir) == 0 and len(suggesteddir) > 0:
                        be.builddir = suggesteddir
                    return True

                if len(be.builddir) == 0:
                    print "\n -- Validation: The build directory must be set."
                    is_changed = _update_builddir()

                if not be.builddir.startswith("/"):
                    print "\n -- Validation: The build directory must be set to an absolute path."
                    is_changed = _update_builddir()

                if is_changed:
                    print "Build configuration saved"
                    be.save()
                    return True

                if be.needs_import:
                    print "\nToaster can use a SINGLE predefined configuration file to set up default project settings and layer information sources.\n"

                    # find configuration files
                    config_files = []
                    for dirname in self._recursive_list_directories(be.sourcedir, 2):
                        if os.path.exists(os.path.join(dirname, ".templateconf")):
                            import subprocess
                            conffilepath, error = subprocess.Popen('bash -c ". '+os.path.join(dirname, ".templateconf")+'; echo \"\$TEMPLATECONF\""', shell=True, stdout=subprocess.PIPE).communicate()
                            conffilepath = os.path.join(conffilepath.strip(), "toasterconf.json")
                            candidatefilepath = os.path.join(dirname, conffilepath)
                            if "toaster_cloned" in candidatefilepath:
                                continue
                            if os.path.exists(candidatefilepath):
                                config_files.append(candidatefilepath)

                    if len(config_files) > 0:
                        print " Toaster will now list the configuration files that it found. Select the number to use the desired configuration file."
                        for cf in config_files:
                            print " [%d] - %s" % (config_files.index(cf) + 1, cf)
                        print "\n [0] - Exit without importing any file"
                        try:
                            i = raw_input("\n Enter your option: ")
                            if len(i) and (int(i) - 1 >= 0 and int(i) - 1 < len(config_files)):
                                print "Importing file: %s" % config_files[int(i)-1]
                                from loadconf import Command as LoadConfigCommand

                                LoadConfigCommand()._import_layer_config(config_files[int(i)-1])
                                # we run lsupdates after config update
                                print "Layer configuration imported. Updating information from the layer sources, please wait.\n You can re-update any time later by running bitbake/lib/toaster/manage.py lsupdates"
                                from django.core.management import call_command
                                call_command("lsupdates")

                                # we don't look for any other config files
                                return is_changed
                        except Exception as e:
                            print "Failure while trying to import the toaster config file: %s" % e
                    else:
                        print "\n Toaster could not find a configuration file. You need to configure Toaster manually using the web interface, or create a configuration file and use\n bitbake/lib/toaster/managepy.py loadconf [filename]\n command to load it. You can use https://wiki.yoctoproject.org/wiki/File:Toasterconf.json.txt.patch as a starting point."

                return is_changed

            while (_verify_be()):
                pass
        return 0
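For context on the .templateconf probing above: in a poky checkout, .templateconf is a one-line shell fragment along the lines of (contents vary by release; this is the meta-yocto form of that era):

    TEMPLATECONF=${TEMPLATECONF:-meta-yocto/conf}

Sourcing it and echoing $TEMPLATECONF therefore yields the template directory, which is then probed for a toasterconf.json next to the local.conf and bblayers.conf samples.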
    def _verify_default_settings(self):
        # verify that default settings are there
        if ToasterSetting.objects.filter(name = 'DEFAULT_RELEASE').count() != 1:
            ToasterSetting.objects.filter(name = 'DEFAULT_RELEASE').delete()
            ToasterSetting.objects.get_or_create(name = 'DEFAULT_RELEASE', value = '')
        return 0
    def _verify_builds_in_progress(self):
        # we are just starting up. we must not have any builds in progress, or build environments taken
        for b in BuildRequest.objects.filter(state = BuildRequest.REQ_INPROGRESS):
            BRError.objects.create(req = b, errtype = "toaster", errmsg = "Toaster found this build IN PROGRESS while Toaster started up. This is an inconsistent state, and the build was marked as failed")

        BuildRequest.objects.filter(state = BuildRequest.REQ_INPROGRESS).update(state = BuildRequest.REQ_FAILED)

        BuildEnvironment.objects.update(lock = BuildEnvironment.LOCK_FREE)

        # also mark "In Progress" builds as failures
        from django.utils import timezone
        Build.objects.filter(outcome = Build.IN_PROGRESS).update(outcome = Build.FAILED, completed_on = timezone.now())

        return 0
    def handle(self, **options):
        retval = 0
        retval += self._verify_artifact_storage_dir()
        retval += self._verify_build_environment()
        retval += self._verify_default_settings()
        retval += self._verify_builds_in_progress()

        return retval
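Each verification step returns 0 on success, so a non-zero sum signals that something needs attention. A minimal sketch of driving the command programmatically, using the standard Django management API:

    from django.core.management import call_command
    call_command("checksettings")   # equivalent to: manage.py checksettings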
@@ -1,174 +0,0 @@
from django.core.management.base import BaseCommand, CommandError
from orm.models import LayerSource, ToasterSetting, Branch, Layer, Layer_Version
from orm.models import BitbakeVersion, Release, ReleaseDefaultLayer, ReleaseLayerSourcePriority
import os

from checksettings import DN

def _reduce_canon_path(path):
    components = []
    for c in path.split("/"):
        if c == "..":
            del components[-1]
        elif c == ".":
            pass
        else:
            components.append(c)
    if len(components) < 2:
        # keep a leading "/" when everything was consumed (e.g. "/a/..")
        components.append('')
    return "/".join(components)
def _get_id_for_sourcetype(s):
    for i in LayerSource.SOURCE_TYPE:
        if s == i[1]:
            return i[0]
    raise Exception("Could not find definition for sourcetype '%s'. Valid source types are %s" % (str(s), ', '.join(map(lambda x: "'%s'" % x[1], LayerSource.SOURCE_TYPE))))
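This is a reverse lookup over a Django choices tuple. For illustration, assuming a choices tuple of this shape (the exact values live in orm.models):

    SOURCE_TYPE = ((0, "local"), (1, "layerindex"), (2, "imported"))  # hypothetical values

    def get_id_for_sourcetype(s):
        for i in SOURCE_TYPE:
            if s == i[1]:          # i = (id, name); match on the name
                return i[0]
        raise Exception("unknown sourcetype %s" % s)

    print(get_id_for_sourcetype("layerindex"))   # -> 1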
class Command(BaseCommand):
    help = "Loads a toasterconf.json file in the database"
    args = "filepath"

    def _import_layer_config(self, filepath):
        if not os.path.exists(filepath) or not os.path.isfile(filepath):
            raise Exception("Failed to find toaster config file %s ." % filepath)

        import json, pprint
        data = json.loads(open(filepath, "r").read())

        # verify config file validity before updating settings
        for i in ['bitbake', 'releases', 'defaultrelease', 'config', 'layersources']:
            assert i in data

        def _read_git_url_from_local_repository(address):
            url = None
            # we detect the remote name at runtime
            import subprocess
            (remote, remote_name) = address.split(":", 1)
            cmd = subprocess.Popen("git remote -v", shell=True, cwd = os.path.dirname(filepath), stdout=subprocess.PIPE, stderr = subprocess.PIPE)
            (out, err) = cmd.communicate()
            if cmd.returncode != 0:
                raise Exception("Error while importing layer vcs_url: git error: %s" % err)
            for line in out.split("\n"):
                try:
                    (name, path) = line.split("\t", 1)
                    if name == remote_name:
                        url = path.split(" ")[0]
                        break
                except ValueError:
                    pass
            if url is None:
                raise Exception("Error while looking for remote \"%s\" in \"%s\"" % (remote_name, out))
            return url
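The "remote:" scheme lets toasterconf.json say "use whatever URL this checkout was cloned from". A worked example (the remote listing is illustrative):

    # given toasterconf.json containing   "giturl": "remote:origin"
    # and `git remote -v` in the config file's directory printing:
    #   origin  git://git.yoctoproject.org/poky (fetch)
    #   origin  git://git.yoctoproject.org/poky (push)
    address = "remote:origin"
    (remote, remote_name) = address.split(":", 1)    # -> ("remote", "origin")
    line = "origin\tgit://git.yoctoproject.org/poky (fetch)"
    (name, path) = line.split("\t", 1)
    url = path.split(" ")[0]
    print(url)    # -> git://git.yoctoproject.org/poky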
        # import bitbake data
        for bvi in data['bitbake']:
            bvo, created = BitbakeVersion.objects.get_or_create(name=bvi['name'])
            bvo.giturl = bvi['giturl']
            if bvi['giturl'].startswith("remote:"):
                bvo.giturl = _read_git_url_from_local_repository(bvi['giturl'])
            bvo.branch = bvi['branch']
            bvo.dirpath = bvi['dirpath']
            bvo.save()

        # set the layer sources
        for lsi in data['layersources']:
            assert 'sourcetype' in lsi
            assert 'apiurl' in lsi
            assert 'name' in lsi
            assert 'branches' in lsi

            if _get_id_for_sourcetype(lsi['sourcetype']) == LayerSource.TYPE_LAYERINDEX or lsi['apiurl'].startswith("/"):
                apiurl = lsi['apiurl']
            else:
                apiurl = _reduce_canon_path(os.path.join(DN(os.path.abspath(filepath)), lsi['apiurl']))

            assert ((_get_id_for_sourcetype(lsi['sourcetype']) == LayerSource.TYPE_LAYERINDEX) or apiurl.startswith("/")), (lsi['sourcetype'], apiurl)

            try:
                ls = LayerSource.objects.get(sourcetype = _get_id_for_sourcetype(lsi['sourcetype']), apiurl = apiurl)
            except LayerSource.DoesNotExist:
                ls = LayerSource.objects.create(
                        name = lsi['name'],
                        sourcetype = _get_id_for_sourcetype(lsi['sourcetype']),
                        apiurl = apiurl
                    )

            layerbranches = []
            for branchname in lsi['branches']:
                bo, created = Branch.objects.get_or_create(layer_source = ls, name = branchname)
                layerbranches.append(bo)

            if 'layers' in lsi:
                for layerinfo in lsi['layers']:
                    lo, created = Layer.objects.get_or_create(layer_source = ls, name = layerinfo['name'])
                    if layerinfo['local_path'].startswith("/"):
                        lo.local_path = layerinfo['local_path']
                    else:
                        lo.local_path = _reduce_canon_path(os.path.join(ls.apiurl, layerinfo['local_path']))

                    if not os.path.exists(lo.local_path):
                        raise Exception("Local layer path %s must exist." % lo.local_path)

                    if layerinfo['vcs_url'].startswith("remote:"):
                        lo.vcs_url = _read_git_url_from_local_repository(layerinfo['vcs_url'])
                    else:
                        lo.vcs_url = layerinfo['vcs_url']

                    if 'layer_index_url' in layerinfo:
                        lo.layer_index_url = layerinfo['layer_index_url']
                    lo.save()

                    for branch in layerbranches:
                        lvo, created = Layer_Version.objects.get_or_create(layer_source = ls,
                                up_branch = branch,
                                commit = branch.name,
                                layer = lo)
                        lvo.dirpath = layerinfo['dirpath']
                        lvo.save()

        # set releases
        for ri in data['releases']:
            bvo = BitbakeVersion.objects.get(name = ri['bitbake'])
            assert bvo is not None

            ro, created = Release.objects.get_or_create(name = ri['name'], bitbake_version = bvo, branch_name = ri['branch'])
            ro.description = ri['description']
            ro.helptext = ri['helptext']
            ro.save()

            # save layer source priority for release
            for ls_name in ri['layersourcepriority'].keys():
                rlspo, created = ReleaseLayerSourcePriority.objects.get_or_create(release = ro, layer_source = LayerSource.objects.get(name=ls_name))
                rlspo.priority = ri['layersourcepriority'][ls_name]
                rlspo.save()

            for dli in ri['defaultlayers']:
                # find layers with the same name
                ReleaseDefaultLayer.objects.get_or_create(release = ro, layer_name = dli)

        # set default release
        if ToasterSetting.objects.filter(name = "DEFAULT_RELEASE").count() > 0:
            ToasterSetting.objects.filter(name = "DEFAULT_RELEASE").update(value = data['defaultrelease'])
        else:
            ToasterSetting.objects.create(name = "DEFAULT_RELEASE", value = data['defaultrelease'])

        # set default config variables
        for configname in data['config']:
            if ToasterSetting.objects.filter(name = "DEFCONF_" + configname).count() > 0:
                ToasterSetting.objects.filter(name = "DEFCONF_" + configname).update(value = data['config'][configname])
            else:
                ToasterSetting.objects.create(name = "DEFCONF_" + configname, value = data['config'][configname])


    def handle(self, *args, **options):
        if len(args) == 0:
            raise CommandError("Need a path to the toasterconf.json file")
        filepath = args[0]
        self._import_layer_config(filepath)
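Putting the importer together, a minimal toasterconf.json it would accept looks roughly like this (all names, URLs and paths are made up for illustration; real files ship inside the layers):

    {
        "config": { "MACHINE": "qemux86" },
        "bitbake": [ { "name": "HEAD", "giturl": "remote:origin",
                       "branch": "HEAD", "dirpath": "bitbake" } ],
        "defaultrelease": "local",
        "releases": [ { "name": "local", "description": "Local checkout",
                        "bitbake": "HEAD", "branch": "HEAD", "helptext": "...",
                        "layersourcepriority": { "Local": 1 },
                        "defaultlayers": [ "meta", "meta-yocto" ] } ],
        "layersources": [ { "name": "Local", "sourcetype": "local",
                            "apiurl": "../../", "branches": [ "HEAD" ],
                            "layers": [ { "name": "meta", "local_path": "meta",
                                          "vcs_url": "remote:origin",
                                          "dirpath": "meta" } ] } ]
    }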
@@ -1,12 +1,9 @@
from django.core.management.base import NoArgsCommand, CommandError
from django.db import transaction
from orm.models import Build, ToasterSetting, LogMessage, Target
from bldcontrol.bbcontroller import getBuildEnvironmentController, ShellCmdException, BuildSetupException
from bldcontrol.models import BuildRequest, BuildEnvironment, BRError, BRVariable
import os
import logging
import traceback   # used by schedule() below; missing from the garbled import block

logger = logging.getLogger("ToasterScheduler")

class Command(NoArgsCommand):
    args = ""
@@ -35,7 +32,6 @@ class Command(NoArgsCommand):
                # select the build environment and the request to build
                br = self._selectBuildRequest()
            except IndexError as e:
                #logger.debug("runbuilds: No build request")
                return
            try:
                bec = self._selectBuildEnvironment()
@@ -43,111 +39,52 @@ class Command(NoArgsCommand):
                # we could not find a BEC; postpone the BR
                br.state = BuildRequest.REQ_QUEUED
                br.save()
                logger.debug("runbuilds: No build env")
                return

            logger.debug("runbuilds: starting build %s, environment %s" % (br, bec.be))

            # write the build identification variable
            BRVariable.objects.create(req = br, name="TOASTER_BRBE", value="%d:%d" % (br.pk, bec.be.pk))

            # let the build request know where it is being executed
            br.environment = bec.be
            br.save()

            # cleanup is to be performed by toaster when the deed is done

            # this triggers an async build
            bec.triggerBuild(br.brbitbake_set.all(), br.brlayer_set.all(), br.brvariable_set.all(), br.brtarget_set.all())

        except Exception as e:
            logger.error("runbuilds: Error launching build %s" % e)
            traceback.print_exc(e)
            if "[Errno 111] Connection refused" in str(e):
                # Connection refused, read toaster_server.log
                errmsg = bec.readServerLogFile()
            else:
                errmsg = str(e)

            BRError.objects.create(req = br,
                                   errtype = str(type(e)),
                                   errmsg = errmsg,
                                   traceback = traceback.format_exc(e))
            br.state = BuildRequest.REQ_FAILED
            br.save()
            bec.be.lock = BuildEnvironment.LOCK_FREE
            bec.be.save()
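TOASTER_BRBE ties the spawned build back to its build request and build environment; a quick illustration of the round trip (pk values are made up):

    value = "%d:%d" % (7, 1)           # build request pk 7, build environment pk 1
    print(value)                       # -> 7:1
    br_pk, be_pk = value.split(":")    # the consumer (toasterui) can split it back apart, as sketched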
    def archive(self):
        ''' archives data from the builds '''
        artifact_storage_dir = ToasterSetting.objects.get(name="ARTIFACTS_STORAGE_DIR").value
        for br in BuildRequest.objects.filter(state = BuildRequest.REQ_ARCHIVE):
            # save the cooker log
            if br.build is None:
                br.state = BuildRequest.REQ_FAILED
                br.save()
                continue
            build_artifact_storage_dir = os.path.join(artifact_storage_dir, "%d" % br.build.pk)
            try:
                os.makedirs(build_artifact_storage_dir)
            except OSError as ose:
                if "File exists" in str(ose):
                    pass
                else:
                    raise ose

            file_name = os.path.join(build_artifact_storage_dir, "cooker_log.txt")
            try:
                with open(file_name, "w") as f:
                    f.write(br.environment.get_artifact(br.build.cooker_log_path).read())
            except IOError:
                os.unlink(file_name)

            br.state = BuildRequest.REQ_COMPLETED
            br.save()
    def cleanup(self):
        from django.utils import timezone
        from datetime import timedelta
        # environments locked for more than 30 seconds should be unlocked
        BuildEnvironment.objects.filter(buildrequest__state__in=[BuildRequest.REQ_FAILED, BuildRequest.REQ_COMPLETED]).filter(lock=BuildEnvironment.LOCK_LOCK).filter(updated__lt = timezone.now() - timedelta(seconds = 30)).update(lock = BuildEnvironment.LOCK_FREE)

        # update all Builds that failed to start
        for br in BuildRequest.objects.filter(state = BuildRequest.REQ_FAILED, build__outcome = Build.IN_PROGRESS):
            # transpose the launch errors into ToasterExceptions
            br.build.outcome = Build.FAILED
            for brerror in br.brerror_set.all():
                logger.debug("Saving error %s" % brerror)
                LogMessage.objects.create(build = br.build, level = LogMessage.EXCEPTION, message = brerror.errmsg)
            br.build.save()

            # we don't have a true build object here; hence, toasterui didn't have a chance to release the BE lock
            br.environment.lock = BuildEnvironment.LOCK_FREE
            br.environment.save()

        # update all BuildRequests without a build created
        for br in BuildRequest.objects.filter(build = None):
            br.build = Build.objects.create(project = br.project, completed_on = br.updated, started_on = br.created)
            br.build.outcome = Build.FAILED    # the outcome must be a Build outcome, not a BuildRequest state
            try:
                br.build.machine = br.brvariable_set.get(name='MACHINE').value
            except BRVariable.DoesNotExist:
                pass
            br.save()
            # transpose target information
            for brtarget in br.brtarget_set.all():
                Target.objects.create(build = br.build, target = brtarget.target)
            # transpose the launch errors into ToasterExceptions
            for brerror in br.brerror_set.all():
                LogMessage.objects.create(build = br.build, level = LogMessage.EXCEPTION, message = brerror.errmsg)

            br.build.save()
    def handle_noargs(self, **options):
        self.cleanup()
        self.archive()
        self.schedule()
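handle_noargs is the whole scheduler tick: free stale resources, archive finished requests, then launch at most one queued build. A sketch of how it might be driven periodically (the polling loop is an assumption for illustration, not part of this file):

    import time
    from django.core.management import call_command

    while True:
        call_command("runbuilds")   # one cleanup/archive/schedule pass
        time.sleep(10)              # hypothetical poll interval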
@@ -1,145 +0,0 @@
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):
        # Adding field 'BuildRequest.environment'
        db.add_column(u'bldcontrol_buildrequest', 'environment',
                      self.gf('django.db.models.fields.related.ForeignKey')(to=orm['bldcontrol.BuildEnvironment'], null=True),
                      keep_default=False)

        # Changing field 'BuildRequest.build'
        db.alter_column(u'bldcontrol_buildrequest', 'build_id', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['orm.Build'], unique=True, null=True))
        # Adding unique constraint on 'BuildRequest', fields ['build']
        db.create_unique(u'bldcontrol_buildrequest', ['build_id'])


    def backwards(self, orm):
        # Removing unique constraint on 'BuildRequest', fields ['build']
        db.delete_unique(u'bldcontrol_buildrequest', ['build_id'])

        # Deleting field 'BuildRequest.environment'
        db.delete_column(u'bldcontrol_buildrequest', 'environment_id')

        # Changing field 'BuildRequest.build'
        db.alter_column(u'bldcontrol_buildrequest', 'build_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['orm.Build'], null=True))

    models = {
        u'bldcontrol.brbitbake': {
            'Meta': {'object_name': 'BRBitbake'},
            'commit': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'giturl': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']", 'unique': 'True'})
        },
        u'bldcontrol.brerror': {
            'Meta': {'object_name': 'BRError'},
            'errmsg': ('django.db.models.fields.TextField', [], {}),
            'errtype': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
            'traceback': ('django.db.models.fields.TextField', [], {})
        },
        u'bldcontrol.brlayer': {
            'Meta': {'object_name': 'BRLayer'},
            'commit': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'giturl': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"})
        },
        u'bldcontrol.brtarget': {
            'Meta': {'object_name': 'BRTarget'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
            'target': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'task': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
        },
        u'bldcontrol.brvariable': {
            'Meta': {'object_name': 'BRVariable'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
            'value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
        },
        u'bldcontrol.buildenvironment': {
            'Meta': {'object_name': 'BuildEnvironment'},
            'address': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'bbaddress': ('django.db.models.fields.CharField', [], {'max_length': '254', 'blank': 'True'}),
            'bbport': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
            'bbstate': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'bbtoken': ('django.db.models.fields.CharField', [], {'max_length': '126', 'blank': 'True'}),
            'betype': ('django.db.models.fields.IntegerField', [], {}),
            'builddir': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'lock': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'sourcedir': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'bldcontrol.buildrequest': {
            'Meta': {'object_name': 'BuildRequest'},
            'build': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['orm.Build']", 'unique': 'True', 'null': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'environment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildEnvironment']", 'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
            'state': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'orm.bitbakeversion': {
            'Meta': {'object_name': 'BitbakeVersion'},
            'branch': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
            'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'giturl': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
        },
        u'orm.build': {
            'Meta': {'object_name': 'Build'},
            'bitbake_version': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'build_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'completed_on': ('django.db.models.fields.DateTimeField', [], {}),
            'cooker_log_path': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
            'distro': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'distro_version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'errors_no': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'machine': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'outcome': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']", 'null': 'True'}),
            'started_on': ('django.db.models.fields.DateTimeField', [], {}),
            'timespent': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'warnings_no': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        },
        u'orm.project': {
            'Meta': {'object_name': 'Project'},
            'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"}),
            'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'user_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
        },
        u'orm.release': {
            'Meta': {'object_name': 'Release'},
            'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
            'branch': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
        }
    }

    complete_apps = ['bldcontrol']
@@ -1,138 +0,0 @@
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models

class Migration(DataMigration):
    # ids that cannot be imported from BuildRequest

    def forwards(self, orm):
        "Write your forwards methods here."
        # Note: Don't use "from appname.models import ModelName".
        # Use orm.ModelName to refer to models in this application,
        # and orm['appname.ModelName'] for models in other applications.
        REQ_COMPLETED = 3
        REQ_ARCHIVE = 6
        orm.BuildRequest.objects.filter(state=REQ_COMPLETED).update(state=REQ_ARCHIVE)

    def backwards(self, orm):
        "Write your backwards methods here."
        REQ_COMPLETED = 3
        REQ_ARCHIVE = 6
        orm.BuildRequest.objects.filter(state=REQ_ARCHIVE).update(state=REQ_COMPLETED)

    models = {
        u'bldcontrol.brbitbake': {
            'Meta': {'object_name': 'BRBitbake'},
            'commit': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'giturl': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']", 'unique': 'True'})
        },
        u'bldcontrol.brerror': {
            'Meta': {'object_name': 'BRError'},
            'errmsg': ('django.db.models.fields.TextField', [], {}),
            'errtype': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
            'traceback': ('django.db.models.fields.TextField', [], {})
        },
        u'bldcontrol.brlayer': {
            'Meta': {'object_name': 'BRLayer'},
            'commit': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'giturl': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"})
        },
        u'bldcontrol.brtarget': {
            'Meta': {'object_name': 'BRTarget'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
            'target': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'task': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
        },
        u'bldcontrol.brvariable': {
            'Meta': {'object_name': 'BRVariable'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
            'value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
        },
        u'bldcontrol.buildenvironment': {
            'Meta': {'object_name': 'BuildEnvironment'},
            'address': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'bbaddress': ('django.db.models.fields.CharField', [], {'max_length': '254', 'blank': 'True'}),
            'bbport': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
            'bbstate': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'bbtoken': ('django.db.models.fields.CharField', [], {'max_length': '126', 'blank': 'True'}),
            'betype': ('django.db.models.fields.IntegerField', [], {}),
            'builddir': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'lock': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'sourcedir': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'bldcontrol.buildrequest': {
            'Meta': {'object_name': 'BuildRequest'},
            'build': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['orm.Build']", 'unique': 'True', 'null': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'environment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildEnvironment']", 'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
            'state': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'orm.bitbakeversion': {
            'Meta': {'object_name': 'BitbakeVersion'},
            'branch': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
            'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'giturl': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
        },
        u'orm.build': {
            'Meta': {'object_name': 'Build'},
            'bitbake_version': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'build_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'completed_on': ('django.db.models.fields.DateTimeField', [], {}),
            'cooker_log_path': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
            'distro': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'distro_version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'errors_no': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'machine': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'outcome': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']", 'null': 'True'}),
            'started_on': ('django.db.models.fields.DateTimeField', [], {}),
            'timespent': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'warnings_no': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        },
        u'orm.project': {
            'Meta': {'object_name': 'Project'},
            'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"}),
            'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'user_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
        },
        u'orm.release': {
            'Meta': {'object_name': 'Release'},
            'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
            'branch_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50'}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'helptext': ('django.db.models.fields.TextField', [], {'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
        }
    }

    complete_apps = ['bldcontrol']
    symmetrical = True
|
||||
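For orientation: in a South migration, a frozen models dict like the one above is handed back to the migration methods as the orm argument, so the code queries the snapshot rather than the live models. A minimal sketch of that shape (the class and method bodies here are illustrative, not part of this diff):

# Illustrative South migration skeleton; only the frozen "models" dict,
# complete_apps and symmetrical above come from this diff.
from south.v2 import DataMigration

class Migration(DataMigration):

    def forwards(self, orm):
        # the orm argument resolves names through the frozen snapshot,
        # e.g. orm['bldcontrol.BuildRequest'] instead of a live import
        for req in orm['bldcontrol.BuildRequest'].objects.all():
            pass

    def backwards(self, orm):
        pass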
@@ -40,50 +40,6 @@ class BuildEnvironment(models.Model):
    updated = models.DateTimeField(auto_now = True)

    def get_artifact_type(self, path):
        if self.betype == BuildEnvironment.TYPE_LOCAL:
            try:
                import magic

                # fair warning: this is a mess; there are multiple competing and incompatible
                # magic modules floating around, so we try some of the most common combinations

                try: # we try ubuntu's python-magic 5.4
                    m = magic.open(magic.MAGIC_MIME_TYPE)
                    m.load()
                    return m.file(path)
                except AttributeError:
                    pass

                try: # we try python-magic 0.4.6
                    m = magic.Magic(magic.MAGIC_MIME)
                    return m.from_file(path)
                except AttributeError:
                    pass

                try: # we try pip filemagic 1.6
                    m = magic.Magic(flags=magic.MAGIC_MIME_TYPE)
                    return m.id_filename(path)
                except AttributeError:
                    pass

                return "binary/octet-stream"
            except ImportError:
                return "binary/octet-stream"
        raise Exception("FIXME: artifact type not implemented for build environment type %s" % self.get_betype_display())

    def get_artifact(self, path):
        if self.betype == BuildEnvironment.TYPE_LOCAL:
            return open(path, "r")
        raise Exception("FIXME: artifact download not implemented for build environment type %s" % self.get_betype_display())

    def has_artifact(self, path):
        import os
        if self.betype == BuildEnvironment.TYPE_LOCAL:
            return os.path.exists(path)
        raise Exception("FIXME: has artifact not implemented for build environment type %s" % self.get_betype_display())

# a BuildRequest is a request that the scheduler will build using a BuildEnvironment
# the build request queue is the table itself, ordered by state
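The two comments above describe the scheduling model: there is no separate queue structure, the BuildRequest table itself is the queue. A hedged sketch of what a scheduler pick looks like under that model (the helper name is hypothetical; the RunBuildsCommandTests further down exercise a similar _selectBuildRequest()):

# Hypothetical scheduler pick, assuming the BuildRequest model from this diff:
# the "queue" is just the table filtered by state, oldest request first.
from bldcontrol.models import BuildRequest

def select_next_request():
    # raises IndexError when nothing is queued, which matches the behaviour
    # the tests below assert for _selectBuildRequest()
    return BuildRequest.objects.filter(state=BuildRequest.REQ_QUEUED).order_by('created')[0]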
@@ -93,8 +49,6 @@ class BuildRequest(models.Model):
    REQ_INPROGRESS = 2
    REQ_COMPLETED = 3
    REQ_FAILED = 4
    REQ_DELETED = 5
    REQ_ARCHIVE = 6

    REQUEST_STATE = (
        (REQ_CREATED, "created"),
@@ -102,31 +56,14 @@ class BuildRequest(models.Model):
        (REQ_INPROGRESS, "in progress"),
        (REQ_COMPLETED, "completed"),
        (REQ_FAILED, "failed"),
        (REQ_DELETED, "deleted"),
        (REQ_ARCHIVE, "archive"),
    )

    search_allowed_fields = ("brtarget__target", "build__project__name")

    project = models.ForeignKey(Project)
    build = models.OneToOneField(Build, null = True) # TODO: toasterui should set this when Build is created
    environment = models.ForeignKey(BuildEnvironment, null = True)
    build = models.ForeignKey(Build, null = True) # TODO: toasterui should set this when Build is created
    state = models.IntegerField(choices = REQUEST_STATE, default = REQ_CREATED)
    created = models.DateTimeField(auto_now_add = True)
    updated = models.DateTimeField(auto_now = True)

    def get_duration(self):
        return (self.updated - self.created).total_seconds()

    def get_sorted_target_list(self):
        return self.brtarget_set.order_by('target')

    def get_machine(self):
        return self.brvariable_set.get(name="MACHINE").value

    def __str__(self):
        return "%s %s" % (self.project, self.get_state_display())

# These tables specify the settings for running an actual build.
# They MUST be kept in sync with the tables in orm.models.Project*
@@ -159,6 +96,3 @@ class BRError(models.Model):
    errtype = models.CharField(max_length=100)
    errmsg = models.TextField()
    traceback = models.TextField()

    def __str__(self):
        return "%s (%s)" % (self.errmsg, self.req)
@@ -29,10 +29,7 @@ import subprocess

from toastermain import settings

from bbcontroller import BuildEnvironmentController, ShellCmdException, BuildSetupException

class NotImplementedException(Exception):
    pass
from bbcontroller import BuildEnvironmentController, ShellCmdException, BuildSetupException, _getgitcheckoutdirectoryname

def DN(path):
    return "/".join(path.split("/")[0:-1])
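DN() is just a string-based dirname; for illustration:

# DN() drops the last path component, e.g.:
print(DN("/home/user/poky/oe-init-build-env"))  # prints /home/user/poky
print(DN("conf/bblayers.conf"))                 # prints conf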
@@ -80,21 +77,16 @@ class SSHBEController(BuildEnvironmentController):
        self._pathcreate(self.be.builddir)
        self._shellcmd("bash -c \"source %s/oe-init-build-env %s\"" % (self.pokydirname, self.be.builddir))

    def startBBServer(self, brbe):
    def startBBServer(self):
        assert self.pokydirname and self._pathexists(self.pokydirname)
        assert self.islayerset
        cmd = self._shellcmd("bash -c \"source %s/oe-init-build-env %s && DATABASE_URL=%s source toaster start noweb brbe=%s\"" % (self.pokydirname, self.be.builddir, self.dburl, brbe))

        port = "-1"
        for i in cmd.split("\n"):
            if i.startswith("Bitbake server address"):
                port = i.split(" ")[-1]
                print "Found bitbake server port ", port

        print self._shellcmd("bash -c \"source %s/oe-init-build-env %s && DATABASE_URL=%s source toaster start noweb && sleep 1\"" % (self.pokydirname, self.be.builddir, self.dburl))
        # FIXME unfortunate sleep 1 - we need to make sure that bbserver is started and the toaster ui is connected
        # but since they start async without any return, we just wait a bit
        print "Started server"
        assert self.be.sourcedir and self._pathexists(self.be.builddir)
        self.be.bbaddress = self.be.address.split("@")[-1]
        self.be.bbport = port
        self.be.bbport = "8200"
        self.be.bbstate = BuildEnvironment.SERVER_STARTED
        self.be.save()
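The FIXME above papers over a startup race with a fixed sleep 1. One hedged alternative (illustrative only, not part of this change) is to poll the port until the server accepts connections:

# Illustrative replacement for the fixed sleep: poll until host:port accepts
# a TCP connection or the timeout expires. Not part of this change.
import socket
import time

def wait_for_port(host, port, timeout=30):
    deadline = time.time() + timeout
    while time.time() < deadline:
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            if s.connect_ex((host, int(port))) == 0:
                return True
        finally:
            s.close()
        time.sleep(0.5)
    return False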
@@ -107,19 +99,6 @@ class SSHBEController(BuildEnvironmentController):
        self.be.save()
        print "Stopped server"


    def _copyFile(self, filepath1, filepath2):
        # scp filepath1 to filepath2; either side may be a "host:path" spec
        p = subprocess.Popen("scp '%s' '%s'" % (filepath1, filepath2), stdout=subprocess.PIPE, stderr = subprocess.PIPE, shell=True)
        (out, err) = p.communicate()
        if p.returncode:
            raise ShellCmdException(err)

    def pullFile(self, remote_filename, local_filename):
        # fetch remote_filename from the BE into local_filename (scp remote -> local)
        self._copyFile("%s:%s" % (self.be.address, remote_filename), local_filename)

    def pushFile(self, local_filename, remote_filename):
        # send local_filename to remote_filename on the BE (scp local -> remote)
        self._copyFile(local_filename, "%s:%s" % (self.be.address, remote_filename))

    def setLayers(self, bitbakes, layers):
        """ a word of attention: by convention, the first layer for any build will be poky! """
@@ -127,8 +106,62 @@ class SSHBEController(BuildEnvironmentController):
        assert len(bitbakes) == 1
        # set layers in the layersource

        # 1. get a list of repos, and map dirpaths for each layer
        gitrepos = {}
        gitrepos[bitbakes[0].giturl] = []
        gitrepos[bitbakes[0].giturl].append(("bitbake", bitbakes[0].dirpath, bitbakes[0].commit))

        for layer in layers:
            # we don't process local URLs
            if layer.giturl.startswith("file://"):
                continue
            if not layer.giturl in gitrepos:
                gitrepos[layer.giturl] = []
            gitrepos[layer.giturl].append((layer.name, layer.dirpath, layer.commit))
        for giturl in gitrepos.keys():
            commitid = gitrepos[giturl][0][2]
            for e in gitrepos[giturl]:
                if commitid != e[2]:
                    raise BuildSetupException("More than one commit per git url, unsupported configuration")

        layerlist = []

        # 2. checkout the repositories
        for giturl in gitrepos.keys():
            import os
            localdirname = os.path.join(self.be.sourcedir, _getgitcheckoutdirectoryname(giturl))
            print "DEBUG: giturl ", giturl, "checking out in current directory", localdirname

            # make sure our directory is a git repository
            if self._pathexists(localdirname):
                if not giturl in self._shellcmd("git remote -v", localdirname):
                    raise BuildSetupException("Existing git repository at %s, but with different remotes (not '%s'). Aborting." % (localdirname, giturl))
            else:
                self._shellcmd("git clone \"%s\" \"%s\"" % (giturl, localdirname))
            # checkout the needed commit
            commit = gitrepos[giturl][0][2]

            # branch magic name "HEAD" will inhibit checkout
            if commit != "HEAD":
                print "DEBUG: checking out commit ", commit, "to", localdirname
                self._shellcmd("git fetch --all && git checkout \"%s\"" % commit, localdirname)

            # take the localdirname as poky dir if we can find the oe-init-build-env
            if self.pokydirname is None and self._pathexists(os.path.join(localdirname, "oe-init-build-env")):
                print "DEBUG: selected poky dir name", localdirname
                self.pokydirname = localdirname

            # verify our repositories
            for name, dirpath, commit in gitrepos[giturl]:
                localdirpath = os.path.join(localdirname, dirpath)
                if not self._pathexists(localdirpath):
                    raise BuildSetupException("Cannot find layer git path '%s' in checked out repository '%s:%s'. Aborting." % (localdirpath, giturl, commit))

                if name != "bitbake":
                    layerlist.append(localdirpath)

        print "DEBUG: current layer list ", layerlist

        raise NotImplementedException("Not implemented: SSH setLayers")
        # 3. configure the build environment, so we have a conf/bblayers.conf
        assert self.pokydirname is not None
        self._setupBE()
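For clarity, the mapping built in step 1 groups (name, dirpath, commit) tuples by git URL, and step 2 then checks each URL out exactly once. Illustrative contents (the second URL and the commit ids are made up for the example):

# Illustrative shape of the gitrepos mapping built above; the values are
# (name, dirpath, commit) tuples, one checkout per URL.
gitrepos = {
    "git://git.yoctoproject.org/poky.git": [
        ("bitbake", "bitbake", "HEAD"),
        ("meta", "meta", "HEAD"),
    ],
    "git://example.com/meta-custom.git": [  # hypothetical extra layer repo
        ("meta-custom", "", "deadbeef"),
    ],
}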
@@ -138,15 +171,17 @@ class SSHBEController(BuildEnvironmentController):
        if not self._pathexists(bblayerconf):
            raise BuildSetupException("BE is not consistent: bblayers.conf file missing at %s" % bblayerconf)

        import uuid
        local_bblayerconf = "/tmp/" + str(uuid.uuid4()) + "-bblayer.conf"  # str() needed, uuid.uuid4() is not a string
        conflines = open(bblayerconf, "r").readlines()

        self.pullFile(bblayerconf, local_bblayerconf)
        bblayerconffile = open(bblayerconf, "w")
        # drop lines previously added by toaster, together with the line that follows
        # each marker (a for loop over xrange cannot skip ahead, so use a while loop)
        i = 0
        while i < len(conflines):
            if conflines[i].startswith("# line added by toaster"):
                i += 2
            else:
                bblayerconffile.write(conflines[i])
                i += 1

        BuildEnvironmentController._updateBBLayers(local_bblayerconf, layerlist)
        self.pushFile(local_bblayerconf, bblayerconf)

        os.unlink(local_bblayerconf)
        bblayerconffile.write("\n# line added by toaster build control\nBBLAYERS = \"" + " ".join(layerlist) + "\"")
        bblayerconffile.close()

        self.islayerset = True
        return True
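The new code path delegates the BBLAYERS rewrite to BuildEnvironmentController._updateBBLayers(), whose body is not shown in this diff. Based on the inline version above, a helper of that name presumably does something like the following (a sketch, not the actual implementation):

# Sketch of what an _updateBBLayers(filepath, layerlist) helper might do,
# inferred from the inline version above; not the actual implementation.
def _updateBBLayers(filepath, layerlist):
    conflines = open(filepath, "r").readlines()
    outfile = open(filepath, "w")
    skip = 0
    for line in conflines:
        if skip > 0:
            skip -= 1
            continue
        if line.startswith("# line added by toaster"):
            skip = 1  # also drop the BBLAYERS line that follows the marker
            continue
        outfile.write(line)
    outfile.write("\n# line added by toaster build control\nBBLAYERS = \"" + " ".join(layerlist) + "\"")
    outfile.close()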
@@ -156,24 +191,3 @@ class SSHBEController(BuildEnvironmentController):
        import shutil
        shutil.rmtree(os.path.join(self.be.sourcedir, "build"))
        assert not self._pathexists(self.be.builddir)

    def triggerBuild(self, bitbake, layers, variables, targets):
        # set up the build environment with the needed layers
        self.setLayers(bitbake, layers)
        self.writeConfFile("conf/toaster-pre.conf", )
        self.writeConfFile("conf/toaster.conf", raw = "INHERIT+=\"toaster buildhistory\"")

        # get the bb server running with the build req id and build env id
        bbctrl = self.getBBController()

        # trigger the build command
        task = reduce(lambda x, y: x if len(y) == 0 else y, map(lambda y: y.task, targets))
        if len(task) == 0:
            task = None

        bbctrl.build(list(map(lambda x: x.target, targets)), task)

        logger.debug("localhostbecontroller: Build launched, exiting. Follow build logs at %s/toaster_ui.log" % self.be.builddir)

        # disconnect from the server
        bbctrl.disconnect()
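The reduce/map expression above keeps the last non-empty task name seen across the requested targets, falling back to None when every target leaves the task blank. A small illustration with hypothetical target objects (reduce is a builtin in the Python 2 this code targets):

# Hypothetical illustration of the task-selection expression above.
class T(object):
    def __init__(self, target, task):
        self.target, self.task = target, task

targets = [T("core-image-minimal", ""), T("quilt", "clean")]
task = reduce(lambda x, y: x if len(y) == 0 else y,
              map(lambda y: y.task, targets))
print(task)  # prints "clean"; stays "" (and then becomes None) when no task is set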
@@ -7,7 +7,7 @@ Replace this with more appropriate tests for your application.

from django.test import TestCase

from bldcontrol.bbcontroller import BitbakeController, BuildSetupException
from bldcontrol.bbcontroller import BitbakeController
from bldcontrol.localhostbecontroller import LocalhostBEController
from bldcontrol.sshbecontroller import SSHBEController
from bldcontrol.models import BuildEnvironment, BuildRequest
@@ -15,7 +15,6 @@ from bldcontrol.management.commands.runbuilds import Command

import socket
import subprocess
import os

# standard poky data hardcoded for testing
BITBAKE_LAYERS = [type('bitbake_info', (object,), { "giturl": "git://git.yoctoproject.org/poky.git", "dirpath": "", "commit": "HEAD"})]
@@ -30,17 +29,6 @@ POKY_LAYERS = [

# we have an abstract test class designed to ensure that the controllers use a single interface
# specific controller tests only need to override the _getBuildEnvironment() method

test_sourcedir = os.getenv("TTS_SOURCE_DIR")
test_builddir = os.getenv("TTS_BUILD_DIR")
test_address = os.getenv("TTS_TEST_ADDRESS", "localhost")

if test_sourcedir is None or test_builddir is None or test_address is None:
    raise Exception("Please set TTS_SOURCE_DIR, TTS_BUILD_DIR and TTS_TEST_ADDRESS")

# The bb server will expect a toaster-pre.conf file to exist. If it doesn't exist then we make
# an empty one here.
open(os.path.join(test_builddir, 'conf/toaster-pre.conf'), 'a').close()
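To satisfy the guard above before running these tests, the three variables just need to point at a poky checkout and its build directory; hypothetical values:

# Hypothetical environment for the guard above; adjust paths to your checkout.
import os
os.environ.setdefault("TTS_SOURCE_DIR", "/home/user/poky")
os.environ.setdefault("TTS_BUILD_DIR", "/home/user/poky/build")
os.environ.setdefault("TTS_TEST_ADDRESS", "localhost")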
class BEControllerTests(object):

    def _serverForceStop(self, bc):
@@ -48,54 +36,35 @@ class BEControllerTests(object):
        self.assertTrue(err == '', "bitbake server pid %s not stopped" % err)

    def test_serverStartAndStop(self):
        from bldcontrol.sshbecontroller import NotImplementedException
        obe = self._getBuildEnvironment()
        bc = self._getBEController(obe)
        try:
            # setting layers, skip any layer info
            bc.setLayers(BITBAKE_LAYERS, POKY_LAYERS)
        except NotImplementedException, e:
            print "Test skipped due to command not implemented yet"
            return True
        # We are ok with the exception as we're handling the git already exists
        except BuildSetupException:
            pass
        bc.setLayers(BITBAKE_LAYERS, POKY_LAYERS) # setting layers, skip any layer info

        bc.pokydirname = test_sourcedir
        bc.islayerset = True

        hostname = test_address.split("@")[-1]
        hostname = self.test_address.split("@")[-1]

        # test start server and stop
        self.assertTrue(socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect_ex((hostname, 8200)), "Port already occupied")
        bc.startBBServer()

        self.assertFalse(socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect_ex((hostname, int(bc.be.bbport))), "Server not answering")
        self.assertFalse(socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect_ex((hostname, 8200)), "Server not answering")

        bc.stopBBServer()
        self.assertTrue(socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect_ex((hostname, int(bc.be.bbport))), "Server not stopped")
        self.assertTrue(socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect_ex((hostname, 8200)), "Server not stopped")

        self._serverForceStop(bc)
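A note on the assertions above: socket.connect_ex() returns 0 on a successful connect and an errno otherwise, so a truthy result means the port is free and a falsy one means something is listening. The same check in isolation:

# connect_ex() returns 0 (falsy) when something accepts the connection,
# and an errno (truthy) when nothing listens; hence the assertTrue/assertFalse
# pairing in the test above.
import socket

def port_is_free(host, port):
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        return s.connect_ex((host, port)) != 0
    finally:
        s.close()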
    def test_getBBController(self):
        from bldcontrol.sshbecontroller import NotImplementedException
        obe = self._getBuildEnvironment()
        bc = self._getBEController(obe)
        layerSet = False
        try:
            # setting layers, skip any layer info
            layerSet = bc.setLayers(BITBAKE_LAYERS, POKY_LAYERS)
        except NotImplementedException:
            print "Test skipped due to command not implemented yet"
            return True
        # We are ok with the exception as we're handling the git already exists
        except BuildSetupException:
            pass

        bc.pokydirname = test_sourcedir
        bc.islayerset = True
        bc.setLayers(BITBAKE_LAYERS, POKY_LAYERS) # setting layers, skip any layer info

        bbc = bc.getBBController()
        self.assertTrue(isinstance(bbc, BitbakeController))
        # test set variable, use no build marker -1 for BR value
        try:
            bbc.setVariable("TOASTER_BRBE", "%d:%d" % (-1, obe.pk))
        except Exception as e:
            self.fail("setVariable raised %s" % e)

        bc.stopBBServer()

        self._serverForceStop(bc)
@@ -103,15 +72,19 @@
class LocalhostBEControllerTests(TestCase, BEControllerTests):
    def __init__(self, *args):
        super(LocalhostBEControllerTests, self).__init__(*args)

        # hardcoded for Alex's machine; since the localhost BE is machine-dependent,
        # I found no good way to abstract this away
        self.test_sourcedir = "/home/ddalex/ssd/yocto"
        self.test_builddir = "/home/ddalex/ssd/yocto/build"
        self.test_address = "localhost"

    def _getBuildEnvironment(self):
        return BuildEnvironment.objects.create(
            lock = BuildEnvironment.LOCK_FREE,
            betype = BuildEnvironment.TYPE_LOCAL,
            address = test_address,
            sourcedir = test_sourcedir,
            builddir = test_builddir )
            address = self.test_address,
            sourcedir = self.test_sourcedir,
            builddir = self.test_builddir )

    def _getBEController(self, obe):
        return LocalhostBEController(obe)
@@ -119,20 +92,25 @@ class LocalhostBEControllerTests(TestCase, BEControllerTests):
class SSHBEControllerTests(TestCase, BEControllerTests):
    def __init__(self, *args):
        super(SSHBEControllerTests, self).__init__(*args)
        self.test_address = "ddalex-desktop.local"
        # hardcoded for the ddalex-desktop.local machine; since the BE is machine-dependent,
        # I found no good way to abstract this away
        self.test_sourcedir = "/home/ddalex/ssd/yocto"
        self.test_builddir = "/home/ddalex/ssd/yocto/build"

    def _getBuildEnvironment(self):
        return BuildEnvironment.objects.create(
            lock = BuildEnvironment.LOCK_FREE,
            betype = BuildEnvironment.TYPE_SSH,
            address = test_address,
            sourcedir = test_sourcedir,
            builddir = test_builddir )
            address = self.test_address,
            sourcedir = self.test_sourcedir,
            builddir = self.test_builddir )

    def _getBEController(self, obe):
        return SSHBEController(obe)

    def test_pathExists(self):
        obe = BuildEnvironment.objects.create(betype = BuildEnvironment.TYPE_SSH, address = test_address)
        obe = BuildEnvironment.objects.create(betype = BuildEnvironment.TYPE_SSH, address = self.test_address)
        sbc = SSHBEController(obe)
        self.assertTrue(sbc._pathexists("/"))
        self.assertFalse(sbc._pathexists("/.deadbeef"))
@@ -157,8 +135,8 @@ class RunBuildsCommandTests(TestCase):
        self.assertRaises(IndexError, command._selectBuildEnvironment)

    def test_br_select(self):
        from orm.models import Project, Release, BitbakeVersion, Branch
        p = Project.objects.create_project("test", Release.objects.get_or_create(name = "HEAD", bitbake_version = BitbakeVersion.objects.get_or_create(name="HEAD", branch=Branch.objects.get_or_create(name="HEAD")[0])[0])[0])
        from orm.models import Project, Release, BitbakeVersion
        p = Project.objects.create_project("test", Release.objects.get_or_create(name = "HEAD", bitbake_version = BitbakeVersion.objects.get_or_create(name="HEAD", branch="HEAD")[0])[0])
        obr = BuildRequest.objects.create(state = BuildRequest.REQ_QUEUED, project = p)
        command = Command()
        br = command._selectBuildRequest()
@@ -169,22 +147,3 @@ class RunBuildsCommandTests(TestCase):
        self.assertTrue(br.state == BuildRequest.REQ_INPROGRESS, "Request is not updated")
        # no more selections possible here
        self.assertRaises(IndexError, command._selectBuildRequest)


class UtilityTests(TestCase):
    def test_reduce_path(self):
        from bldcontrol.management.commands.loadconf import _reduce_canon_path, _get_id_for_sourcetype

        self.assertTrue(_reduce_canon_path("/") == "/")
        self.assertTrue(_reduce_canon_path("/home/..") == "/")
        self.assertTrue(_reduce_canon_path("/home/../ana") == "/ana")
        self.assertTrue(_reduce_canon_path("/home/../ana/..") == "/")
        self.assertTrue(_reduce_canon_path("/home/ana/mihai/../maria") == "/home/ana/maria")

    def test_get_id_for_sourcetype(self):
        from bldcontrol.management.commands.loadconf import _reduce_canon_path, _get_id_for_sourcetype
        self.assertTrue(_get_id_for_sourcetype("layerindex") == 1)
        self.assertTrue(_get_id_for_sourcetype("local") == 0)
        self.assertTrue(_get_id_for_sourcetype("imported") == 2)
        with self.assertRaises(Exception):
            _get_id_for_sourcetype("unknown")
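The asserted cases pin down _reduce_canon_path() fairly completely; a hypothetical implementation consistent with them (the real one lives in loadconf.py and may differ):

# Hypothetical implementation matching the assertions above; the actual
# helper in bldcontrol/management/commands/loadconf.py may differ.
def _reduce_canon_path(path):
    components = []
    for c in path.split("/"):
        if c == "..":
            if components:
                components.pop()
        elif c:
            components.append(c)
    return "/" + "/".join(components)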
44
bitbake/lib/toaster/bldviewer/api.py
Normal file
@@ -0,0 +1,44 @@
#
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# BitBake Toaster Implementation
#
# Copyright (C) 2013 Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

from django.conf.urls import patterns, include, url


urlpatterns = patterns('bldviewer.views',
    url(r'^builds$', 'model_explorer', {'model_name':'build'}, name='builds'),
    url(r'^targets$', 'model_explorer', {'model_name':'target'}, name='targets'),
    url(r'^target_files$', 'model_explorer', {'model_name':'target_file'}, name='target_file'),
    url(r'^target_image_file$', 'model_explorer', {'model_name':'target_image_file'}, name='target_image_file'),
    url(r'^tasks$', 'model_explorer', {'model_name':'task'}, name='task'),
    url(r'^task_dependencies$', 'model_explorer', {'model_name':'task_dependency'}, name='task_dependencies'),
    url(r'^packages$', 'model_explorer', {'model_name':'package'}, name='package'),
    url(r'^package_dependencies$', 'model_explorer', {'model_name':'package_dependency'}, name='package_dependency'),
    url(r'^target_packages$', 'model_explorer', {'model_name':'target_installed_package'}, name='target_packages'),
    url(r'^target_installed_packages$', 'model_explorer', {'model_name':'target_installed_package'}, name='target_installed_package'),
    url(r'^package_files$', 'model_explorer', {'model_name':'build_file'}, name='build_file'),
    url(r'^layers$', 'model_explorer', {'model_name':'layer'}, name='layer'),
    url(r'^layerversions$', 'model_explorer', {'model_name':'layerversion'}, name='layerversion'),
    url(r'^recipes$', 'model_explorer', {'model_name':'recipe'}, name='recipe'),
    url(r'^recipe_dependencies$', 'model_explorer', {'model_name':'recipe_dependency'}, name='recipe_dependencies'),
    url(r'^variables$', 'model_explorer', {'model_name':'variable'}, name='variables'),
    url(r'^variableshistory$', 'model_explorer', {'model_name':'variablehistory'}, name='variablehistory'),
    url(r'^logmessages$', 'model_explorer', {'model_name':'logmessage'}, name='logmessages'),
)
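Every pattern above routes to a single 'model_explorer' view, with the model name passed as an extra keyword argument. The view itself is not part of this hunk; a hypothetical view of that shape, assuming the pre-Django-1.7 get_model() API of the era:

# Hypothetical shape of the model_explorer view the urlpatterns above target;
# not shown in this diff.
import json
from django.http import HttpResponse
from django.db.models.loading import get_model

def model_explorer(request, model_name):
    model = get_model('orm', model_name)  # look the model up by name
    rows = list(model.objects.values())   # serialize all rows (sketch only)
    return HttpResponse(json.dumps(rows, default=str), content_type="application/json")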
4797
bitbake/lib/toaster/bldviewer/static/css/bootstrap.css
vendored
Normal file
File diff suppressed because it is too large
Some files were not shown because too many files have changed in this diff