Mirror of https://git.yoctoproject.org/poky
Synced 2026-02-03 07:18:44 +01:00

Compare commits: 383 commits
.gitignore (vendored): 2 changes
@@ -8,7 +8,7 @@ scripts/oe-git-proxy-socks
sources/
meta-*/
!meta-skeleton
!meta-selftest
!meta-hob
hob-image-*.bb
*.swp
*.orig
README: 29 changes
@@ -30,29 +30,20 @@ For information about OpenEmbedded, see the OpenEmbedded website:

Where to Send Patches
=====================

As Poky is an integration repository (built using a tool called combo-layer),
patches against the various components should be sent to their respective
upstreams:
As Poky is an integration repository, patches against the various components
should be sent to their respective upstreams.

bitbake:
    Git repository: http://git.openembedded.org/bitbake/
    Mailing list: bitbake-devel@lists.openembedded.org
    bitbake-devel@lists.openembedded.org

documentation:
    Git repository: http://git.yoctoproject.org/cgit/cgit.cgi/yocto-docs/
    Mailing list: yocto@yoctoproject.org
meta-yocto:
    poky@yoctoproject.org

meta-yocto(-bsp):
    Git repository: http://git.yoctoproject.org/cgit/cgit.cgi/meta-yocto(-bsp)
    Mailing list: poky@yoctoproject.org

Everything else should be sent to the OpenEmbedded Core mailing list. If in
doubt, check the oe-core git repository for the content you intend to modify.
Most everything else should be sent to the OpenEmbedded Core mailing list. If
in doubt, check the oe-core git repository for the content you intend to modify.
Before sending, be sure the patches apply cleanly to the current oe-core git
repository.
    openembedded-core@lists.openembedded.org

    Git repository: http://git.openembedded.org/openembedded-core/
    Mailing list: openembedded-core@lists.openembedded.org

Note: The scripts directory should be treated with extra care as it is a mix of
oe-core and poky-specific files.
Note: The scripts directory should be treated with extra care as it is a mix
of oe-core and poky-specific files.
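Whichever wording of this section survives, the mechanics of contributing are unchanged: generate the patch against the matching upstream repository and mail it to the list named above. A representative sequence (subject prefix and patch file name are illustrative, not taken from this README):

    $ git format-patch -1 --subject-prefix="OE-core][PATCH"
    $ git send-email --to openembedded-core@lists.openembedded.org 0001-example.patch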
@@ -52,13 +52,6 @@ The following boards are supported by the meta-yocto-bsp layer:

For more information see the board's section below. The appropriate MACHINE
variable value corresponding to the board is given in brackets.

Reference Board Maintenance
===========================

Send pull requests, patches, comments or questions about meta-yocto-bsps to poky@yoctoproject.org

Maintainers: Kevin Hao <kexin.hao@windriver.com>
             Bruce Ashfield <bruce.ashfield@windriver.com>

Consumer Devices
================
@@ -251,14 +244,14 @@ if used via a usb card reader):

5. If using core-image-minimal rootfs, install the modules
       # tar x -C /media/root -f modules-beaglebone.tgz

6. If using core-image-minimal rootfs, install the kernel zImage into /boot
6. If using core-image-minimal rootfs, install the kernel uImage into /boot
   directory of rootfs
       # cp zImage-beaglebone.bin /media/root/boot/zImage
       # cp uImage-beaglebone.bin /media/root/boot/uImage

7. If using core-image-minimal rootfs, also install device tree (DTB) files
   into /boot directory of rootfs
       # cp zImage-am335x-bone.dtb /media/root/boot/am335x-bone.dtb
       # cp zImage-am335x-boneblack.dtb /media/root/boot/am335x-boneblack.dtb
       # cp uImage-am335x-bone.dtb /media/root/boot/am335x-bone.dtb
       # cp uImage-am335x-boneblack.dtb /media/root/boot/am335x-boneblack.dtb

8. Unmount the SD partitions, insert the SD card into the Beaglebone, and
   boot the Beaglebone
@@ -324,22 +317,6 @@ Load the kernel and dtb (device tree blob), and boot the system as follows:

   => tftp 2000000 uImage-mpc8315e-rdb.dtb
   => bootm 1000000 - 2000000

--- Booting from JFFS2 root ---

1. First boot the board with NFS root.

2. Erase the MTD partition which will be used as root:

   $ flash_eraseall /dev/mtd3

3. Copy the JFFS2 image to the MTD partition:

   $ flashcp core-image-minimal-mpc8315e-rdb.jffs2 /dev/mtd3

4. Then reboot the board and set up the environment in U-Boot:

   => setenv bootargs root=/dev/mtdblock3 rootfstype=jffs2 console=ttyS0,115200


Ubiquiti Networks EdgeRouter Lite (edgerouter)
==============================================
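Before erasing anything it is worth confirming that /dev/mtd3 really is the intended root partition; the kernel's MTD partition table can be listed first (partition numbering varies by board and kernel configuration):

   $ cat /proc/mtd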
@@ -352,14 +329,11 @@ Setup instructions
------------------

You will need the following:
*  RJ45 -> serial ("rollover") cable connected from your PC to the CONSOLE
   port on the device
*  Ethernet connected to the first ethernet port on the board

If using NFS as part of the setup process, you will also need:
*  NFS root setup on your workstation
*  TFTP server installed on your workstation (if fetching the kernel from
   TFTP, see below).
*  TFTP server installed on your workstation
*  RJ45 -> serial ("rollover") cable connected from your PC to the CONSOLE
   port on the board
*  Ethernet connected to the first ethernet port on the board

--- Preparation ---

@@ -367,7 +341,7 @@ Build an image (e.g. core-image-minimal) using "edgerouter" as the MACHINE.
The following instructions are based on core-image-minimal; other targets
should be similar.

--- Booting from NFS root / kernel via TFTP ---
--- Booting from NFS root ---

Load the kernel, and boot the system as follows:
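The load commands themselves fall outside this hunk. On this Octeon-based board the sequence follows the same TFTP pattern as the MPC8315E-RDB example above; a representative (not verbatim) sequence, with the server address and NFS path as placeholders, would be:

   => dhcp
   => tftp $loadaddr vmlinux
   => bootoctlinux $loadaddr coremask=0x3 root=/dev/nfs rw nfsroot=<server-ip>:<nfsroot-path>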
bin/bitbake

@@ -23,22 +23,335 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os
import sys

import sys, logging
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)),
                                'lib'))

import optparse
import warnings
from traceback import format_exception
try:
    import bb
except RuntimeError as exc:
    sys.exit(str(exc))

from bb import event
import bb.msg
from bb import cooker
from bb import ui
from bb import server
from bb import cookerdata
from bb.main import bitbake_main, BitBakeConfigParameters

__version__ = "1.22.0"
logger = logging.getLogger("BitBake")

# Python multiprocessing requires /dev/shm
if not os.access('/dev/shm', os.W_OK | os.X_OK):
    sys.exit("FATAL: /dev/shm does not exist or is not writable")

# Unbuffer stdout to avoid log truncation in the event
# of an unorderly exit as well as to provide timely
# updates to log files for use with tail
try:
    if sys.stdout.name == '<stdout>':
        sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
except:
    pass


def get_ui(config):
    if not config.ui:
        # modify 'ui' attribute because it is also read by cooker
        config.ui = os.environ.get('BITBAKE_UI', 'knotty')

    interface = config.ui

    try:
        # Dynamically load the UI based on the ui name. Although we
        # suggest a fixed set this allows you to have flexibility in which
        # ones are available.
        module = __import__("bb.ui", fromlist = [interface])
        return getattr(module, interface)
    except AttributeError:
        sys.exit("FATAL: Invalid user interface '%s' specified.\n"
                 "Valid interfaces: depexp, goggle, ncurses, hob, knotty [default]." % interface)


# Display bitbake/OE warnings via the BitBake.Warnings logger, ignoring others"""
warnlog = logging.getLogger("BitBake.Warnings")
_warnings_showwarning = warnings.showwarning
def _showwarning(message, category, filename, lineno, file=None, line=None):
    if file is not None:
        if _warnings_showwarning is not None:
            _warnings_showwarning(message, category, filename, lineno, file, line)
    else:
        s = warnings.formatwarning(message, category, filename, lineno)
        warnlog.warn(s)

warnings.showwarning = _showwarning
warnings.filterwarnings("ignore")
warnings.filterwarnings("default", module="(<string>$|(oe|bb)\.)")
warnings.filterwarnings("ignore", category=PendingDeprecationWarning)
warnings.filterwarnings("ignore", category=ImportWarning)
warnings.filterwarnings("ignore", category=DeprecationWarning, module="<string>$")
warnings.filterwarnings("ignore", message="With-statements now directly support multiple context managers")

class BitBakeConfigParameters(cookerdata.ConfigParameters):

    def parseCommandLine(self):
        parser = optparse.OptionParser(
            version = "BitBake Build Tool Core version %s, %%prog version %s" % (bb.__version__, __version__),
            usage = """%prog [options] [recipename/target ...]

    Executes the specified task (default is 'build') for a given set of target recipes (.bb files).
    It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which
    will provide the layer, BBFILES and other configuration information.""")

        parser.add_option("-b", "--buildfile", help = "Execute tasks from a specific .bb recipe directly. WARNING: Does not handle any dependencies from other recipes.",
                   action = "store", dest = "buildfile", default = None)

        parser.add_option("-k", "--continue", help = "Continue as much as possible after an error. While the target that failed and anything depending on it cannot be built, as much as possible will be built before stopping.",
                   action = "store_false", dest = "abort", default = True)

        parser.add_option("-a", "--tryaltconfigs", help = "Continue with builds by trying to use alternative providers where possible.",
                   action = "store_true", dest = "tryaltconfigs", default = False)

        parser.add_option("-f", "--force", help = "Force the specified targets/task to run (invalidating any existing stamp file).",
                   action = "store_true", dest = "force", default = False)

        parser.add_option("-c", "--cmd", help = "Specify the task to execute. The exact options available depend on the metadata. Some examples might be 'compile' or 'populate_sysroot' or 'listtasks' may give a list of the tasks available.",
                   action = "store", dest = "cmd")

        parser.add_option("-C", "--clear-stamp", help = "Invalidate the stamp for the specified task such as 'compile' and then run the default task for the specified target(s).",
                   action = "store", dest = "invalidate_stamp")

        parser.add_option("-r", "--read", help = "Read the specified file before bitbake.conf.",
                   action = "append", dest = "prefile", default = [])

        parser.add_option("-R", "--postread", help = "Read the specified file after bitbake.conf.",
                   action = "append", dest = "postfile", default = [])

        parser.add_option("-v", "--verbose", help = "Output more log message data to the terminal.",
                   action = "store_true", dest = "verbose", default = False)

        parser.add_option("-D", "--debug", help = "Increase the debug level. You can specify this more than once.",
                   action = "count", dest="debug", default = 0)

        parser.add_option("-n", "--dry-run", help = "Don't execute, just go through the motions.",
                   action = "store_true", dest = "dry_run", default = False)

        parser.add_option("-S", "--dump-signatures", help = "Dump out the signature construction information, with no task execution. Parameters are passed to the signature handling code, use 'none' if no specific handler is required.",
                   action = "append", dest = "dump_signatures", default = [])

        parser.add_option("-p", "--parse-only", help = "Quit after parsing the BB recipes.",
                   action = "store_true", dest = "parse_only", default = False)

        parser.add_option("-s", "--show-versions", help = "Show current and preferred versions of all recipes.",
                   action = "store_true", dest = "show_versions", default = False)

        parser.add_option("-e", "--environment", help = "Show the global or per-package environment complete with information about where variables were set/changed.",
                   action = "store_true", dest = "show_environment", default = False)

        parser.add_option("-g", "--graphviz", help = "Save dependency tree information for the specified targets in the dot syntax.",
                   action = "store_true", dest = "dot_graph", default = False)

        parser.add_option("-I", "--ignore-deps", help = """Assume these dependencies don't exist and are already provided (equivalent to ASSUME_PROVIDED). Useful to make dependency graphs more appealing""",
                   action = "append", dest = "extra_assume_provided", default = [])

        parser.add_option("-l", "--log-domains", help = """Show debug logging for the specified logging domains""",
                   action = "append", dest = "debug_domains", default = [])

        parser.add_option("-P", "--profile", help = "Profile the command and save reports.",
                   action = "store_true", dest = "profile", default = False)

        parser.add_option("-u", "--ui", help = "The user interface to use (e.g. knotty, hob, depexp).",
                   action = "store", dest = "ui")

        parser.add_option("-t", "--servertype", help = "Choose which server to use, process or xmlrpc.",
                   action = "store", dest = "servertype")

        parser.add_option("", "--revisions-changed", help = "Set the exit code depending on whether upstream floating revisions have changed or not.",
                   action = "store_true", dest = "revisions_changed", default = False)

        parser.add_option("", "--server-only", help = "Run bitbake without a UI, only starting a server (cooker) process.",
                   action = "store_true", dest = "server_only", default = False)

        parser.add_option("-B", "--bind", help = "The name/address for the bitbake server to bind to.",
                   action = "store", dest = "bind", default = False)

        parser.add_option("", "--no-setscene", help = "Do not run any setscene tasks. sstate will be ignored and everything needed, built.",
                   action = "store_true", dest = "nosetscene", default = False)

        parser.add_option("", "--remote-server", help = "Connect to the specified server.",
                   action = "store", dest = "remote_server", default = False)

        parser.add_option("-m", "--kill-server", help = "Terminate the remote server.",
                   action = "store_true", dest = "kill_server", default = False)

        parser.add_option("", "--observe-only", help = "Connect to a server as an observing-only client.",
                   action = "store_true", dest = "observe_only", default = False)

        parser.add_option("", "--status-only", help = "Check the status of the remote bitbake server.",
                   action = "store_true", dest = "status_only", default = False)

        options, targets = parser.parse_args(sys.argv)

        # some environmental variables set also configuration options
        if "BBSERVER" in os.environ:
            options.servertype = "xmlrpc"
            options.remote_server = os.environ["BBSERVER"]

        return options, targets[1:]


def start_server(servermodule, configParams, configuration, features):
    server = servermodule.BitBakeServer()
    if configParams.bind:
        (host, port) = configParams.bind.split(':')
        server.initServer((host, int(port)))
        configuration.interface = [ server.serverImpl.host, server.serverImpl.port ]
    else:
        server.initServer()
        configuration.interface = []

    try:
        configuration.setServerRegIdleCallback(server.getServerIdleCB())

        cooker = bb.cooker.BBCooker(configuration, features)

        server.addcooker(cooker)
        server.saveConnectionDetails()
    except Exception as e:
        exc_info = sys.exc_info()
        while hasattr(server, "event_queue"):
            try:
                import queue
            except ImportError:
                import Queue as queue
            try:
                event = server.event_queue.get(block=False)
            except (queue.Empty, IOError):
                break
            if isinstance(event, logging.LogRecord):
                logger.handle(event)
        raise exc_info[1], None, exc_info[2]
    server.detach()
    return server



def main():

    configParams = BitBakeConfigParameters()
    configuration = cookerdata.CookerConfiguration()
    configuration.setConfigParameters(configParams)

    ui_module = get_ui(configParams)

    # Server type can be xmlrpc or process currently, if nothing is specified,
    # the default server is process
    if configParams.servertype:
        server_type = configParams.servertype
    else:
        server_type = 'process'

    try:
        module = __import__("bb.server", fromlist = [server_type])
        servermodule = getattr(module, server_type)
    except AttributeError:
        sys.exit("FATAL: Invalid server type '%s' specified.\n"
                 "Valid interfaces: xmlrpc, process [default]." % server_type)

    if configParams.server_only:
        if configParams.servertype != "xmlrpc":
            sys.exit("FATAL: If '--server-only' is defined, we must set the servertype as 'xmlrpc'.\n")
        if not configParams.bind:
            sys.exit("FATAL: The '--server-only' option requires a name/address to bind to with the -B option.\n")
        if configParams.remote_server:
            sys.exit("FATAL: The '--server-only' option conflicts with %s.\n" %
                     ("the BBSERVER environment variable" if "BBSERVER" in os.environ else "the '--remote-server' option" ))

    if configParams.bind and configParams.servertype != "xmlrpc":
        sys.exit("FATAL: If '-B' or '--bind' is defined, we must set the servertype as 'xmlrpc'.\n")

    if configParams.remote_server and configParams.servertype != "xmlrpc":
        sys.exit("FATAL: If '--remote-server' is defined, we must set the servertype as 'xmlrpc'.\n")

    if configParams.observe_only and (not configParams.remote_server or configParams.bind):
        sys.exit("FATAL: '--observe-only' can only be used by UI clients connecting to a server.\n")

    if "BBDEBUG" in os.environ:
        level = int(os.environ["BBDEBUG"])
        if level > configuration.debug:
            configuration.debug = level

    bb.msg.init_msgconfig(configParams.verbose, configuration.debug,
                          configuration.debug_domains)

    # Ensure logging messages get sent to the UI as events
    handler = bb.event.LogHandler()
    if not configParams.status_only:
        # In status only mode there are no logs and no UI
        logger.addHandler(handler)

    # Clear away any spurious environment variables while we stoke up the cooker
    cleanedvars = bb.utils.clean_environment()

    featureset = []
    if not configParams.server_only:
        # Collect the feature set for the UI
        featureset = getattr(ui_module, "featureSet", [])

    if not configParams.remote_server:
        # we start a server with a given configuration
        server = start_server(servermodule, configParams, configuration, featureset)
        bb.event.ui_queue = []
    else:
        # we start a stub server that is actually a XMLRPClient that connects to a real server
        server = servermodule.BitBakeXMLRPCClient(configParams.observe_only)
        server.saveConnectionDetails(configParams.remote_server)
        server.saveConnectionConfigParams(configParams)

    if not configParams.server_only:
        if configParams.status_only:
            try:
                server_connection = server.establishConnection(featureset)
            except:
                sys.exit(1)
            if not server_connection:
                sys.exit(1)
            server_connection.terminate()
            sys.exit(0)

        # Setup a connection to the server (cooker)
        server_connection = server.establishConnection(featureset)
        if not server_connection:
            if configParams.kill_server:
                bb.fatal("Server already killed")
            configParams.bind = configParams.remote_server
            start_server(servermodule, configParams, configuration, featureset)
            bb.event.ui_queue = []
            server_connection = server.establishConnection(featureset)

        # Restore the environment in case the UI needs it
        for k in cleanedvars:
            os.environ[k] = cleanedvars[k]

        logger.removeHandler(handler)

        try:
            return ui_module.main(server_connection.connection, server_connection.events, configParams)
        finally:
            bb.event.ui_queue = []
            server_connection.terminate()
    else:
        print("server address: %s, server port: %s" % (server.serverImpl.host, server.serverImpl.port))
        return 0

    return 1

if __name__ == "__main__":
    try:
        ret = bitbake_main(BitBakeConfigParameters(sys.argv),
                           cookerdata.CookerConfiguration())
        ret = main()
    except bb.BBHandledException:
        ret = 1
    except Exception:
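The option validation in main() above pins down how the two server types are meant to be combined. A sketch of the resulting workflows, with the address and build target purely illustrative:

   # headless server: --server-only requires both '-t xmlrpc' and a '-B' bind address
   $ bitbake --server-only -t xmlrpc -B localhost:8200

   # client: setting BBSERVER implies servertype xmlrpc plus --remote-server
   $ BBSERVER=localhost:8200 bitbake core-image-minimal

   # query or terminate the running server
   $ BBSERVER=localhost:8200 bitbake --status-only
   $ BBSERVER=localhost:8200 bitbake -m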
bitbake-diffsigs

@@ -46,12 +46,6 @@ logger = logger_create('bitbake-diffsigs')
def find_compare_task(bbhandler, pn, taskname):
    """ Find the most recent signature files for the specified PN/task and compare them """

    def get_hashval(siginfo):
        if siginfo.endswith('.siginfo'):
            return siginfo.rpartition(':')[2].partition('_')[0]
        else:
            return siginfo.rpartition('.')[2]

    if not hasattr(bb.siggen, 'find_siginfo'):
        logger.error('Metadata does not support finding signature data files')
        sys.exit(1)

@@ -60,7 +54,7 @@ def find_compare_task(bbhandler, pn, taskname):
        taskname = 'do_%s' % taskname

    filedates = bb.siggen.find_siginfo(pn, taskname, None, bbhandler.config_data)
    latestfiles = sorted(filedates.keys(), key=lambda f: filedates[f])[-3:]
    latestfiles = sorted(filedates.keys(), key=lambda f: filedates[f])[-2:]
    if not latestfiles:
        logger.error('No sigdata files found matching %s %s' % (pn, taskname))
        sys.exit(1)

@@ -68,16 +62,6 @@ def find_compare_task(bbhandler, pn, taskname):
        logger.error('Only one matching sigdata file found for the specified task (%s %s)' % (pn, taskname))
        sys.exit(1)
    else:
        # It's possible that latestfiles contain 3 elements and the first two have the same hash value.
        # In this case, we delete the second element.
        # The above case is actually the most common one. Because we may have sigdata file and siginfo
        # file having the same hash value. Comparing such two files makes no sense.
        if len(latestfiles) == 3:
            hash0 = get_hashval(latestfiles[0])
            hash1 = get_hashval(latestfiles[1])
            if hash0 == hash1:
                latestfiles.pop(1)

        # Define recursion callback
        def recursecb(key, hash1, hash2):
            hashes = [hash1, hash2]
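find_compare_task() is the code path behind the tool's task mode; assuming the -t/--task option of the same era (which takes the recipe and task names), comparing the latest signatures of a task looks like this (recipe and task illustrative):

   $ bitbake-diffsigs -t busybox do_compile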
bitbake-layers

@@ -5,7 +5,7 @@
# See the help output for details on available commands.

# Copyright (C) 2011 Mentor Graphics Corporation
# Copyright (C) 2011-2015 Intel Corporation
# Copyright (C) 2012 Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as

@@ -20,15 +20,13 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import cmd
import logging
import os
import sys
import fnmatch
from collections import defaultdict
import argparse
import re
import httplib, urlparse, json
import subprocess

bindir = os.path.dirname(__file__)
topdir = os.path.dirname(bindir)

@@ -41,24 +39,26 @@ import bb.utils
import bb.tinfoil


def logger_create(name, output=sys.stderr):
    logger = logging.getLogger(name)
    console = logging.StreamHandler(output)
    format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
    if output.isatty():
        format.enable_color()
    console.setFormatter(format)
    logger.addHandler(console)
    logger.setLevel(logging.INFO)
    return logger

logger = logger_create('bitbake-layers', sys.stdout)
logger = logging.getLogger('BitBake')


class Commands():
def main(args):
    cmds = Commands()
    if args:
        # Allow user to specify e.g. show-layers instead of show_layers
        args = [args[0].replace('-', '_')] + args[1:]
        cmds.onecmd(' '.join(args))
    else:
        cmds.do_help('')
    return cmds.returncode


class Commands(cmd.Cmd):
    def __init__(self):
        self.bbhandler = None
        self.returncode = 0
        self.bblayers = []
        cmd.Cmd.__init__(self)

    def init_bbhandler(self, config_only = False):
        if not self.bbhandler:
@@ -66,6 +66,27 @@ class Commands():
            self.bblayers = (self.bbhandler.config_data.getVar('BBLAYERS', True) or "").split()
            self.bbhandler.prepare(config_only)

    def default(self, line):
        """Handle unrecognised commands"""
        sys.stderr.write("Unrecognised command or option\n")
        self.do_help('')

    def do_help(self, topic):
        """display general help or help on a specified command"""
        if topic:
            sys.stdout.write('%s: ' % topic)
            cmd.Cmd.do_help(self, topic.replace('-', '_'))
        else:
            sys.stdout.write("usage: bitbake-layers <command> [arguments]\n\n")
            sys.stdout.write("Available commands:\n")
            procnames = list(set(self.get_names()))
            for procname in procnames:
                if procname[:3] == 'do_':
                    sys.stdout.write(" %s\n" % procname[3:].replace('_', '-'))
                    doc = getattr(self, procname).__doc__
                    if doc:
                        sys.stdout.write(" %s\n" % doc.splitlines()[0])

    def do_show_layers(self, args):
        """show current configured layers"""
        self.init_bbhandler(config_only = True)
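Because the dispatcher maps hyphenated command names onto do_* methods (show-layers becomes do_show_layers), the method above is reached with:

   $ bitbake-layers show-layers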
@@ -82,293 +103,6 @@ class Commands():
            logger.plain("%s %s %d" % (layername.ljust(20), layerdir.ljust(40), layerpri))


    def do_add_layer(self, args):
        """Add a layer to bblayers.conf

        Adds the specified layer to bblayers.conf
        """
        layerdir = os.path.abspath(args.layerdir)
        if not os.path.exists(layerdir):
            sys.stderr.write("Specified layer directory doesn't exist\n")
            return 1

        layer_conf = os.path.join(layerdir, 'conf', 'layer.conf')
        if not os.path.exists(layer_conf):
            sys.stderr.write("Specified layer directory doesn't contain a conf/layer.conf file\n")
            return 1

        bblayers_conf = os.path.join('conf', 'bblayers.conf')
        if not os.path.exists(bblayers_conf):
            sys.stderr.write("Unable to find bblayers.conf\n")
            return 1

        (notadded, _) = bb.utils.edit_bblayers_conf(bblayers_conf, layerdir, None)
        if notadded:
            for item in notadded:
                sys.stderr.write("Specified layer %s is already in BBLAYERS\n" % item)


    def do_remove_layer(self, args):
        """Remove a layer from bblayers.conf

        Removes the specified layer from bblayers.conf
        """
        bblayers_conf = os.path.join('conf', 'bblayers.conf')
        if not os.path.exists(bblayers_conf):
            sys.stderr.write("Unable to find bblayers.conf\n")
            return 1

        if args.layerdir.startswith('*'):
            layerdir = args.layerdir
        else:
            layerdir = os.path.abspath(args.layerdir)
        (_, notremoved) = bb.utils.edit_bblayers_conf(bblayers_conf, None, layerdir)
        if notremoved:
            for item in notremoved:
                sys.stderr.write("No layers matching %s found in BBLAYERS\n" % item)
                return 1


    def get_json_data(self, apiurl):
        proxy_settings = os.environ.get("http_proxy", None)
        conn = None
        _parsedurl = urlparse.urlparse(apiurl)
        path = _parsedurl.path
        query = _parsedurl.query
        def parse_url(url):
            parsedurl = urlparse.urlparse(url)
            if parsedurl.netloc[0] == '[':
                host, port = parsedurl.netloc[1:].split(']', 1)
                if ':' in port:
                    port = port.rsplit(':', 1)[1]
                else:
                    port = None
            else:
                if parsedurl.netloc.count(':') == 1:
                    (host, port) = parsedurl.netloc.split(":")
                else:
                    host = parsedurl.netloc
                    port = None
            return (host, 80 if port is None else int(port))

        if proxy_settings is None:
            host, port = parse_url(apiurl)
            conn = httplib.HTTPConnection(host, port)
            conn.request("GET", path + "?" + query)
        else:
            host, port = parse_url(proxy_settings)
            conn = httplib.HTTPConnection(host, port)
            conn.request("GET", apiurl)

        r = conn.getresponse()
        if r.status != 200:
            raise Exception("Failed to read " + path + ": %d %s" % (r.status, r.reason))
        return json.loads(r.read())


    def get_layer_deps(self, layername, layeritems, layerbranches, layerdependencies, branchnum, selfname=False):
        def layeritems_info_id(items_name, layeritems):
            litems_id = None
            for li in layeritems:
                if li['name'] == items_name:
                    litems_id = li['id']
                    break
            return litems_id

        def layerbranches_info(items_id, layerbranches):
            lbranch = {}
            for lb in layerbranches:
                if lb['layer'] == items_id and lb['branch'] == branchnum:
                    lbranch['id'] = lb['id']
                    lbranch['vcs_subdir'] = lb['vcs_subdir']
                    break
            return lbranch

        def layerdependencies_info(lb_id, layerdependencies):
            ld_deps = []
            for ld in layerdependencies:
                if ld['layerbranch'] == lb_id and not ld['dependency'] in ld_deps:
                    ld_deps.append(ld['dependency'])
            if not ld_deps:
                logger.error("The dependency of layerDependencies is not found.")
            return ld_deps

        def layeritems_info_name_subdir(items_id, layeritems):
            litems = {}
            for li in layeritems:
                if li['id'] == items_id:
                    litems['vcs_url'] = li['vcs_url']
                    litems['name'] = li['name']
                    break
            return litems

        if selfname:
            selfid = layeritems_info_id(layername, layeritems)
            lbinfo = layerbranches_info(selfid, layerbranches)
            if lbinfo:
                selfsubdir = lbinfo['vcs_subdir']
            else:
                logger.error("%s is not found in the specified branch" % layername)
                return
            selfurl = layeritems_info_name_subdir(selfid, layeritems)['vcs_url']
            if selfurl:
                return selfurl, selfsubdir
            else:
                logger.error("Cannot get layer %s git repo and subdir" % layername)
                return
        ldict = {}
        itemsid = layeritems_info_id(layername, layeritems)
        if not itemsid:
            return layername, None
        lbid = layerbranches_info(itemsid, layerbranches)
        if lbid:
            lbid = layerbranches_info(itemsid, layerbranches)['id']
        else:
            logger.error("%s is not found in the specified branch" % layername)
            return None, None
        for dependency in layerdependencies_info(lbid, layerdependencies):
            lname = layeritems_info_name_subdir(dependency, layeritems)['name']
            lurl = layeritems_info_name_subdir(dependency, layeritems)['vcs_url']
            lsubdir = layerbranches_info(dependency, layerbranches)['vcs_subdir']
            ldict[lname] = lurl, lsubdir
        return None, ldict


    def get_fetch_layer(self, fetchdir, url, subdir, fetch_layer):
        layername = self.get_layer_name(url)
        if os.path.splitext(layername)[1] == '.git':
            layername = os.path.splitext(layername)[0]
        repodir = os.path.join(fetchdir, layername)
        layerdir = os.path.join(repodir, subdir)
        if not os.path.exists(repodir):
            if fetch_layer:
                result = subprocess.call('git clone %s %s' % (url, repodir), shell = True)
                if result:
                    logger.error("Failed to download %s" % url)
                    return None, None
                else:
                    return layername, layerdir
            else:
                logger.plain("Repository %s needs to be fetched" % url)
                return layername, layerdir
        elif os.path.exists(layerdir):
            return layername, layerdir
        else:
            logger.error("%s is not in %s" % (url, subdir))
        return None, None


    def do_layerindex_fetch(self, args):
        """Fetches a layer from a layer index along with its dependent layers, and adds them to conf/bblayers.conf.
        """
        self.init_bbhandler(config_only = True)
        apiurl = self.bbhandler.config_data.getVar('BBLAYERS_LAYERINDEX_URL', True)
        if not apiurl:
            logger.error("Cannot get BBLAYERS_LAYERINDEX_URL")
        else:
            if apiurl[-1] != '/':
                apiurl += '/'
            apiurl += "api/"
            apilinks = self.get_json_data(apiurl)
            branches = self.get_json_data(apilinks['branches'])

            branchnum = 0
            for branch in branches:
                if branch['name'] == args.branch:
                    branchnum = branch['id']
                    break
            if branchnum == 0:
                validbranches = ', '.join([branch['name'] for branch in branches])
                logger.error('Invalid layer branch name "%s". Valid branches: %s' % (args.branch, validbranches))
                return 1

            ignore_layers = []
            for collection in self.bbhandler.config_data.getVar('BBFILE_COLLECTIONS', True).split():
                lname = self.bbhandler.config_data.getVar('BBLAYERS_LAYERINDEX_NAME_%s' % collection, True)
                if lname:
                    ignore_layers.append(lname)

            if args.ignore:
                ignore_layers.extend(args.ignore.split(','))

            layeritems = self.get_json_data(apilinks['layerItems'])
            layerbranches = self.get_json_data(apilinks['layerBranches'])
            layerdependencies = self.get_json_data(apilinks['layerDependencies'])
            invaluenames = []
            repourls = {}
            printlayers = []
            def query_dependencies(layers, layeritems, layerbranches, layerdependencies, branchnum):
                depslayer = []
                for layername in layers:
                    invaluename, layerdict = self.get_layer_deps(layername, layeritems, layerbranches, layerdependencies, branchnum)
                    if layerdict:
                        repourls[layername] = self.get_layer_deps(layername, layeritems, layerbranches, layerdependencies, branchnum, selfname=True)
                        for layer in layerdict:
                            if not layer in ignore_layers:
                                depslayer.append(layer)
                            printlayers.append((layername, layer, layerdict[layer][0], layerdict[layer][1]))
                            if not layer in ignore_layers and not layer in repourls:
                                repourls[layer] = (layerdict[layer][0], layerdict[layer][1])
                    if invaluename and not invaluename in invaluenames:
                        invaluenames.append(invaluename)
                return depslayer

            depslayers = query_dependencies(args.layername, layeritems, layerbranches, layerdependencies, branchnum)
            while depslayers:
                depslayer = query_dependencies(depslayers, layeritems, layerbranches, layerdependencies, branchnum)
                depslayers = depslayer
            if invaluenames:
                for invaluename in invaluenames:
                    logger.error('Layer "%s" not found in layer index' % invaluename)
                return 1
            logger.plain("%s %s %s %s" % ("Layer".ljust(19), "Required by".ljust(19), "Git repository".ljust(54), "Subdirectory"))
            logger.plain('=' * 115)
            for layername in args.layername:
                layerurl = repourls[layername]
                logger.plain("%s %s %s %s" % (layername.ljust(20), '-'.ljust(20), layerurl[0].ljust(55), layerurl[1]))
            printedlayers = []
            for layer, dependency, gitrepo, subdirectory in printlayers:
                if dependency in printedlayers:
                    continue
                logger.plain("%s %s %s %s" % (dependency.ljust(20), layer.ljust(20), gitrepo.ljust(55), subdirectory))
                printedlayers.append(dependency)

            if repourls:
                fetchdir = self.bbhandler.config_data.getVar('BBLAYERS_FETCH_DIR', True)
                if not fetchdir:
                    logger.error("Cannot get BBLAYERS_FETCH_DIR")
                    return 1
                if not os.path.exists(fetchdir):
                    os.makedirs(fetchdir)
                addlayers = []
                for repourl, subdir in repourls.values():
                    name, layerdir = self.get_fetch_layer(fetchdir, repourl, subdir, not args.show_only)
                    if not name:
                        # Error already shown
                        return 1
                    addlayers.append((subdir, name, layerdir))
                if not args.show_only:
                    for subdir, name, layerdir in set(addlayers):
                        if os.path.exists(layerdir):
                            if subdir:
                                logger.plain("Adding layer \"%s\" to conf/bblayers.conf" % subdir)
                            else:
                                logger.plain("Adding layer \"%s\" to conf/bblayers.conf" % name)
                            localargs = argparse.Namespace()
                            localargs.layerdir = layerdir
                            self.do_add_layer(localargs)
                        else:
                            break


    def do_layerindex_show_depends(self, args):
        """Find layer dependencies from layer index.
        """
        args.show_only = True
        args.ignore = []
        self.do_layerindex_fetch(args)


    def version_str(self, pe, pv, pr = None):
        verstr = "%s" % pv
        if pr:
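do_layerindex_fetch() walks the index and then funnels every fetched checkout through do_add_layer(), so manual and index-driven additions end in the same bblayers.conf edit. Representative invocations (layer names illustrative; the flag spellings are assumed from the args attributes used above: branch, ignore, show_only):

   $ bitbake-layers add-layer ../meta-example
   $ bitbake-layers layerindex-fetch --branch master meta-example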
@@ -381,13 +115,32 @@ Removes the specified layer from bblayers.conf
    def do_show_overlayed(self, args):
        """list overlayed recipes (where the same recipe exists in another layer)

        usage: show-overlayed [-f] [-s]

        Lists the names of overlayed recipes and the available versions in each
        layer, with the preferred version first. Note that skipped recipes that
        are overlayed will also be listed, with a " (skipped)" suffix.

        Options:
          -f  instead of the default formatting, list filenames of higher priority
              recipes with the ones they overlay indented underneath
          -s  only list overlayed recipes where the version is the same
        """
        self.init_bbhandler()

        items_listed = self.list_recipes('Overlayed recipes', None, True, args.same_version, args.filenames, True)
        show_filenames = False
        show_same_ver_only = False
        for arg in args.split():
            if arg == '-f':
                show_filenames = True
            elif arg == '-s':
                show_same_ver_only = True
            else:
                sys.stderr.write("show-overlayed: invalid option %s\n" % arg)
                self.do_help('')
                return

        items_listed = self.list_recipes('Overlayed recipes', None, True, show_same_ver_only, show_filenames, True)

        # Check for overlayed .bbclass files
        classes = defaultdict(list)
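Per the docstring above, both option styles boil down to the same call into list_recipes(); a typical invocation is:

   $ bitbake-layers show-overlayed -f -s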
@@ -414,7 +167,7 @@ are overlayed will also be listed, with a " (skipped)" suffix.
                overlayed_class_found = True

                mainfile = bb.utils.which(bbpath, os.path.join('classes', classfile))
                if args.filenames:
                if show_filenames:
                    logger.plain('%s' % mainfile)
                else:
                    # We effectively have to guess the layer here

@@ -428,7 +181,7 @@ are overlayed will also be listed, with a " (skipped)" suffix.
                for classdir in classdirs:
                    fullpath = os.path.join(classdir, classfile)
                    if fullpath != mainfile:
                        if args.filenames:
                        if show_filenames:
                            print(' %s' % fullpath)
                        else:
                            print(' %s' % self.get_layer_name(os.path.dirname(classdir)))
@@ -443,15 +196,38 @@ are overlayed will also be listed, with a " (skipped)" suffix.
    def do_show_recipes(self, args):
        """list available recipes, showing the layer they are provided by

        Lists the names of recipes and the available versions in each
        usage: show-recipes [-f] [-m] [pnspec]

        Lists the names of overlayed recipes and the available versions in each
        layer, with the preferred version first. Optionally you may specify
        pnspec to match a specified recipe name (supports wildcards). Note that
        skipped recipes will also be listed, with a " (skipped)" suffix.

        Options:
          -f  instead of the default formatting, list filenames of higher priority
              recipes with other available recipes indented underneath
          -m  only list where multiple recipes (in the same layer or different
              layers) exist for the same recipe name
        """
        self.init_bbhandler()

        show_filenames = False
        show_multi_provider_only = False
        pnspec = None
        title = 'Available recipes:'
        self.list_recipes(title, args.pnspec, False, False, args.filenames, args.multiple)
        for arg in args.split():
            if arg == '-f':
                show_filenames = True
            elif arg == '-m':
                show_multi_provider_only = True
            elif not arg.startswith('-'):
                pnspec = arg
                title = 'Available recipes matching %s:' % pnspec
            else:
                sys.stderr.write("show-recipes: invalid option %s\n" % arg)
                self.do_help('')
                return
        self.list_recipes(title, pnspec, False, False, show_filenames, show_multi_provider_only)


    def list_recipes(self, title, pnspec, show_overlayed_only, show_same_ver_only, show_filenames, show_multi_provider_only):
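Following the usage line above, restricting the listing to multi-provider recipes or to a wildcard recipe name looks like this (pattern illustrative):

   $ bitbake-layers show-recipes -m
   $ bitbake-layers show-recipes 'gst*'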
@@ -501,13 +277,7 @@ skipped recipes will also be listed, with a " (skipped)" suffix.

            if len(allproviders[p]) > 1 or not show_multi_provider_only:
                pref = preferred_versions[p]
                realfn = bb.cache.Cache.virtualfn2realfn(pref[1])
                preffile = realfn[0]
                # We only display once per recipe, we should prefer non extended versions of the
                # recipe if present (so e.g. in OpenEmbedded, openssl rather than nativesdk-openssl
                # which would otherwise sort first).
                if realfn[1] and realfn[0] in self.bbhandler.cooker.recipecache.pkg_fn:
                    continue
                preffile = bb.cache.Cache.virtualfn2realfn(pref[1])[0]
                if preffile not in preffiles:
                    preflayer = self.get_file_layer(preffile)
                    multilayer = False
@@ -537,7 +307,9 @@ skipped recipes will also be listed, with a " (skipped)" suffix.


    def do_flatten(self, args):
        """flatten layer configuration into a separate output directory.
        """flattens layer configuration into a separate output directory.

        usage: flatten [layer1 layer2 [layer3]...] <outputdir>

        Takes the specified layers (or all layers in the current layer
        configuration if none are specified) and builds a "flattened" directory

@@ -559,19 +331,26 @@ bbappends in the layers interact, and then attempt to use the new output
        layer together with that other layer, you may no longer get the same
        build results (as the layer priority order has effectively changed).
        """
        if len(args.layer) == 1:
            logger.error('If you specify layers to flatten you must specify at least two')
            return 1
        arglist = args.split()
        if len(arglist) < 1:
            logger.error('Please specify an output directory')
            self.do_help('flatten')
            return

        outputdir = args.outputdir
        if len(arglist) == 2:
            logger.error('If you specify layers to flatten you must specify at least two')
            self.do_help('flatten')
            return

        outputdir = arglist[-1]
        if os.path.exists(outputdir) and os.listdir(outputdir):
            logger.error('Directory %s exists and is non-empty, please clear it out first' % outputdir)
            return 1
            return

        self.init_bbhandler()
        layers = self.bblayers
        if len(args.layer) > 2:
            layernames = args.layer
        if len(arglist) > 2:
            layernames = arglist[:-1]
            found_layernames = []
            found_layerdirs = []
            for layerdir in layers:
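Both variants of the argument handling enforce the same rule: the output directory is the last argument, and naming layers explicitly requires at least two of them. For example (paths illustrative):

   $ bitbake-layers flatten flattened-out
   $ bitbake-layers flatten meta-one meta-two flattened-out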
@@ -720,12 +499,14 @@ build results (as the layer priority order has effectively changed).
    def do_show_appends(self, args):
        """list bbappend files and recipe files they apply to

        Lists recipes with the bbappends that apply to them as subitems.
        usage: show-appends

        Recipes are listed with the bbappends that apply to them as subitems.
        """
        self.init_bbhandler()
        if not self.bbhandler.cooker.collection.appendlist:
            logger.plain('No append files found')
            return 0
            return

        logger.plain('=== Appended recipes ===')

@@ -764,6 +545,7 @@ Lists recipes with the bbappends that apply to them as subitems.
        if best_filename in missing:
            logger.warn('%s: missing append for preferred version',
                        best_filename)
            self.returncode |= 1


    def get_appends_for_files(self, filenames):

@@ -774,7 +556,7 @@ Lists recipes with the bbappends that apply to them as subitems.
                continue

            basename = os.path.basename(filename)
            appends = self.bbhandler.cooker.collection.get_file_appends(basename)
            appends = self.bbhandler.cooker.collection.appendlist.get(basename)
            if appends:
                appended.append((basename, list(appends)))
            else:
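show-appends takes no arguments in either version, so the invocation is simply:

   $ bitbake-layers show-appends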
@@ -782,24 +564,35 @@ Lists recipes with the bbappends that apply to them as subitems.
|
||||
return appended, notappended
|
||||
|
||||
def do_show_cross_depends(self, args):
|
||||
"""Show dependencies between recipes that cross layer boundaries.
|
||||
"""figure out the dependency between recipes that crosses a layer boundary.
|
||||
|
||||
Figure out the dependencies between recipes that cross layer boundaries.
|
||||
usage: show-cross-depends [-f]
|
||||
|
||||
NOTE: .bbappend files can impact the dependencies.
|
||||
Figure out the dependency between recipes that crosses a layer boundary.
|
||||
|
||||
Options:
|
||||
-f show full file path
|
||||
|
||||
NOTE:
|
||||
The .bbappend file can impact the dependency.
|
||||
"""
|
||||
ignore_layers = (args.ignore or '').split(',')
|
||||
|
||||
self.init_bbhandler()
|
||||
|
||||
show_filenames = False
|
||||
for arg in args.split():
|
||||
if arg == '-f':
|
||||
show_filenames = True
|
||||
else:
|
||||
sys.stderr.write("show-cross-depends: invalid option %s\n" % arg)
|
||||
self.do_help('')
|
||||
return
|
||||
|
||||
pkg_fn = self.bbhandler.cooker_data.pkg_fn
|
||||
bbpath = str(self.bbhandler.config_data.getVar('BBPATH', True))
|
||||
self.require_re = re.compile(r"require\s+(.+)")
|
||||
self.include_re = re.compile(r"include\s+(.+)")
|
||||
self.inherit_re = re.compile(r"inherit\s+(.+)")
|
||||
|
||||
global_inherit = (self.bbhandler.config_data.getVar('INHERIT', True) or "").split()
|
||||
|
||||
# The bb's DEPENDS and RDEPENDS
|
||||
for f in pkg_fn:
|
||||
f = bb.cache.Cache.virtualfn2realfn(f)[0]
|
||||
@@ -814,7 +607,7 @@ NOTE: .bbappend files can impact the dependencies.
                                self.bbhandler.config_data,
                                self.bbhandler.cooker_data,
                                self.bbhandler.cooker_data.pkg_pn)
            self.check_cross_depends("DEPENDS", layername, f, best[3], args.filenames, ignore_layers)
            self.check_cross_depends("DEPENDS", layername, f, best[3], show_filenames)

            # The RDPENDS
            all_rdeps = self.bbhandler.cooker_data.rundeps[f].values()
@@ -828,33 +621,10 @@ NOTE: .bbappend files can impact the dependencies.
            for rdep in all_rdeps:
                all_p = bb.providers.getRuntimeProviders(self.bbhandler.cooker_data, rdep)
                if all_p:
                    if f in all_p:
                        # The recipe provides this one itself, ignore
                        continue
                    best = bb.providers.filterProvidersRunTime(all_p, rdep,
                                    self.bbhandler.config_data,
                                    self.bbhandler.cooker_data)[0][0]
                    self.check_cross_depends("RDEPENDS", layername, f, best, args.filenames, ignore_layers)

            # The RRECOMMENDS
            all_rrecs = self.bbhandler.cooker_data.runrecs[f].values()
            # Remove the duplicated or null one.
            sorted_rrecs = {}
            # The all_rrecs is the list in list, so we need two for loops
            for k1 in all_rrecs:
                for k2 in k1:
                    sorted_rrecs[k2] = 1
            all_rrecs = sorted_rrecs.keys()
            for rrec in all_rrecs:
                all_p = bb.providers.getRuntimeProviders(self.bbhandler.cooker_data, rrec)
                if all_p:
                    if f in all_p:
                        # The recipe provides this one itself, ignore
                        continue
                    best = bb.providers.filterProvidersRunTime(all_p, rrec,
                                    self.bbhandler.config_data,
                                    self.bbhandler.cooker_data)[0][0]
                    self.check_cross_depends("RRECOMMENDS", layername, f, best, args.filenames, ignore_layers)
                    self.check_cross_depends("RDEPENDS", layername, f, best, show_filenames)

            # The inherit class
            cls_re = re.compile('classes/')
@@ -864,12 +634,9 @@ NOTE: .bbappend files can impact the dependencies.
                # The inherits' format is [classes/cls, /path/to/classes/cls]
                # ignore the classes/cls.
                if not cls_re.match(cls):
                    classname = os.path.splitext(os.path.basename(cls))[0]
                    if classname in global_inherit:
                        continue
                    inherit_layername = self.get_file_layer(cls)
                    if inherit_layername != layername and not inherit_layername in ignore_layers:
                        if not args.filenames:
                    if inherit_layername != layername:
                        if not show_filenames:
                            f_short = self.remove_layer_prefix(f)
                            cls = self.remove_layer_prefix(cls)
                        else:
@@ -889,7 +656,7 @@ NOTE: .bbappend files can impact the dependencies.
                    if pv_re.search(needed_file) and f in self.bbhandler.cooker_data.pkg_pepvpr:
                        pv = self.bbhandler.cooker_data.pkg_pepvpr[f][1]
                        needed_file = re.sub(r"\${PV}", pv, needed_file)
                    self.print_cross_files(bbpath, keyword, layername, f, needed_file, args.filenames, ignore_layers)
                    self.print_cross_files(bbpath, keyword, layername, f, needed_file, show_filenames)
                line = fnfile.readline()
            fnfile.close()

@@ -916,22 +683,21 @@ NOTE: .bbappend files can impact the dependencies.
                        bbclass=".bbclass"
                    # Find a 'require/include xxxx'
                    if m:
                        self.print_cross_files(bbpath, keyword, layername, f, m.group(1) + bbclass, args.filenames, ignore_layers)
                        self.print_cross_files(bbpath, keyword, layername, f, m.group(1) + bbclass, show_filenames)
                    line = ffile.readline()
                ffile.close()
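The RRECOMMENDS handling above flattens a list of lists through a marker dictionary; the same de-duplication can be written more directly with a set (a sketch with invented package names, not the tool's code):

    import itertools

    # De-duplicate the nested runtime-recommends lists, as the two nested
    # loops above do with the sorted_rrecs marker dict.
    all_rrecs = [["pkg-a", "pkg-b"], ["pkg-b"], []]
    unique_rrecs = set(itertools.chain.from_iterable(all_rrecs))
    print(sorted(unique_rrecs))  # ['pkg-a', 'pkg-b']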
    def print_cross_files(self, bbpath, keyword, layername, f, needed_filename, show_filenames, ignore_layers):
    def print_cross_files(self, bbpath, keyword, layername, f, needed_filename, show_filenames):
        """Print the depends that crosses a layer boundary"""
        needed_file = bb.utils.which(bbpath, needed_filename)
        if needed_file:
            # Which layer is this file from
            needed_layername = self.get_file_layer(needed_file)
            if needed_layername != layername and not needed_layername in ignore_layers:
            if needed_layername != layername:
                if not show_filenames:
                    f = self.remove_layer_prefix(f)
                    needed_file = self.remove_layer_prefix(needed_file)
                logger.plain("%s %s %s" %(f, keyword, needed_file))

    def match_inherit(self, line):
        """Match the inherit xxx line"""
        return (self.inherit_re.match(line), "inherits")
@@ -945,95 +711,16 @@ NOTE: .bbappend files can impact the dependencies.
            keyword = "includes"
        return (m, keyword)

    def check_cross_depends(self, keyword, layername, f, needed_file, show_filenames, ignore_layers):
    def check_cross_depends(self, keyword, layername, f, needed_file, show_filenames):
        """Print the DEPENDS/RDEPENDS file that crosses a layer boundary"""
        best_realfn = bb.cache.Cache.virtualfn2realfn(needed_file)[0]
        needed_layername = self.get_file_layer(best_realfn)
        if needed_layername != layername and not needed_layername in ignore_layers:
        if needed_layername != layername:
            if not show_filenames:
                f = self.remove_layer_prefix(f)
                best_realfn = self.remove_layer_prefix(best_realfn)

            logger.plain("%s %s %s" % (f, keyword, best_realfn))

def main():

    cmds = Commands()

    def add_command(cmdname, function, *args, **kwargs):
        # Convert docstring for function to help (one-liner shown in main --help) and description (shown in subcommand --help)
        docsplit = function.__doc__.splitlines()
        help = docsplit[0]
        if len(docsplit) > 1:
            desc = '\n'.join(docsplit[1:])
        else:
            desc = help
        subparser = subparsers.add_parser(cmdname, *args, help=help, description=desc, formatter_class=argparse.RawTextHelpFormatter, **kwargs)
        subparser.set_defaults(func=function)
        return subparser

    parser = argparse.ArgumentParser(description="BitBake layers utility",
                                     epilog="Use %(prog)s <subcommand> --help to get help on a specific command")
    parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
    parser.add_argument('-q', '--quiet', help='Print only errors', action='store_true')
    subparsers = parser.add_subparsers(title='subcommands', metavar='<subcommand>')

    parser_show_layers = add_command('show-layers', cmds.do_show_layers)

    parser_add_layer = add_command('add-layer', cmds.do_add_layer)
    parser_add_layer.add_argument('layerdir', help='Layer directory to add')

    parser_remove_layer = add_command('remove-layer', cmds.do_remove_layer)
    parser_remove_layer.add_argument('layerdir', help='Layer directory to remove (wildcards allowed, enclose in quotes to avoid shell expansion)')
    parser_remove_layer.set_defaults(func=cmds.do_remove_layer)

    parser_show_overlayed = add_command('show-overlayed', cmds.do_show_overlayed)
    parser_show_overlayed.add_argument('-f', '--filenames', help='instead of the default formatting, list filenames of higher priority recipes with the ones they overlay indented underneath', action='store_true')
    parser_show_overlayed.add_argument('-s', '--same-version', help='only list overlayed recipes where the version is the same', action='store_true')

    parser_show_recipes = add_command('show-recipes', cmds.do_show_recipes)
    parser_show_recipes.add_argument('-f', '--filenames', help='instead of the default formatting, list filenames of higher priority recipes with the ones they overlay indented underneath', action='store_true')
    parser_show_recipes.add_argument('-m', '--multiple', help='only list where multiple recipes (in the same layer or different layers) exist for the same recipe name', action='store_true')
    parser_show_recipes.add_argument('pnspec', nargs='?', help='optional recipe name specification (wildcards allowed, enclose in quotes to avoid shell expansion)')

    parser_show_appends = add_command('show-appends', cmds.do_show_appends)

    parser_flatten = add_command('flatten', cmds.do_flatten)
    parser_flatten.add_argument('layer', nargs='*', help='Optional layer(s) to flatten (otherwise all are flattened)')
    parser_flatten.add_argument('outputdir', help='Output directory')

    parser_show_cross_depends = add_command('show-cross-depends', cmds.do_show_cross_depends)
    parser_show_cross_depends.add_argument('-f', '--filenames', help='show full file path', action='store_true')
    parser_show_cross_depends.add_argument('-i', '--ignore', help='ignore dependencies on items in the specified layer(s) (split multiple layer names with commas, no spaces)', metavar='LAYERNAME')

    parser_layerindex_fetch = add_command('layerindex-fetch', cmds.do_layerindex_fetch)
    parser_layerindex_fetch.add_argument('-n', '--show-only', help='show dependencies and do nothing else', action='store_true')
    parser_layerindex_fetch.add_argument('-b', '--branch', help='branch name to fetch (default %(default)s)', default='master')
    parser_layerindex_fetch.add_argument('-i', '--ignore', help='assume the specified layers do not need to be fetched/added (separate multiple layers with commas, no spaces)', metavar='LAYER')
    parser_layerindex_fetch.add_argument('layername', nargs='+', help='layer to fetch')

    parser_layerindex_show_depends = add_command('layerindex-show-depends', cmds.do_layerindex_show_depends)
    parser_layerindex_show_depends.add_argument('-b', '--branch', help='branch name to fetch (default %(default)s)', default='master')
    parser_layerindex_show_depends.add_argument('layername', nargs='+', help='layer to query')

    args = parser.parse_args()

    if args.debug:
        logger.setLevel(logging.DEBUG)
    elif args.quiet:
        logger.setLevel(logging.ERROR)

    ret = args.func(args)

    return ret


if __name__ == "__main__":
    try:
        ret = main()
    except Exception:
        ret = 1
        import traceback
        traceback.print_exc(5)
    sys.exit(ret)
if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]) or 0)

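The add_command() helper above turns each subcommand's docstring into argparse help text: the first docstring line becomes the one-liner in the main --help listing, the remainder the subcommand description. A self-contained sketch of that convention (the demo subcommand is invented):

    import argparse

    def do_demo(args):
        """Short one-line help.

    Longer description shown by 'demo --help'."""
        return 0

    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(title='subcommands', metavar='<subcommand>')
    docsplit = do_demo.__doc__.splitlines()
    sub = subparsers.add_parser('demo', help=docsplit[0],
                                description='\n'.join(docsplit[1:]) or docsplit[0],
                                formatter_class=argparse.RawTextHelpFormatter)
    sub.set_defaults(func=do_demo)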
@@ -12,18 +12,10 @@ import errno
import signal

# Users shouldn't be running this code directly
if len(sys.argv) != 2 or not sys.argv[1].startswith("decafbad"):
if len(sys.argv) != 2 or sys.argv[1] != "decafbad":
    print("bitbake-worker is meant for internal execution by bitbake itself, please don't use it standalone.")
    sys.exit(1)

profiling = False
if sys.argv[1] == "decafbadbad":
    profiling = True
    try:
        import cProfile as profile
    except:
        import profile

logger = logging.getLogger("BitBake")

try:
@@ -89,11 +81,6 @@ def workerlog_write(msg):
    lf.write(msg)
    lf.flush()

def sigterm_handler(signum, frame):
    signal.signal(signal.SIGTERM, signal.SIG_DFL)
    os.killpg(0, signal.SIGTERM)
    sys.exit()
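A condensed sketch of the magic-argument and profiling setup shown above: a single "decafbad..." token both guards against direct invocation and selects profiled runs, with cProfile preferred over the pure-Python profiler:

    import sys

    # Guard against standalone use, then pick a profiler; mirrors the
    # startup logic above (the argument values are the worker's magic tokens).
    if len(sys.argv) != 2 or not sys.argv[1].startswith("decafbad"):
        print("meant for internal execution, please don't use it standalone.")
        sys.exit(1)

    profiling = False
    if sys.argv[1] == "decafbadbad":
        profiling = True
        try:
            import cProfile as profile
        except ImportError:
            import profile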
def fork_off_task(cfg, data, workerdata, fn, task, taskname, appends, taskdepdata, quieterrors=False):
    # We need to setup the environment BEFORE the fork, since
    # a fork() or exec*() activates PSEUDO...
@@ -142,13 +129,10 @@ def fork_off_task(cfg, data, workerdata, fn, task, taskname, appends, taskdepdat
        bb.msg.fatal("RunQueue", "fork failed: %d (%s)" % (e.errno, e.strerror))

    if pid == 0:
        def child():
            global worker_pipe
            pipein.close()

            signal.signal(signal.SIGTERM, sigterm_handler)
            # Let SIGHUP exit as SIGTERM
            signal.signal(signal.SIGHUP, sigterm_handler)
            signal.signal(signal.SIGTERM, signal.SIG_DFL)

            # Save out the PID so that the event can include it the
            # events
@@ -156,11 +140,8 @@ def fork_off_task(cfg, data, workerdata, fn, task, taskname, appends, taskdepdat
            bb.event.worker_fire = worker_child_fire
            worker_pipe = pipeout

            # Make the child the process group leader and ensure no
            # child process will be controlled by the current terminal
            # This ensures signals sent to the controlling terminal like Ctrl+C
            # don't stop the child processes.
            os.setsid()
            # Make the child the process group leader
            os.setpgid(0, 0)
            # No stdin
            newsi = os.open(os.devnull, os.O_RDWR)
            os.dup2(newsi, sys.stdin.fileno())
@@ -173,11 +154,15 @@ def fork_off_task(cfg, data, workerdata, fn, task, taskname, appends, taskdepdat
            data.setVar("BUILDNAME", workerdata["buildname"])
            data.setVar("DATE", workerdata["date"])
            data.setVar("TIME", workerdata["time"])
            bb.parse.siggen.set_taskdata(workerdata["sigdata"])
            bb.parse.siggen.set_taskdata(workerdata["hashes"], workerdata["hash_deps"], workerdata["sigchecksums"])
            ret = 0
            try:
                the_data = bb.cache.Cache.loadDataFull(fn, appends, data)
                the_data.setVar('BB_TASKHASH', workerdata["runq_hash"][task])
                for h in workerdata["hashes"]:
                    the_data.setVar("BBHASH_%s" % h, workerdata["hashes"][h])
                for h in workerdata["hash_deps"]:
                    the_data.setVar("BBHASHDEPS_%s" % h, workerdata["hash_deps"][h])

                # exported_vars() returns a generator which *cannot* be passed to os.environ.update()
                # successfully. We also need to unset anything from the environment which shouldn't be there
@@ -198,22 +183,11 @@ def fork_off_task(cfg, data, workerdata, fn, task, taskname, appends, taskdepdat
                logger.critical(str(exc))
                os._exit(1)
            try:
                if cfg.dry_run:
                    return 0
                return bb.build.exec_task(fn, taskname, the_data, cfg.profile)
                if not cfg.dry_run:
                    ret = bb.build.exec_task(fn, taskname, the_data, cfg.profile)
                os._exit(ret)
            except:
                os._exit(1)
        if not profiling:
            os._exit(child())
        else:
            profname = "profile-%s.log" % (fn.replace("/", "-") + "-" + taskname)
            prof = profile.Profile()
            try:
                ret = profile.Profile.runcall(prof, child)
            finally:
                prof.dump_stats(profname)
                bb.utils.process_profilelog(profname)
                os._exit(ret)
    else:
        for key, value in envbackup.iteritems():
            if value is None:
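The profiling branch of fork_off_task() wraps the child callable in a Profile object; a runnable sketch of the same runcall/dump_stats pattern, with pstats standing in for bb.utils.process_profilelog:

    import cProfile as profile
    import pstats

    def work():
        # Stand-in for the child() callable profiled above.
        return sum(i * i for i in range(10000))

    prof = profile.Profile()
    try:
        ret = prof.runcall(work)
    finally:
        prof.dump_stats("profile-demo.log")
        pstats.Stats("profile-demo.log").sort_stats("cumulative").print_stats(5)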
@@ -271,14 +245,9 @@ class BitbakeWorker(object):
        self.build_pipes = {}

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)

    def sigterm_exception(self, signum, stackframe):
        if signum == signal.SIGTERM:
            bb.warn("Worker recieved SIGTERM, shutting down...")
        elif signum == signal.SIGHUP:
            bb.warn("Worker recieved SIGHUP, shutting down...")
        bb.warn("Worker recieved SIGTERM, shutting down...")
        self.handle_finishnow(None)
        signal.signal(signal.SIGTERM, signal.SIG_DFL)
        os.kill(os.getpid(), signal.SIGTERM)
@@ -393,16 +362,7 @@ class BitbakeWorker(object):

try:
    worker = BitbakeWorker(sys.stdin)
    if not profiling:
        worker.serve()
    else:
        profname = "profile-worker.log"
        prof = profile.Profile()
        try:
            profile.Profile.runcall(prof, worker.serve)
        finally:
            prof.dump_stats(profname)
            bb.utils.process_profilelog(profname)
    worker.serve()
except BaseException as e:
    if not normalexit:
        import traceback

@@ -34,7 +34,7 @@ from bb.ui.crumbs.hig.deployimagedialog import DeployImageDialog
from bb.ui.crumbs.hig.imageselectiondialog import ImageSelectionDialog

# I put all the fs bitbake supported here. Need more test.
DEPLOYABLE_IMAGE_TYPES = ["jffs2", "cramfs", "ext2", "ext3", "ext4", "btrfs", "squashfs", "ubi", "vmdk"]
DEPLOYABLE_IMAGE_TYPES = ["jffs2", "cramfs", "ext2", "ext3", "btrfs", "squashfs", "ubi", "vmdk"]
Title = "USB Image Writer"

class DeployWindow(gtk.Window):

@@ -16,15 +16,9 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA


# This script can be run in two modes.

# When used with "source", from a build directory,
# it enables toaster event logging and starts the bitbake resident server.
# use as: source toaster [start|stop] [noweb] [noui]

# When it is called as a stand-alone script, it starts just the
# web server, and the building shall be done through the web interface.
# As script, it will not return to the command prompt. Stop with Ctrl-C.
# This script enables toaster event logging and
# starts bitbake resident server
# use as: source toaster [start|stop]

# Helper function to kill a background toaster development server

@@ -36,8 +30,6 @@ function webserverKillAll()
        while kill -0 $(< ${pidfile}) 2>/dev/null; do
            kill -SIGTERM -$(< ${pidfile}) 2>/dev/null
            sleep 1;
            # Kill processes if they are still running - may happen in interactive shells
            ps fux | grep "python.*manage.py runserver" | awk '{print $2}' | xargs kill
        done;
        rm ${pidfile}
    fi
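The webserverKillAll() loop above relies on the 'kill -0 pid' idiom to probe whether a recorded process is still alive; for comparison, the equivalent check in Python (pid_alive is a hypothetical helper, not part of the script):

    import os

    def pid_alive(pidfile):
        # Signal 0 delivers nothing but raises if the pid is gone,
        # matching the shell's 'kill -0 $(< pidfile)' probe.
        try:
            with open(pidfile) as f:
                pid = int(f.read().strip())
            os.kill(pid, 0)
            return True
        except (OSError, ValueError):
            return False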
@@ -46,12 +38,6 @@ function webserverKillAll()

function webserverStartAll()
{
    # do not start if toastermain points to a valid process
    if ! cat "${BUILDDIR}/.toastermain.pid" 2>/dev/null | xargs -I{} kill -0 {} ; then
        retval=1
        rm "${BUILDDIR}/.toastermain.pid"
    fi

    retval=0
    python $BBBASEDIR/lib/toaster/manage.py syncdb || retval=1
    python $BBBASEDIR/lib/toaster/manage.py migrate orm || retval=2
@@ -63,19 +49,12 @@ function webserverStartAll()
        retval=0
        python $BBBASEDIR/lib/toaster/manage.py migrate orm || retval=1
    fi
    if [ "x$TOASTER_MANAGED" == "x1" ]; then
        python $BBBASEDIR/lib/toaster/manage.py migrate bldcontrol || retval=1
        python $BBBASEDIR/lib/toaster/manage.py checksettings --traceback || retval=1
    fi
    if [ $retval -eq 0 ]; then
        echo "Starting webserver..."
        python $BBBASEDIR/lib/toaster/manage.py runserver "0.0.0.0:$WEB_PORT" </dev/null >>${BUILDDIR}/toaster_web.log 2>&1 & echo $! >${BUILDDIR}/.toastermain.pid
        python $BBBASEDIR/lib/toaster/manage.py runserver 0.0.0.0:8000 </dev/null >${BUILDDIR}/toaster_web.log 2>&1 & echo $! >${BUILDDIR}/.toastermain.pid
        sleep 1
        if ! cat "${BUILDDIR}/.toastermain.pid" | xargs -I{} kill -0 {} ; then
            retval=1
            rm "${BUILDDIR}/.toastermain.pid"
        else
            echo "Webserver address: http://0.0.0.0:$WEB_PORT/"
        fi
    fi
    return $retval
@@ -85,10 +64,8 @@ function webserverStartAll()

function addtoConfiguration()
{
    file=$1
    shift
    echo "#Created by toaster start script" > ${BUILDDIR}/conf/$file
    for var in "$@"; do echo $var >> ${BUILDDIR}/conf/$file; done
    echo "#Created by toaster start script" > ${BUILDDIR}/conf/$2
    echo $1 >> ${BUILDDIR}/conf/$2
}

INSTOPSYSTEM=0
@@ -103,7 +80,7 @@ function stop_system()
        kill $(< ${BUILDDIR}/.toasterui.pid ) 2>/dev/null
        rm ${BUILDDIR}/.toasterui.pid
    fi
    BBSERVER=0.0.0.0:-1 bitbake -m
    BBSERVER=localhost:8200 bitbake -m
    unset BBSERVER
    webserverKillAll
    # force stop any misbehaving bitbake server
@@ -126,136 +103,36 @@ function notify_chldexit() {
}


function verify_prereq() {
    # Verify prerequisites

    if ! echo "import django; print (1,) == django.VERSION[0:1] and django.VERSION[1:2][0] in (6,)" | python 2>/dev/null | grep True >/dev/null; then
        echo -e "This program needs Django 1.6. Please install with\n\npip install django==1.6\n"
        return 2
    fi

    if ! echo "import south; print reduce(lambda x, y: 2 if x==2 else 0 if x == 0 else y, map(lambda x: 1+cmp(x[1]-x[0],0), zip([0,8,4], map(int,south.__version__.split(\".\"))))) > 0" | python 2>/dev/null | grep True >/dev/null; then
        echo -e "This program needs South 0.8.4. Please install with\n\npip install south==0.8.4\n"
        return 2
    fi
    return 0
}
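The Django test in verify_prereq() pipes a one-liner into python and greps its output for "True"; unrolled into plain Python, the intent is roughly this (a sketch of the check, not the exact shell quoting):

    import django

    # True only for Django 1.6.x, the version verify_prereq() demands.
    ok = (1,) == django.VERSION[0:1] and django.VERSION[1] == 6
    print(ok)  # the surrounding shell greps this output for "True"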

# read command line parameters

BBBASEDIR=`dirname ${BASH_SOURCE}`/..
RUNNING=0

NOTOASTERUI=0
WEBSERVER=1
TOASTER_BRBE=""
WEB_PORT="8000"

for param in $*; do
    case $param in
    noui )
        NOTOASTERUI=1
        ;;
    noweb )
        WEBSERVER=0
        ;;
    brbe=* )
        TOASTER_BRBE=$'\n'"TOASTER_BRBE=\""${param#*=}"\""
        ;;
    webport=*)
        WEB_PORT="${param#*=}"
    esac
done

# We make sure we're running in the current shell and in a good environment

if [ -z "$ZSH_NAME" ] && [ `basename \"$0\"` = `basename \"$BASH_SOURCE\"` ]; then
    # We are called as standalone. We refuse to run in a build environment - we need the interactive mode for that.
    # Start just the web server, point the web browser to the interface, and start any Django services.

    if ! verify_prereq; then
        echo -e "Error: Could not verify that the needed dependencies are installed. Please use virtualenv and pip to install dependencies listed in toaster-requirements.txt" 1>&2;
        exit 1;
    fi

    if [ -n "$BUILDDIR" ]; then
        echo -e "Error: It looks like you sourced oe-init-build-env. Toaster cannot start in build mode from an oe-core build environment.\n You should be starting Toaster from a new terminal window." 1>&2;
        exit 1;
    fi

    if [ "x`which daemon`" == "x" ]; then
        echo -e "Failed dependency; toaster needs the 'daemon' program in order to be able to start builds'. Please install the 'daemon' program from your distribution repositories or http://www.libslack.org/daemon/" 1>&2;
        exit 1;
    fi

    # Define a fake builddir where only the pid files are actually created. No real builds will take place here.
    BUILDDIR=/tmp/toaster_$$
    if [ -d "$BUILDDIR" ]; then
        echo -e "Previous toaster run directory $BUILDDIR found, cowardly refusing to start. Please remove the directory when that toaster instance is over" 2>&1
        exit 1;
    fi

    mkdir -p "$BUILDDIR"

    RUNNING=1
    function trap_ctrlc() {
        echo "** Stopping system"
        webserverKillAll
        RUNNING=0
    }

    function do_cleanup() {
        find "$BUILDDIR" -type f | xargs rm
        rmdir "$BUILDDIR"
    }
    function cleanup() {
        if grep -ir error "$BUILDDIR" >/dev/null; then
            if grep -irn "That port is already in use" "$BUILDDIR"; then
                echo "You can use the \"webport=PORTNUMBER\" parameter to start Toaster on a different port (port $WEB_PORT is already in use)"
                do_cleanup
            else
                echo -e "\nErrors found in the Toaster log files present in '$BUILDDIR'. Directory will not be cleaned.\n Please review the errors and notify toaster@yoctoproject.org or submit a bug https://bugzilla.yoctoproject.org/enter_bug.cgi?product=Toaster"
            fi
        else
            echo "No errors found, removing the run directory '$BUILDDIR'"
            do_cleanup
        fi;
    }
    TOASTER_MANAGED=1
    export TOASTER_MANAGED=1
    if [ $WEBSERVER -gt 0 ] && ! webserverStartAll; then
        echo "Failed to start the web server, stopping" 1>&2;
        cleanup
        exit 1;
    fi
    if [ $WEBSERVER -gt 0 ]; then
        echo "Starting browser..."
        xdg-open http://127.0.0.1:$WEB_PORT/ >/dev/null 2>&1 &
    fi
    trap trap_ctrlc SIGINT
    echo "Toaster is now running. You can stop it with Ctrl-C"
    while [ $RUNNING -gt 0 ]; do
        python $BBBASEDIR/lib/toaster/manage.py runbuilds 2>&1 | tee -a "$BUILDDIR/toaster.log"
        sleep 1
    done
    cleanup
    echo "**** Exit"
    exit 0
    echo "Error: This script needs to be sourced. Please run as 'source toaster [start|stop]'" 1>&2;
    exit 1
fi

if ! verify_prereq; then
    echo -e "Error: Could not verify that the needed dependencies are installed. Please use virtualenv and pip to install dependencies listed in toaster-requirements.txt" 1>&2;
    return 1;
fi


# We make sure we're running in the current shell and in a good environment
if [ -z "$BUILDDIR" ] || [ -z `which bitbake` ]; then
    echo "Error: Build environment is not setup or bitbake is not in path." 1>&2;
    return 2
fi

BBBASEDIR=`dirname ${BASH_SOURCE}`/..


# Verify prerequisites

if ! echo "import django; print (1,5) == django.VERSION[0:2]" | python 2>/dev/null | grep True >/dev/null; then
    echo -e "This program needs Django 1.5. Please install with\n\nsudo pip install django==1.5"
    return 2
fi

if ! echo "import south; print [0,8,4] == map(int,south.__version__.split(\".\"))" | python 2>/dev/null | grep True >/dev/null; then
    echo -e "This program needs South 0.8.4. Please install with\n\nsudo pip install south==0.8.4"
    return 2
fi


# Determine the action. If specified by arguments, fine, if not, toggle it
if [ "x$1" == "xstart" ] || [ "x$1" == "xstop" ]; then
@@ -268,6 +145,15 @@ else
    fi;
fi

NOTOASTERUI=0
for param in $*; do
    case $param in
    noui )
        NOTOASTERUI=1
        ;;
    esac
done

echo "The system will $CMD."

# Make sure it's safe to run by checking bitbake lock
@@ -277,54 +163,42 @@ if [ -e $BUILDDIR/bitbake.lock ]; then
    (flock -n 200 ) 200<$BUILDDIR/bitbake.lock || lock=0
fi

if [ ${CMD} == "start" ] && [ $lock -eq 0 ]; then
    echo "Error: bitbake lock state error. File locks show that the system is on." 1>&2
    echo "Please wait for the current build to finish, stop and then start the system again." 1>&2
if [ ${CMD} == "start" ] && ( [ $lock -eq 0 ] || [ -e $BUILDDIR/.toastermain.pid ] ); then
    echo "Error: bitbake lock state error. File locks show that the system is on." 2>&1
    echo "If you see problems, stop and then start the system again." 2>&1
    return 3
fi

if [ ${CMD} == "start" ] && [ -e $BUILDDIR/.toastermain.pid ] && kill -0 `cat $BUILDDIR/.toastermain.pid`; then
    echo "Warning: bitbake appears to be dead, but the Toaster web server is running. Something fishy is going on." 1>&2
    echo "Cleaning up the web server to start from a clean slate."
    webserverKillAll
fi


# Execute the commands

case $CMD in
    start )
        start_success=1
        addtoConfiguration toaster.conf "INHERIT+=\"toaster buildhistory\"" $TOASTER_BRBE
        if [ $WEBSERVER -gt 0 ] && ! webserverStartAll; then
        addtoConfiguration "INHERIT+=\"toaster buildhistory\"" toaster.conf
        if ! webserverStartAll; then
            echo "Failed ${CMD}."
            return 4
        fi
        unset BBSERVER
        PREREAD=""
        if [ -e ${BUILDDIR}/conf/toaster-pre.conf ]; then
            rm ${BUILDDIR}/conf/toaster-pre.conf
        fi
        bitbake $PREREAD --postread conf/toaster.conf --server-only -t xmlrpc -B 0.0.0.0:0
        bitbake --postread conf/toaster.conf --server-only -t xmlrpc -B localhost:8200
        if [ $? -ne 0 ]; then
            start_success=0
            echo "Bitbake server start failed"
        else
            export BBSERVER=0.0.0.0:-1
            export BBSERVER=localhost:8200
            if [ $NOTOASTERUI == 0 ]; then # we start the TOASTERUI only if not inhibited
                bitbake --observe-only -u toasterui >>${BUILDDIR}/toaster_ui.log 2>&1 & echo $! >${BUILDDIR}/.toasterui.pid
                bitbake --observe-only -u toasterui >${BUILDDIR}/toaster_ui.log 2>&1 & echo $! >${BUILDDIR}/.toasterui.pid
            fi
        fi
        if [ $start_success -eq 1 ]; then
            # set fail safe stop system on terminal exit
            trap stop_system SIGHUP
            echo "Successful ${CMD}."
            return 0
        else
            # failed start, do stop
            stop_system
            echo "Failed ${CMD}."
            return 1
        fi
        # stop system on terminal exit
        set -o monitor

@@ -1,179 +0,0 @@
#!/usr/bin/env python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (C) 2014 Alex Damian
#
# This file re-uses code spread throughout other Bitbake source files.
# As such, all other copyrights belong to their own right holders.
#
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.


# This command takes a filename as a single parameter. The filename is read
# as a build eventlog, and the ToasterUI is used to process events in the file
# and log data in the database

import os
import sys, logging

# mangle syspath to allow easy import of modules
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
                                'lib'))


import bb.cooker
from bb.ui import toasterui
import sys
import logging

logger = logging.getLogger(__name__)
console = logging.StreamHandler(sys.stdout)
format_str = "%(levelname)s: %(message)s"
logging.basicConfig(format=format_str)


import json, pickle


class FileReadEventsServerConnection():
    """ Emulates a connection to a bitbake server that feeds
        events coming actually read from a saved log file.
    """

    class MockConnection():
        """ fill-in for the proxy to the server. we just return generic data
        """
        def __init__(self, sc):
            self._sc = sc

        def runCommand(self, commandArray):
            """ emulates running a command on the server; only read-only commands are accepted """
            command_name = commandArray[0]

            if command_name == "getVariable":
                if commandArray[1] in self._sc._variables:
                    return (self._sc._variables[commandArray[1]]['v'], None)
                return (None, "Missing variable")

            elif command_name == "getAllKeysWithFlags":
                dump = {}
                flaglist = commandArray[1]
                for k in self._sc._variables.keys():
                    try:
                        if not k.startswith("__"):
                            v = self._sc._variables[k]['v']
                            dump[k] = {
                                'v' : v ,
                                'history' : self._sc._variables[k]['history'],
                            }
                            for d in flaglist:
                                dump[k][d] = self._sc._variables[k][d]
                    except Exception as e:
                        print(e)
                return (dump, None)
            else:
                raise Exception("Command %s not implemented" % commandArray[0])

        def terminateServer(self):
            """ do not do anything """
            pass



    class EventReader():
        def __init__(self, sc):
            self._sc = sc
            self.firstraise = 0

        def _create_event(self, line):
            def _import_class(name):
                assert len(name) > 0
                assert "." in name, name

                components = name.strip().split(".")
                modulename = ".".join(components[:-1])
                moduleklass = components[-1]

                module = __import__(modulename, fromlist=[str(moduleklass)])
                return getattr(module, moduleklass)

            # we build a toaster event out of current event log line
            try:
                event_data = json.loads(line.strip())
                event_class = _import_class(event_data['class'])
                event_object = pickle.loads(json.loads(event_data['vars']))
            except ValueError as e:
                print("Failed loading ", line)
                raise e

            if not isinstance(event_object, event_class):
                raise Exception("Error loading objects %s class %s ", event_object, event_class)

            return event_object

        def waitEvent(self, timeout):

            nextline = self._sc._eventfile.readline()
            if len(nextline) == 0:
                # the build data ended, while toasterui still waits for events.
                # this happens when the server was abruptly stopped, so we simulate this
                self.firstraise += 1
                if self.firstraise == 1:
                    raise KeyboardInterrupt()
                else:
                    return None
            else:
                self._sc.lineno += 1
                return self._create_event(nextline)


    def _readVariables(self, variableline):
        self._variables = json.loads(variableline.strip())['allvariables']


    def __init__(self, file_name):
        self.connection = FileReadEventsServerConnection.MockConnection(self)
        self._eventfile = open(file_name, "r")

        # we expect to have the variable dump at the start of the file
        self.lineno = 1
        self._readVariables(self._eventfile.readline())

        self.events = FileReadEventsServerConnection.EventReader(self)





class MockConfigParameters():
    """ stand-in for cookerdata.ConfigParameters; as we don't really config a cooker, this
        serves just to supply needed interfaces for the toaster ui to work """
    def __init__(self):
        self.observe_only = True  # we can only read files


# run toaster ui on our mock bitbake class
if __name__ == "__main__":
    if len(sys.argv) < 2:
        logger.error("Usage: %s event.log " % sys.argv[0])
        sys.exit(1)

    file_name = sys.argv[-1]
    mock_connection = FileReadEventsServerConnection(file_name)
    configParams = MockConfigParameters()

    # run the main program
    toasterui.main(mock_connection.connection, mock_connection.events, configParams)
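The deleted replay script above expects one JSON object per log line, carrying the event's class name and a pickled payload; a small round-trip sketch of that format (DemoEvent is invented, and the latin-1 shims exist only to make the Python 2-era format runnable on Python 3):

    import json
    import pickle

    class DemoEvent(object):
        def __init__(self, msg):
            self.msg = msg

    # Write one event-log line: class name plus pickled, JSON-wrapped payload.
    payload = pickle.dumps(DemoEvent("hello"), 0).decode('latin-1')
    line = json.dumps({'class': '__main__.DemoEvent', 'vars': json.dumps(payload)})

    # Read it back the way _create_event() does.
    data = json.loads(line)
    obj = pickle.loads(json.loads(data['vars']).encode('latin-1'))
    assert isinstance(obj, DemoEvent) and obj.msg == "hello"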
@@ -53,6 +53,7 @@ fun! NewBBTemplate()
    put ='LICENSE = \"\"'
    put ='SECTION = \"\"'
    put ='DEPENDS = \"\"'
    put ='PR = \"r0\"'
    put =''
    put ='SRC_URI = \"\"'


@@ -11,7 +11,7 @@
# validate: validates
# clean: removes files
#
# The Makefile generates an HTML version of every document. The
# The Makefile generates an HTML and PDF version of every document. The
# variable DOC indicates the folder name for a given manual.
#
# To build a manual, you must invoke 'make' with the DOC argument.
@@ -21,8 +21,8 @@
#     make DOC=bitbake-user-manual
#     make pdf DOC=bitbake-user-manual
#
# The first example generates the HTML version of the User Manual.
# The second example generates the PDF version of the User Manual.
# The first example generates the HTML and PDF versions of the User Manual.
# The second example generates the HTML version only of the User Manual.
#

ifeq ($(DOC),bitbake-user-manual)
@@ -31,9 +31,9 @@ XSLTOPTS = --stringparam html.stylesheet bitbake-user-manual-style.css \
           --stringparam section.autolabel 1 \
           --stringparam section.label.includes.component.label 1 \
           --xinclude
ALLPREQ = html tarball
TARFILES = bitbake-user-manual-style.css bitbake-user-manual.html figures/bitbake-title.png
MANUALS = $(DOC)/$(DOC).html
ALLPREQ = html pdf tarball
TARFILES = bitbake-user-manual-style.css bitbake-user-manual.html bitbake-user-manual.pdf figures/bitbake-title.png
MANUALS = $(DOC)/$(DOC).html $(DOC)/$(DOC).pdf
FIGURES = figures
STYLESHEET = $(DOC)/*.css

@@ -3,15 +3,9 @@

<xsl:import href="http://docbook.sourceforge.net/release/xsl/current/xhtml/docbook.xsl" />

  <xsl:include href="../template/permalinks.xsl"/>
  <xsl:include href="../template/section.title.xsl"/>
  <xsl:include href="../template/component.title.xsl"/>
  <xsl:include href="../template/division.title.xsl"/>
  <xsl:include href="../template/formal.object.heading.xsl"/>
  <xsl:include href="../template/gloss-permalinks.xsl"/>

  <xsl:param name="html.stylesheet" select="'user-manual-style.css'" />
  <xsl:param name="chapter.autolabel" select="1" />
  <!-- <xsl:param name="appendix.autolabel" select="A" /> -->
  <xsl:param name="section.autolabel" select="1" />
  <xsl:param name="section.label.includes.component.label" select="1" />
  <xsl:param name="appendix.autolabel">A</xsl:param>

@@ -6,9 +6,8 @@

<para>
    The primary purpose for running BitBake is to produce some kind
    of output such as a single installable package, a kernel, a software
    development kit, or even a full, board-specific bootable Linux image,
    complete with bootloader, kernel, and root filesystem.
    of output such as an image, a kernel, or a software development
    kit.
    Of course, you can execute the <filename>bitbake</filename>
    command with options that cause it to execute single tasks,
    compile single recipe files, capture or clear data, or simply
@@ -27,40 +26,23 @@
    see
    "<link linkend='bitbake-user-manual-command'>The BitBake Command</link>"
    section.
    <note>
        <para>
            Prior to executing BitBake, you should take advantage of available
            parallel thread execution on your build host by setting the
            <link linkend='var-BB_NUMBER_THREADS'><filename>BB_NUMBER_THREADS</filename></link>
            variable in your project's <filename>local.conf</filename>
            configuration file.
        </para>

        <para>
            A common way to determine this value for your build host is to run:
            <literallayout class='monospaced'>
     $ grep processor /proc/cpuinfo
            </literallayout>
            and count the number of processors displayed. Note that the number of
            processors will take into account hyper-threading, so that a quad-core
            build host with hyper-threading will most likely show eight processors,
            which is the value you would then assign to that variable.
        </para>

        <para>
            A possibly simpler solution is that some Linux distributions
            (e.g. Debian and Ubuntu) provide the <filename>ncpus</filename> command.
        </para>
    </note>
</para>

<note>
    Prior to executing BitBake, you should take advantage of parallel
    thread execution by setting the
    <link linkend='var-BB_NUMBER_THREADS'><filename>BB_NUMBER_THREADS</filename></link>
    variable in your <filename>local.conf</filename>
    configuration file.
</note>

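Both versions of the note above derive BB_NUMBER_THREADS from the processor count; for reference, Python reports the same hyper-threading-inclusive figure directly:

    import multiprocessing

    # Same count as 'grep processor /proc/cpuinfo | wc -l' on Linux,
    # including hyper-threaded siblings.
    print(multiprocessing.cpu_count())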
<section id='parsing-the-base-configuration-metadata'>
    <title>Parsing the Base Configuration Metadata</title>

    <para>
        The first thing BitBake does is parse base configuration
        metadata.
        Base configuration metadata consists of your project's
        Base configuration metadata consists of the
        <filename>bblayers.conf</filename> file to determine what
        layers BitBake needs to recognize, all necessary
        <filename>layer.conf</filename> files (one from each layer),
@@ -89,11 +71,10 @@
        and
        <link linkend='var-BBFILES'><filename>BBFILES</filename></link>.
        <filename>BBPATH</filename> is used to search for
        configuration and class files under the
        <filename>conf</filename> and <filename>classes</filename>
        configuration and class files under
        <filename>conf/</filename> and <filename>class/</filename>
        directories, respectively.
        <filename>BBFILES</filename> is used to locate both recipe
        and recipe append files
        <filename>BBFILES</filename> is used to find recipe files
        (<filename>.bb</filename> and <filename>.bbappend</filename>).
        If there is no <filename>bblayers.conf</filename> file,
        it is assumed the user has set the <filename>BBPATH</filename>
@@ -101,7 +82,7 @@
    </para>

    <para>
        Next, the <filename>bitbake.conf</filename> file is located
        Next, the <filename>bitbake.conf</filename> file is searched
        using the <filename>BBPATH</filename> variable that was
        just constructed.
        The <filename>bitbake.conf</filename> file may also include other
@@ -136,18 +117,18 @@
        optional <filename>conf/bblayers.conf</filename> configuration file.
        This file is expected to contain a
        <link linkend='var-BBLAYERS'><filename>BBLAYERS</filename></link>
        variable that is a space-delimited list of 'layer' directories.
        variable that is a space delimited list of 'layer' directories.
        Recall that if BitBake cannot find a <filename>bblayers.conf</filename>
        file, then it is assumed the user has set the <filename>BBPATH</filename>
        and <filename>BBFILES</filename> variables directly in the environment.
        file then it is assumed the user has set the <filename>BBPATH</filename>
        and <filename>BBFILES</filename> directly in the environment.
    </para>

    <para>
        For each directory (layer) in this list, a <filename>conf/layer.conf</filename>
        file is located and parsed with the
        file is searched for and parsed with the
        <link linkend='var-LAYERDIR'><filename>LAYERDIR</filename></link>
        variable being set to the directory where the layer was found.
        The idea is these files automatically set up
        The idea is these files automatically setup
        <link linkend='var-BBPATH'><filename>BBPATH</filename></link>
        and other variables correctly for a given build directory.
    </para>
@@ -162,7 +143,7 @@

    <para>
        Only variable definitions and include directives are allowed
        in BitBake <filename>.conf</filename> files.
        in <filename>.conf</filename> files.
        Some variables directly influence BitBake's behavior.
        These variables might have been set from the environment
        depending on the environment variables previously
@@ -185,8 +166,7 @@
        Other classes that are specified in the configuration using the
        <link linkend='var-INHERIT'><filename>INHERIT</filename></link>
        variable are also included.
        BitBake searches for class files in a
        <filename>classes</filename> subdirectory under
        BitBake searches for class files in a "classes" subdirectory under
        the paths in <filename>BBPATH</filename> in the same way as
        configuration files.
    </para>
@@ -209,7 +189,7 @@
        If a recipe uses a closing curly brace within the function and
        the character has no leading spaces, BitBake produces a parsing
        error.
        If you use a pair of curly braces in a shell function, the
        If you use a pair of curly brace in a shell function, the
        closing curly brace must not be located at the start of the line
        without leading spaces.
    </para>
@@ -281,14 +261,14 @@
        One common convention is to use the recipe filename to define
        pieces of metadata.
        For example, in <filename>bitbake.conf</filename> the recipe
        name and version are used to set the variables
        name and version set
        <link linkend='var-PN'><filename>PN</filename></link> and
        <link linkend='var-PV'><filename>PV</filename></link>:
        <literallayout class='monospaced'>
     PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[0] or 'defaultpkgname'}"
     PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[1] or '1.0'}"
     PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[0] or 'defaultpkgname'}"
        </literallayout>
        In this example, a recipe called "something_1.2.3.bb" would set
        In this example, a recipe called "something_1.2.3.bb" sets
        <filename>PN</filename> to "something" and
        <filename>PV</filename> to "1.2.3".
    </para>
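As an illustration of the filename convention described above (a toy reimplementation, not the actual bb.parse.BBHandler.vars_from_file):

    import os

    def pn_pv_from_file(recipe):
        # "something_1.2.3.bb" -> ("something", "1.2.3"), with the same
        # fallbacks bitbake.conf applies when a part is missing.
        base = os.path.basename(recipe)[:-len(".bb")]
        pn, _, pv = base.partition("_")
        return pn or "defaultpkgname", pv or "1.0"

    assert pn_pv_from_file("something_1.2.3.bb") == ("something", "1.2.3")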
@@ -351,55 +331,38 @@
    </section>

    <section id='bb-bitbake-providers'>
        <title>Providers</title>
        <title>Preferences and Providers</title>

        <para>
            Assuming BitBake has been instructed to execute a target
            and that all the recipe files have been parsed, BitBake
            starts to figure out how to build the target.
            BitBake looks through the <filename>PROVIDES</filename> list
            for each of the recipes.
            A <filename>PROVIDES</filename> list is the list of names by which
            the recipe can be known.
            Each recipe's <filename>PROVIDES</filename> list is created
            implicitly through the recipe's
            <link linkend='var-PN'><filename>PN</filename></link> variable
            and explicitly through the recipe's
            BitBake starts by looking through the
            <link linkend='var-PROVIDES'><filename>PROVIDES</filename></link>
            variable, which is optional.
            set in recipe files.
            The default <filename>PROVIDES</filename> for a recipe is its name
            (<link linkend='var-PN'><filename>PN</filename></link>),
            however, a recipe can provide multiple things.
        </para>

        <para>
            When a recipe uses <filename>PROVIDES</filename>, that recipe's
            functionality can be found under an alternative name or names other
            than the implicit <filename>PN</filename> name.
            As an example, suppose a recipe named <filename>keyboard_1.0.bb</filename>
            contained the following:
            As an example of adding an extra provider, suppose a recipe named
            <filename>foo_1.0.bb</filename> contained the following:
            <literallayout class='monospaced'>
     PROVIDES += "fullkeyboard"
     PROVIDES += "virtual/bar_1.0"
            </literallayout>
            The <filename>PROVIDES</filename> list for this recipe becomes
            "keyboard", which is implicit, and "fullkeyboard", which is explicit.
            Consequently, the functionality found in
            <filename>keyboard_1.0.bb</filename> can be found under two
            different names.
        </para>
    </section>

    <section id='bb-bitbake-preferences'>
        <title>Preferences</title>

        <para>
            The <filename>PROVIDES</filename> list is only part of the solution
            for figuring out a target's recipes.
            Because targets might have multiple providers, BitBake needs
            to prioritize providers by determining provider preferences.
            The recipe now provides both "foo_1.0" and "virtual/bar_1.0".
            The "virtual/" namespace is often used to denote cases where
            multiple providers are expected with the user choosing between
            them.
            Kernels and toolchain components are common cases of this in
            OpenEmbedded.
        </para>

        <para>
            A common example in which a target has multiple providers
            is "virtual/kernel", which is on the
            <filename>PROVIDES</filename> list for each kernel recipe.
            Sometimes a target might have multiple providers.
            A common example is "virtual/kernel", which is provided by each
            kernel recipe.
            Each machine often selects the best kernel provider by using a
            line similar to the following in the machine configuration file:
            <literallayout class='monospaced'>
@@ -414,7 +377,7 @@

        <para>
            Understanding how providers are chosen is made complicated by the fact
            that multiple versions might exist for a given provider.
            that multiple versions might exist.
            BitBake defaults to the highest version of a provider.
            Version comparisons are made using the same method as Debian.
            You can use the
@@ -423,19 +386,13 @@
            You can influence the order by using the
            <link linkend='var-DEFAULT_PREFERENCE'><filename>DEFAULT_PREFERENCE</filename></link>
            variable.
        </para>

        <para>
            By default, files have a preference of "0".
            Setting <filename>DEFAULT_PREFERENCE</filename> to "-1" makes the
            Setting the <filename>DEFAULT_PREFERENCE</filename> to "-1" makes the
            recipe unlikely to be used unless it is explicitly referenced.
            Setting <filename>DEFAULT_PREFERENCE</filename> to "1" makes it
            likely the recipe is used.
            <filename>PREFERRED_VERSION</filename> overrides any
            <filename>DEFAULT_PREFERENCE</filename> setting.
            <filename>DEFAULT_PREFERENCE</filename> is often used to mark newer
            and more experimental recipe versions until they have undergone
            sufficient testing to be considered stable.
            Setting the <filename>DEFAULT_PREFERENCE</filename> to "1" makes it likely the recipe is used.
            <filename>PREFERRED_VERSION</filename> overrides any <filename>DEFAULT_PREFERENCE</filename> setting.
            <filename>DEFAULT_PREFERENCE</filename> is often used to mark newer and more experimental recipe
            versions until they have undergone sufficient testing to be considered stable.
        </para>

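A toy model of the selection rules both versions of this section describe: PREFERRED_VERSION wins outright, otherwise the highest DEFAULT_PREFERENCE and then the highest version (real BitBake compares versions Debian-style rather than lexically, and this sketch is not its resolver):

    def choose_provider(candidates, preferred_version=None):
        # candidates: list of (version, default_preference) pairs.
        if preferred_version is not None:
            for version, pref in candidates:
                if version == preferred_version:
                    return version
        return max(candidates, key=lambda c: (c[1], c[0]))[0]

    assert choose_provider([("1.1", 0), ("1.2", 0)]) == "1.2"
    assert choose_provider([("1.1", 0), ("1.2", 0)], "1.1") == "1.1"
    assert choose_provider([("1.22.1", 0), ("git", -1)]) == "1.22.1"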
<para>
@@ -444,16 +401,18 @@
    version, unless otherwise specified.
    If the recipe in question has a
    <link linkend='var-DEFAULT_PREFERENCE'><filename>DEFAULT_PREFERENCE</filename></link>
    set lower than the other recipes (default is 0), then
    it will not be selected.
    set lower than
    the other recipes (default is 0), then it will not be
    selected.
    This allows the person or persons maintaining
    the repository of recipe files to specify
    their preference for the default selected version.
    Additionally, the user can specify their preferred version.
    In addition, the user can specify their preferred version.
</para>

<para>
    If the first recipe is named <filename>a_1.1.bb</filename>, then the
    If the first recipe is named <filename>a_1.1.bb</filename>,
    then the
    <link linkend='var-PN'><filename>PN</filename></link> variable
    will be set to “a”, and the
    <link linkend='var-PV'><filename>PV</filename></link>
@@ -461,38 +420,19 @@
</para>

<para>
    Thus, if a recipe named <filename>a_1.2.bb</filename> exists, BitBake
    If we then have a recipe named <filename>a_1.2.bb</filename>, BitBake
    will choose 1.2 by default.
    However, if you define the following variable in a
    <filename>.conf</filename> file that BitBake parses, you
    can change that preference:
    However, if we define the following variable in a
    <filename>.conf</filename> file that BitBake parses, we
    can change that.
    <literallayout class='monospaced'>
     PREFERRED_VERSION_a = "1.1"
    </literallayout>
</para>

<note>
    <para>
        It is common for a recipe to provide two versions -- a stable,
        numbered (and preferred) version, and a version that is
        automatically checked out from a source code repository that
        is considered more "bleeding edge" but can be selected only
        explicitly.
    </para>

    <para>
        For example, in the OpenEmbedded codebase, there is a standard,
        versioned recipe file for BusyBox,
        <filename>busybox_1.22.1.bb</filename>,
        but there is also a Git-based version,
        <filename>busybox_git.bb</filename>, which explicitly contains the line
        <literallayout class='monospaced'>
     DEFAULT_PREFERENCE = "-1"
        </literallayout>
        to ensure that the numbered, stable version is always preferred
        unless the developer selects otherwise.
    </para>
</note>
<para>
    In summary, BitBake has created a list of providers, which is prioritized, for each target.
</para>
</section>

<section id='bb-bitbake-dependencies'>
@@ -555,7 +495,7 @@
    As each task completes, a timestamp is written to the directory specified by the
    <link linkend='var-STAMP'><filename>STAMP</filename></link> variable.
    On subsequent runs, BitBake looks in the build directory within
    <filename>tmp/stamps</filename> and does not rerun
    <filename>tmp/stamps</filename>and does not rerun
    tasks that are already completed unless a timestamp is found to be invalid.
    Currently, invalid timestamps are only considered on a per
    recipe file basis.
@@ -595,7 +535,7 @@
    <title>Executing Tasks</title>

    <para>
        Tasks can be either a shell task or a Python task.
        Tasks can either be a shell task or a Python task.
        For shell tasks, BitBake writes a shell script to
        <filename>${</filename><link linkend='var-T'><filename>T</filename></link><filename>}/run.do_taskname.pid</filename>
        and then executes the script.
@@ -815,9 +755,9 @@
    to determine the stamps and delta where these two
    stamp trees diverge.
    <note>
        It is likely that future versions of BitBake will
        It is likely that future versions of BitBake with
        provide other signature handlers triggered through
        additional "-S" parameters.
        additional "-S" paramters.
    </note>
</para>

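A toy version of the stamp test described earlier in this chapter (BitBake's real check also walks dependencies, so this is only the core idea): rerun a task when its stamp is missing or older than the recipe:

    import os

    def need_rerun(recipe, stampfile):
        # Missing or stale stamp -> the task must run again.
        if not os.path.exists(stampfile):
            return True
        return os.path.getmtime(stampfile) < os.path.getmtime(recipe)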
@@ -8,14 +8,14 @@
    BitBake's fetch module is a standalone piece of library code
    that deals with the intricacies of downloading source code
    and files from remote systems.
    Fetching source code is one of the cornerstones of building software.
    Fetching source code is one of the corner stones of building software.
    As such, this module forms an important part of BitBake.
</para>

<para>
    The current fetch module is called "fetch2" and refers to the
    fact that it is the second major version of the API.
    The original version is obsolete and has been removed from the codebase.
    The original version is obsolete and removed from the codebase.
    Thus, in all cases, "fetch" refers to "fetch2" in this
    manual.
</para>
@@ -60,19 +60,17 @@
    <note>
        For convenience, the naming in these examples matches
        the variables used by OpenEmbedded.
        If you want to see the above code in action, examine
        the OpenEmbedded class file <filename>base.bbclass</filename>.
    </note>
    The <filename>SRC_URI</filename> and <filename>WORKDIR</filename>
    variables are not hardcoded into the fetcher, since those fetcher
    methods can be (and are) called with different variable names.
    variables are not coded into the fetcher.
    They variables can (and are) called with different variable names.
    In OpenEmbedded for example, the shared state (sstate) code uses
    the fetch module to fetch the sstate files.
</para>

<para>
    When the <filename>download()</filename> method is called,
    BitBake tries to resolve the URLs by looking for source files
    BitBake tries to fulfill the URLs by looking for source files
    in a specific search order:
    <itemizedlist>
        <listitem><para><emphasis>Pre-mirror Sites:</emphasis>
@@ -86,7 +84,7 @@
            <filename>SRC_URI</filename>).
        </para></listitem>
        <listitem><para><emphasis>Mirror Sites:</emphasis>
            If fetch failures occur, BitBake next uses mirror locations as
            If fetch failures occur, BitBake next uses mirror location as
            defined by the
            <link linkend='var-MIRRORS'><filename>MIRRORS</filename></link>
            variable.
@@ -151,7 +149,7 @@
<para>
    File integrity is of key importance for reproducing builds.
    For non-local archive downloads, the fetcher code can verify
    SHA-256 and MD5 checksums to ensure the archives have been
    sha256 and md5 checksums to ensure the archives have been
    downloaded correctly.
    You can specify these checksums by using the
    <filename>SRC_URI</filename> variable with the appropriate
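A sketch of the integrity check described above: compute a file's SHA-256 digest and compare it against the recorded checksum, as the fetcher does for SRC_URI entries (verify_sha256 is an invented helper, not fetch2 API):

    import hashlib

    def verify_sha256(path, expected):
        h = hashlib.sha256()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(1 << 20), b""):
                h.update(chunk)
        return h.hexdigest() == expected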
@@ -262,8 +260,8 @@
|
||||
<para>
|
||||
This submodule handles URLs that begin with
|
||||
<filename>file://</filename>.
|
||||
The filename you specify within the URL can be
|
||||
either an absolute or relative path to a file.
|
||||
The filename you specify with in the URL can
|
||||
either be an absolute or relative path to a file.
|
||||
If the filename is relative, the contents of the
|
||||
<link linkend='var-FILESPATH'><filename>FILESPATH</filename></link>
|
||||
variable is used in the same way
|
||||
@@ -288,45 +286,15 @@
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Here are a couple of example URLs, the first relative and
|
||||
the second absolute:
|
||||
Here are some example URLs:
|
||||
<literallayout class='monospaced'>
|
||||
SRC_URI = "file://relativefile.patch"
|
||||
SRC_URI = "file://relativefile.patch;this=ignored"
|
||||
SRC_URI = "file:///Users/ich/very_important_software"
|
||||
</literallayout>
|
||||
</para>
|
||||
</section>
|
||||
|
||||
-<section id='http-ftp-fetcher'>
-<title>HTTP/FTP wget fetcher (<filename>http://</filename>, <filename>ftp://</filename>, <filename>https://</filename>)</title>
-
-<para>
-This fetcher obtains files from web and FTP servers.
-Internally, the fetcher uses the wget utility.
-</para>
-
-<para>
-The executable and parameters used are specified by the
-<filename>FETCHCMD_wget</filename> variable, which defaults
-to sensible values.
-The fetcher supports a parameter "downloadfilename" that
-allows the name of the downloaded file to be specified.
-Specifying the name of the downloaded file is useful
-for avoiding collisions in
-<link linkend='var-DL_DIR'><filename>DL_DIR</filename></link>
-when dealing with multiple files that have the same name.
-</para>
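<para>
As a hedged illustration of the "downloadfilename" parameter
described above; the URL and renamed file are hypothetical:
<literallayout class='monospaced'>
SRC_URI = "http://example.com/releases/v1.0.tar.gz;downloadfilename=myproject-1.0.tar.gz"
</literallayout>
</para>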
-
-<para>
-Some example URLs are as follows:
-<literallayout class='monospaced'>
-SRC_URI = "http://oe.handhelds.org/not_there.aac"
-SRC_URI = "ftp://oe.handhelds.org/not_there_as_well.aac"
-SRC_URI = "ftp://you@oe.handhelds.org/home/you/secret.plan"
-</literallayout>
-</para>
-</section>

<section id='cvs-fetcher'>
<title>CVS fetcher (<filename>cvs://</filename>)</title>

@@ -345,7 +313,7 @@
A special value of "now" causes the checkout to
be updated on every build.
</para></listitem>
-<listitem><para><emphasis><link linkend='var-CVSDIR'><filename>CVSDIR</filename></link>:</emphasis>
+<listitem><para><emphasis><filename>CVSDIR</filename>:</emphasis>
Specifies where a temporary checkout is saved.
The location is often <filename>DL_DIR/cvs</filename>.
</para></listitem>
@@ -366,7 +334,7 @@
The supported parameters are as follows:
<itemizedlist>
<listitem><para><emphasis>"method":</emphasis>
-The protocol over which to communicate with the CVS server.
+The protocol over which to communicate with the cvs server.
By default, this protocol is "pserver".
If "method" is set to "ext", BitBake examines the
"rsh" parameter and sets <filename>CVS_RSH</filename>
(see the sketch near the end of this section).
@@ -394,8 +362,7 @@
Effectively, you are renaming the output directory
to which the module is unpacked.
You are forcing the module into a special
-directory relative to
-<link linkend='var-CVSDIR'><filename>CVSDIR</filename></link>.
+directory relative to <filename>CVSDIR</filename>.
</para></listitem>
<listitem><para><emphasis>"rsh"</emphasis>
Used in conjunction with the "method" parameter.
@@ -427,6 +394,36 @@
</para>
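<para>
A hedged sketch of a CVS URL combining the "method" and "rsh"
parameters discussed above; the server, module, and account are
hypothetical:
<literallayout class='monospaced'>
SRC_URI = "cvs://anonymous@cvs.example.org/var/cvsroot;module=mymodule;method=ext;rsh=ssh"
</literallayout>
</para>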
</section>

+<section id='http-ftp-fetcher'>
+<title>HTTP/FTP wget fetcher (<filename>http://</filename>, <filename>ftp://</filename>, <filename>https://</filename>)</title>
+
+<para>
+This fetcher obtains files from web and FTP servers.
+Internally, the fetcher uses the wget utility.
+</para>
+
+<para>
+The executable and parameters used are specified by the
+<filename>FETCHCMD_wget</filename> variable, which defaults
+to a sensible values.
+The fetcher supports a parameter "downloadfilename" that
+allows the name of the downloaded file to be specified.
+Specifying the name of the downloaded file is useful
+for avoiding collisions in
+<link linkend='var-DL_DIR'><filename>DL_DIR</filename></link>
+when dealing with multiple files that have the same name.
+</para>
+
+<para>
+Some example URLs are as follows:
+<literallayout class='monospaced'>
+SRC_URI = "http://oe.handhelds.org/not_there.aac"
+SRC_URI = "ftp://oe.handhelds.org/not_there_as_well.aac"
+SRC_URI = "ftp://you@oe.handheld.sorg/home/you/secret.plan"
+</literallayout>
+</para>
+</section>

<section id='svn-fetcher'>
<title>Subversion (SVN) Fetcher (<filename>svn://</filename>)</title>

@@ -436,9 +433,9 @@
The executable used is specified by
<filename>FETCHCMD_svn</filename>, which defaults
to "svn".
-The fetcher's temporary working directory is set by
-<link linkend='var-SVNDIR'><filename>SVNDIR</filename></link>,
-which is usually <filename>DL_DIR/svn</filename>.
+The fetcher's temporary working directory is set
+by <filename>SVNDIR</filename>, which is usually
+<filename>DL_DIR/svn</filename>.
</para>

<para>
@@ -469,13 +466,6 @@
compile-time when set to "keep".
By default, these directories are removed.
</para></listitem>
-<listitem><para><emphasis>"transportuser":</emphasis>
-When required, sets the username for the transport.
-By default, this parameter is empty.
-The transport username is different than the username
-used in the main URL, which is passed to the subversion
-command.
-</para></listitem>
</itemizedlist>
Following are two examples using svn:
<literallayout class='monospaced'>
@@ -486,15 +476,14 @@
</section>

<section id='git-fetcher'>
-<title>Git Fetcher (<filename>git://</filename>)</title>
+<title>GIT Fetcher (<filename>git://</filename>)</title>

<para>
This fetcher submodule fetches code from the Git
source control system.
The fetcher works by creating a bare clone of the
-remote into
-<link linkend='var-GITDIR'><filename>GITDIR</filename></link>,
-which is usually <filename>DL_DIR/git2</filename>.
+remote into <filename>GITDIR</filename>, which is
+usually <filename>DL_DIR/git</filename>.
This bare clone is then cloned into the work directory during the
unpack stage when a specific tree is checked out.
This is done using alternates and by reference to
@@ -562,7 +551,7 @@
network.
For that reason, tags are often not used.
As far as Git is concerned, the "tag" parameter behaves
-effectively the same as the "rev" parameter.
+effectively the same as the "revision" parameter.
</para></listitem>
<listitem><para><emphasis>"subpath":</emphasis>
Limits the checkout to a specific subpath of the tree.
@@ -581,116 +570,6 @@
</para>
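<para>
A hedged sketch of Git URLs using the "tag" and "rev" parameters
discussed above; the repository is hypothetical and the revision
value stands in for a full 40-character commit hash:
<literallayout class='monospaced'>
SRC_URI = "git://git.example.org/myproject;tag=v1.0"
SRC_URI = "git://git.example.org/myproject;rev=&lt;commit hash&gt;"
</literallayout>
</para>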
</section>

<section id='gitsm-fetcher'>
<title>Git Submodule Fetcher (<filename>gitsm://</filename>)</title>

<para>
This fetcher submodule inherits from the
<link linkend='git-fetcher'>Git fetcher</link> and extends
that fetcher's behavior by fetching a repository's submodules.
<link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>
is passed to the Git fetcher as described in the
"<link linkend='git-fetcher'>Git Fetcher (<filename>git://</filename>)</link>"
section.
<note>
<title>Notes and Warnings</title>
<para>
You must clean a recipe when switching between
'<filename>git://</filename>' and
'<filename>gitsm://</filename>' URLs.
</para>

<para>
The Git Submodules fetcher is not a complete fetcher
implementation.
The fetcher has known issues where it does not use the
normal source mirroring infrastructure properly.
</para>
</note>
</para>
</section>

<section id='clearcase-fetcher'>
<title>ClearCase Fetcher (<filename>ccrc://</filename>)</title>

<para>
This fetcher submodule fetches code from a
<ulink url='http://en.wikipedia.org/wiki/Rational_ClearCase'>ClearCase</ulink>
repository.
</para>

<para>
To use this fetcher, make sure your recipe has proper
<link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>,
<link linkend='var-SRCREV'><filename>SRCREV</filename></link>, and
<link linkend='var-PV'><filename>PV</filename></link> settings.
Here is an example:
<literallayout class='monospaced'>
SRC_URI = "ccrc://cc.example.org/ccrc;vob=/example_vob;module=/example_module"
SRCREV = "EXAMPLE_CLEARCASE_TAG"
PV = "${@d.getVar("SRCREV").replace("/", "+")}"
</literallayout>
The fetcher uses the <filename>rcleartool</filename> or
<filename>cleartool</filename> remote client, depending on
which one is available.
</para>

<para>
Following are options for the <filename>SRC_URI</filename>
statement:
<itemizedlist>
<listitem><para><emphasis><filename>vob</filename></emphasis>:
The name, which must include the
leading "/" character, of the ClearCase VOB.
This option is required.
</para></listitem>
<listitem><para><emphasis><filename>module</filename></emphasis>:
The module, which must include the
leading "/" character, in the selected VOB.
The <filename>module</filename> and <filename>vob</filename>
options are combined to create the following load rule in
the view config spec:
<literallayout class='monospaced'>
load <vob><module>
</literallayout>
</para></listitem>
<listitem><para><emphasis><filename>proto</filename></emphasis>:
The protocol, which can be either <filename>http</filename> or
<filename>https</filename>.
</para></listitem>
</itemizedlist>
</para>

<para>
By default, the fetcher creates a configuration specification.
If you want this specification written to an area other than the default,
use the <filename>CCASE_CUSTOM_CONFIG_SPEC</filename> variable
in your recipe to define where the specification is written.
<note>
The <filename>SRCREV</filename> loses its functionality if you
specify this variable.
However, <filename>SRCREV</filename> is still used to label the
archive after a fetch even though it does not define what is
fetched.
</note>
</para>

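<para>
A hypothetical sketch of redirecting the generated configuration
specification; the path here is an assumption, not a documented
default:
<literallayout class='monospaced'>
CCASE_CUSTOM_CONFIG_SPEC = "${WORKDIR}/custom-config-spec"
</literallayout>
</para>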
<para>
Here are a couple of other behaviors worth mentioning:
<itemizedlist>
<listitem><para>
When using <filename>cleartool</filename>, the login of
<filename>cleartool</filename> is handled by the system.
The login requires no special steps.
</para></listitem>
<listitem><para>
In order to use <filename>rcleartool</filename> with authenticated
users, an "rcleartool login" is necessary before using the fetcher.
</para></listitem>
</itemizedlist>
</para>
</section>

<section id='other-fetchers'>
<title>Other Fetchers</title>

@@ -703,6 +582,9 @@
<listitem><para>
Perforce (<filename>p4://</filename>)
</para></listitem>
+<listitem><para>
+Git Submodules (<filename>gitsm://</filename>)
+</para></listitem>
<listitem><para>
Trees using Git Annex (<filename>gitannex://</filename>)
</para></listitem>

@@ -135,7 +135,7 @@
<ulink url="http://www.mail-archive.com/yocto@yoctoproject.org/msg09379.html">Mailing List post - The BitBake equivalent of "Hello, World!"</ulink>
</para></listitem>
<listitem><para>
-<ulink url="https://web.archive.org/web/20150325165911/http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/">Hambedded Linux blog post - From Bitbake Hello World to an Image</ulink>
+<ulink url="http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/">Hambedded Linux blog post - From Bitbake Hello World to an Image</ulink>
</para></listitem>
</itemizedlist>
</note>
@@ -270,7 +270,7 @@
and define some key BitBake variables.
For more information on the <filename>bitbake.conf</filename>,
see
-<ulink url='https://web.archive.org/web/20150325165911/http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/#an-overview-of-bitbakeconf'></ulink>
+<ulink url='http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/#an-overview-of-bitbakeconf'></ulink>
</para>
<para>Use the following commands to create the <filename>conf</filename>
directory in the project directory:
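(The commands themselves fall outside this excerpt; presumably
they amount to something like the following, run from the top of
the project directory:)
<literallayout class='monospaced'>
$ mkdir conf
</literallayout>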
@@ -355,7 +355,7 @@ ERROR: Unable to parse base: ParseError in configuration INHERITs: Could not inh
supporting.
For more information on the <filename>base.bbclass</filename> file,
you can look at
-<ulink url='https://web.archive.org/web/20150325165911/http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/#tasks'></ulink>.
+<ulink url='http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/#tasks'></ulink>.
</para></listitem>
<listitem><para><emphasis>Run Bitbake:</emphasis>
After making sure that the <filename>classes/base.bbclass</filename>
@@ -377,7 +377,7 @@ ERROR: Unable to parse base: ParseError in configuration INHERITs: Could not inh
Thus, this example creates and uses a layer called "mylayer".
<note>
You can find additional information on adding a layer at
-<ulink url='https://web.archive.org/web/20150325165911/http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/#adding-an-example-layer'></ulink>.
+<ulink url='http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/#adding-an-example-layer'></ulink>.
</note>
</para>
<para>Minimally, you need a recipe file and a layer configuration
@@ -471,7 +471,7 @@ ERROR: Unable to parse base: ParseError in configuration INHERITs: Could not inh
Time: 00:00:00
Parsing of 1 .bb files complete (0 cached, 1 parsed). 1 targets, 0 skipped, 0 masked, 0 errors.
NOTE: Resolving any missing task queue dependencies
-NOTE: Preparing RunQueue
+NOTE: Preparing runqueue
NOTE: Executing RunQueue Tasks
********************
*                  *

@@ -8,9 +8,9 @@
Welcome to the BitBake User Manual.
This manual provides information on the BitBake tool.
The information attempts to be as independent as possible regarding
-systems that use BitBake, such as OpenEmbedded and the
-Yocto Project.
-In some cases, scenarios or examples within the context of
+systems that use BitBake, such as the Yocto Project and
+OpenEmbedded.
+In some cases, scenarios or examples that within the context of
a build system are used in the manual to help with understanding.
For these cases, the manual clearly states the context.
</para>
@@ -35,31 +35,28 @@
<listitem><para>
BitBake executes tasks according to provided
metadata that builds up the tasks.
-Metadata is stored in recipe (<filename>.bb</filename>)
-and related recipe "append" (<filename>.bbappend</filename>)
-files, configuration (<filename>.conf</filename>) and
-underlying include (<filename>.inc</filename>) files, and
-in class (<filename>.bbclass</filename>) files.
-The metadata provides
+Metadata is stored in recipe (<filename>.bb</filename>),
+configuration (<filename>.conf</filename>), and class
+(<filename>.bbclass</filename>) files and provides
BitBake with instructions on what tasks to run and
the dependencies between those tasks.
</para></listitem>
<listitem><para>
BitBake includes a fetcher library for obtaining source
-code from various places such as local files, source control
-systems, or websites.
+code from various places such as source control
+systems or websites.
</para></listitem>
<listitem><para>
The instructions for each unit to be built (e.g. a piece
-of software) are known as "recipe" files and
+of software) are known as recipe files and
contain all the information about the unit
(dependencies, source file locations, checksums, description
and so on).
</para></listitem>
<listitem><para>
BitBake includes a client/server abstraction and can
-be used from a command line or used as a service over
-XML-RPC and has several different user interfaces.
+be used from a command line or used as a service over XMLRPC and
+has several different user interfaces.
</para></listitem>
</itemizedlist>
</para>
@@ -72,7 +69,7 @@
BitBake was originally a part of the OpenEmbedded project.
It was inspired by the Portage package management system
used by the Gentoo Linux distribution.
-On December 7, 2004, OpenEmbedded project team member
+On December 7, 2004, OpenEmbedded project team member,
Chris Larson split the project into two distinct pieces:
<itemizedlist>
<listitem><para>BitBake, a generic task executor</para></listitem>
@@ -82,11 +79,8 @@
Today, BitBake is the primary basis of the
<ulink url="http://www.openembedded.org/">OpenEmbedded</ulink>
project, which is being used to build and maintain Linux
-distributions such as the
-<ulink url='http://www.angstrom-distribution.org/'>Angstrom Distribution</ulink>,
-and which is also being used as the build tool for Linux projects
-such as the
-<ulink url='http://www.yoctoproject.org'>Yocto Project</ulink>.
+distributions such as the Angstrom Distribution and which is used
+as the build tool for Linux projects such as the Yocto Project.
</para>

<para>
@@ -94,7 +88,7 @@
an aspiring embedded Linux distribution.
All of the build systems used by traditional desktop Linux
distributions lacked important functionality, and none of the
-ad hoc Buildroot-based systems, prevalent in the
+ad-hoc Buildroot-based systems, prevalent in the
embedded space, were scalable or maintainable.
</para>

@@ -144,7 +138,7 @@
projects for their builds.
</para></listitem>
<listitem><para>
-Provide an inheritance mechanism to share
+Provide an inheritance mechanism that share
common metadata between many packages.
</para></listitem>
</itemizedlist>
@@ -157,7 +151,7 @@
</para></listitem>
<listitem><para>
Split metadata into layers and allow layers
-to enhance or override other layers.
+to override each other.
</para></listitem>
<listitem><para>
Allow representation of a given set of input variables
@@ -184,14 +178,14 @@
what tasks are required to run, and executes those tasks.
Similar to GNU Make, BitBake controls how software is
built.
-GNU Make achieves its control through "makefiles", while
+GNU Make achieves its control through "makefiles".
BitBake uses "recipes".
</para>

<para>
BitBake extends the capabilities of a simple
-tool like GNU Make by allowing for the definition of much more
-complex tasks, such as assembling entire embedded Linux
+tool like GNU Make by allowing for much more complex tasks
+to be completed, such as assembling entire embedded Linux
distributions.
</para>

@@ -209,20 +203,14 @@
<filename>.bb</filename>, are the most basic metadata files.
These recipe files provide BitBake with the following:
<itemizedlist>
-<listitem><para>Descriptive information about the
-package (author, homepage, license, and so on)</para></listitem>
+<listitem><para>Descriptive information about the package</para></listitem>
<listitem><para>The version of the recipe</para></listitem>
-<listitem><para>Existing dependencies (both build
-and runtime dependencies)</para></listitem>
-<listitem><para>Where the source code resides and
-how to fetch it</para></listitem>
-<listitem><para>Whether the source code requires
-any patches, where to find them, and how to apply
-them</para></listitem>
-<listitem><para>How to configure and compile the
-source code</para></listitem>
+<listitem><para>Existing Dependencies</para></listitem>
+<listitem><para>Where the source code resides</para></listitem>
+<listitem><para>Whether the source code requires any patches</para></listitem>
+<listitem><para>How to compile the source code</para></listitem>
<listitem><para>Where on the target machine to install the
-package or packages created</para></listitem>
+package being compiled</para></listitem>
</itemizedlist>
</para>
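<para>
For illustration, a minimal hypothetical recipe touching the
items listed above; every name and value here is made up:
<literallayout class='monospaced'>
DESCRIPTION = "Example utility"
HOMEPAGE = "http://example.com/hello"
LICENSE = "MIT"
PV = "1.0"
DEPENDS = "zlib"
SRC_URI = "http://example.com/hello-${PV}.tar.gz \
           file://fix-build.patch"
</literallayout>
</para>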

@@ -234,11 +222,7 @@
The term "package" is also commonly used to describe recipes.
However, since the same word is used to describe packaged
output from a project, it is best to maintain a single
-descriptive term - "recipes".
-Put another way, a single "recipe" file is quite capable
-of generating a number of related but separately installable
-"packages".
-In fact, that ability is fairly common.
+descriptive term, "recipes".
</note>
</para>
</section>
@@ -273,7 +257,7 @@
called <filename>base.bbclass</filename>.
You can find this file in the
<filename>classes</filename> directory.
-The <filename>base.bbclass</filename> class files is special since it
+The <filename>base.bbclass</filename> is special since it
is always included automatically for all recipes
and classes.
This class contains definitions for standard basic tasks such
@@ -300,8 +284,7 @@
To illustrate how you can use layers to keep things modular,
consider customizations you might make to support a specific target machine.
These types of customizations typically reside in a special layer,
-rather than a general layer, called a Board Support Package (BSP)
-Layer.
+rather than a general layer, called a Board Specific Package (BSP) Layer.
Furthermore, the machine customizations should be isolated from
recipes and metadata that support a new GUI environment, for
example.
@@ -321,8 +304,9 @@

<para>
Append files, which are files that have the
-<filename>.bbappend</filename> file extension, extend or
-override information in an existing recipe file.
+<filename>.bbappend</filename> file extension, add or
+extend build information to an existing
+recipe file.
</para>

<para>
@@ -335,9 +319,8 @@
</para>

<para>
-Information in append files extends or
-overrides the information in the underlying,
-similarly-named recipe files.
+Information in append files overrides the information in the
+similarly-named recipe file.
</para>

<para>
@@ -362,12 +345,6 @@
However, if you named the append file
<filename>busybox_1.%.bbappend</filename>, then you would have a match.
</para>
-
-<para>
-In the most general case, you could name the append file something as
-simple as <filename>busybox_%.bbappend</filename> to be entirely
-version independent.
-</para>
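<para>
A hedged sketch of such a version-independent append file; the
layer path and the search-path idiom are OpenEmbedded-style
assumptions rather than anything stated above:
<literallayout class='monospaced'>
# meta-mylayer/recipes-core/busybox/busybox_%.bbappend
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
SRC_URI += "file://local-tweak.patch"
</literallayout>
</para>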
</section>
</section>

@@ -436,20 +413,6 @@
you have a directory entitled
<filename>bitbake-1.17.0</filename>.
</para></listitem>
-<listitem><para><emphasis>Using the BitBake that Comes With Your
-Build Checkout:</emphasis>
-A final possibility for getting a copy of BitBake is that it
-already comes with your checkout of a larger Bitbake-based build
-system, such as Poky or Yocto Project.
-Rather than manually checking out individual layers and
-gluing them together yourself, you can check
-out an entire build system.
-The checkout will already include a version of BitBake that
-has been thoroughly tested for compatibility with the other
-components.
-For information on how to check out a particular BitBake-based
-build system, consult that build system's supporting documentation.
-</para></listitem>
</itemizedlist>
</para>
</section>
@@ -508,16 +471,14 @@
  -D, --debug           Increase the debug level. You can specify this more
                        than once.
  -n, --dry-run         Don't execute, just go through the motions.
- -S SIGNATURE_HANDLER, --dump-signatures=SIGNATURE_HANDLER
+ -S DUMP_SIGNATURES, --dump-signatures=DUMP_SIGNATURES
                        Dump out the signature construction information, with
-                       no task execution. The SIGNATURE_HANDLER parameter is
-                       passed to the handler. Two common values are none and
-                       printdiff but the handler may define more/less. none
-                       means only dump the signature, printdiff means compare
-                       the dumped signature with the cached one.
+                       no task execution. Parameters are passed to the
+                       signature handling code, use 'none' if no specific
+                       handler is required.
  -p, --parse-only      Quit after parsing the BB recipes.
  -s, --show-versions   Show current and preferred versions of all recipes.
- -e, --environment     Show the global or per-recipe environment complete
+ -e, --environment     Show the global or per-package environment complete
                        with information about where variables were
                        set/changed.
  -g, --graphviz        Save dependency tree information for the specified
@@ -532,8 +493,6 @@
  -u UI, --ui=UI        The user interface to use (e.g. knotty, hob, depexp).
  -t SERVERTYPE, --servertype=SERVERTYPE
                        Choose which server to use, process or xmlrpc.
- --token=XMLRPCTOKEN   Specify the connection token to be used when
-                       connecting to a remote server.
  --revisions-changed   Set the exit code depending on whether upstream
                        floating revisions have changed or not.
  --server-only         Run bitbake without a UI, only starting a server
@@ -600,14 +559,14 @@
when one wants to manage multiple <filename>.bb</filename>
files.
Clearly there needs to be a way to tell BitBake what
-files are available and, of those, which you
+files are available, and of those, which you
want to execute.
There also needs to be a way for each recipe
to express its dependencies, both for build-time and
runtime.
There must be a way for you to express recipe preferences
when multiple recipes provide the same functionality, or when
-there are multiple versions of a recipe.
+there are multiple versions of a recipe.
</para>

<para>

@@ -159,10 +159,6 @@
using the "+=" and "=+" operators.
These operators insert a space between the current
value and prepended or appended value.
-</para>
-
-<para>
-These operators take immediate effect during parsing.
Here are some examples:
<literallayout class='monospaced'>
B = "bval"
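# A sketch of how the example presumably continues (the remaining
# lines fall outside this excerpt); "+=" appends and "=+" prepends,
# each inserting a space:
B += "additionaldata"
C = "cval"
C =+ "test"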
@@ -182,10 +178,6 @@
<para>
If you want to append or prepend values without an
inserted space, use the ".=" and "=." operators.
-</para>
-
-<para>
-These operators take immediate effect during parsing.
Here are some examples:
<literallayout class='monospaced'>
B = "bval"
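# A sketch of a presumable continuation (outside this excerpt);
# ".=" appends and "=." prepends with no inserted space:
B .= "additionaldata"
C = "cval"
C =. "test"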
@@ -206,13 +198,6 @@
You can also append and prepend a variable's value
using an override style syntax.
When you use this syntax, no spaces are inserted.
-</para>
-
-<para>
-These operators differ from the ":=", ".=", "=.", "+=", and "=+"
-operators in that their effects are deferred
-until after parsing completes rather than being immediately
-applied.
Here are some examples:
<literallayout class='monospaced'>
B = "bval"
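# A sketch of a presumable continuation (outside this excerpt);
# override-style appends insert no space, so one is written into
# the value where needed:
B_append = " additional data"
C = "cval"
C_prepend = "additional data "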
@@ -232,6 +217,13 @@
override syntax.
</note>
</para>
+
+<para>
+The operators "_append" and "_prepend" differ from
+the operators ".=" and "=." in that they are deferred
+until after parsing completes rather than being immediately
+applied.
+</para>
</section>

<section id='removing-override-style-syntax'>
@@ -291,18 +283,7 @@
The variable <filename>FOO</filename> has two flags:
<filename>a</filename> and <filename>b</filename>.
The flags are immediately set to "abc" and "123", respectively.
-The <filename>a</filename> flag becomes "abc 456".
-</para>
-
-<para>
-No need exists to pre-define variable flags.
-You can simply start using them.
-One extremely common application
-is to attach some brief documentation to a BitBake variable as
-follows:
-<literallayout class='monospaced'>
-CACHE[doc] = "The directory holding the cache of the metadata."
-</literallayout>
+The <filename>a</filename> flag becomes "abc456".
</para>
</section>

@@ -317,19 +298,7 @@
DATE = "${@time.strftime('%Y%m%d',time.gmtime())}"
</literallayout>
This example results in the <filename>DATE</filename>
-variable being set to the current date.
-</para>
-
-<para>
-Probably the most common use of this feature is to extract
-the value of variables from BitBake's internal data dictionary,
-<filename>d</filename>.
-The following lines select the values of a package name
-and its version number, respectively:
-<literallayout class='monospaced'>
-PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[0] or 'defaultpkgname'}"
-PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[1] or '1.0'}"
-</literallayout>
+variable becoming the current date.
</para>
</section>

@@ -403,25 +372,6 @@
You select the os-specific version of the <filename>TEST</filename>
variable by appending the "os" override to the variable
(i.e. <filename>TEST_os</filename>).
-</para>
-
-<para>
-To better understand this, consider a practical example
-that assumes an OpenEmbedded metadata-based Linux
-kernel recipe file.
-The following lines from the recipe file first set
-the kernel branch variable <filename>KBRANCH</filename>
-to a default value, then conditionally override that
-value based on the architecture of the build:
-<literallayout class='monospaced'>
-KBRANCH = "standard/base"
-KBRANCH_qemuarm = "standard/arm-versatile-926ejs"
-KBRANCH_qemumips = "standard/mti-malta32"
-KBRANCH_qemuppc = "standard/qemuppc"
-KBRANCH_qemux86 = "standard/common-pc/base"
-KBRANCH_qemux86-64 = "standard/common-pc-64/base"
-KBRANCH_qemumips64 = "standard/mti-malta64"
-</literallayout>
</para></listitem>
<listitem><para><emphasis>Appending and Prepending:</emphasis>
BitBake also supports append and prepend operations to
@@ -435,19 +385,6 @@
</literallayout>
In this example, <filename>DEPENDS</filename> becomes
"glibc ncurses libmad".
-</para>
-
-<para>
-Again, using an OpenEmbedded metadata-based
-kernel recipe file as an example, the
-following lines will conditionally append to the
-<filename>KERNEL_FEATURES</filename> variable based
-on the architecture:
-<literallayout class='monospaced'>
-KERNEL_FEATURES_append = " ${KERNEL_EXTRA_FEATURES}"
-KERNEL_FEATURES_append_qemux86=" cfg/sound.scc cfg/paravirt_kvm.scc"
-KERNEL_FEATURES_append_qemux86-64=" cfg/sound.scc cfg/paravirt_kvm.scc"
-</literallayout>
</para></listitem>
</itemizedlist>
</para>
@@ -1140,6 +1077,8 @@
<filename>DISPLAY</filename> variable.
Here is an example:
<literallayout class='monospaced'>
+BB_ORIGENV - add example?
+
origenv = d.getVar("BB_ORIGENV", False)
bar = origenv.getVar("BAR", False)
</literallayout>
@@ -1201,6 +1140,12 @@
Tells BitBake to not generate a stamp file for a task,
which implies the task should always be executed.
</para></listitem>
+<listitem><para><emphasis>fakeroot:</emphasis>
+Causes a task to be run in a fakeroot environment,
+obtained by adding the variables in
+<link linkend='var-FAKEROOTENV'><filename>FAKEROOTENV</filename></link>
+to the environment.
+</para></listitem>
<listitem><para><emphasis>umask:</emphasis>
The umask to run the task under.
</para></listitem>
@@ -1306,8 +1251,8 @@
BitBake allows installation of event handlers within
recipe and class files.
Events are triggered at certain points during operation,
-such as the beginning of an operation against a given recipe
-(<filename>*.bb</filename> file), the start of a given task,
+such as the beginning of operation against a given
+<filename>.bb</filename>, the start of a given task,
task failure, task success, and so forth.
The intent is to make it easy to do things like email
notification on build failure.
@@ -1335,27 +1280,6 @@
the name of the triggered event.
</para>
-
-<para>
-Because you probably are only interested in a subset of events,
-you would likely use the <filename>[eventmask]</filename> flag
-for your event handler to be sure that only certain events
-trigger the handler.
-Given the previous example, suppose you only wanted the
-<filename>bb.build.TaskFailed</filename> event to trigger that
-event handler.
-Use the flag as follows:
-<literallayout class='monospaced'>
-addhandler myclass_eventhandler
-myclass_eventhandler[eventmask] = "bb.build.TaskFailed"
-python myclass_eventhandler() {
-    from bb.event import getName
-    from bb import data
-    print("The name of the Event is %s" % getName(e))
-    print("The file we run for is %s" % data.getVar('FILE', e.data, True))
-}
-</literallayout>
-</para>

<para>
During a standard build, the following common events might occur:
<itemizedlist>
@@ -1542,9 +1466,9 @@
complete before that task can be executed.
Here is an example:
<literallayout class='monospaced'>
-do_configure[deptask] = "do_populate_sysroot"
+do_configure[deptask] = "do_populate_staging"
</literallayout>
-In this example, the <filename>do_populate_sysroot</filename>
+In this example, the <filename>do_populate_staging</filename>
task of each item in <filename>DEPENDS</filename> must complete before
<filename>do_configure</filename> can execute.
</para>
@@ -1570,11 +1494,11 @@
item runtime dependency which must have completed before that
task can be executed.
<literallayout class='monospaced'>
-do_package_qa[rdeptask] = "do_packagedata"
+do_package_write[rdeptask] = "do_package"
</literallayout>
-In the previous example, the <filename>do_packagedata</filename>
+In the previous example, the <filename>do_package</filename>
task of each item in <filename>RDEPENDS</filename> must have
-completed before <filename>do_package_qa</filename> can execute.
+completed before <filename>do_package_write</filename> can execute.
</para>
</section>

@@ -1619,9 +1543,9 @@
the data in <filename>DEPENDS</filename>.
Here is an example:
<literallayout class='monospaced'>
-do_patch[depends] = "quilt-native:do_populate_sysroot"
+do_patch[depends] = "quilt-native:do_populate_staging"
</literallayout>
-In this example, the <filename>do_populate_sysroot</filename>
+In this example, the <filename>do_populate_staging</filename>
task of the target <filename>quilt-native</filename>
must have completed before the
<filename>do_patch</filename> task can execute.

@@ -43,8 +43,8 @@
<link linkend='var-DEFAULT_PREFERENCE'>D</link>
<link linkend='var-EXCLUDE_FROM_WORLD'>E</link>
<link linkend='var-FAKEROOT'>F</link>
-<link linkend='var-GITDIR'>G</link>
-<link linkend='var-HGDIR'>H</link>
+<!-- <link linkend='var-GROUPADD_PARAM'>G</link> -->
+<link linkend='var-HOMEPAGE'>H</link>
<!-- <link linkend='var-ICECC_DISABLED'>I</link> -->
<!-- <link linkend='var-glossary-j'>J</link> -->
<!-- <link linkend='var-KARCH'>K</link> -->
@@ -1154,15 +1154,6 @@
</glossdef>
</glossentry>

-<glossentry id='var-BZRDIR'><glossterm>BZRDIR</glossterm>
-<glossdef>
-<para>
-The directory in which files checked out of a Bazaar
-system are stored.
-</para>
-</glossdef>
-</glossentry>
-
</glossdiv>

<glossdiv id='var-glossary-c'><title>C</title>

@@ -1177,15 +1168,6 @@
</glossdef>
</glossentry>

-<glossentry id='var-CVSDIR'><glossterm>CVSDIR</glossterm>
-<glossdef>
-<para>
-The directory in which files checked out under the
-CVS system are stored.
-</para>
-</glossdef>
-</glossentry>
-
</glossdiv>

<glossdiv id='var-glossary-d'><title>D</title>

@@ -1430,32 +1412,13 @@

</glossdiv>


+<!--
<glossdiv id='var-glossary-g'><title>G</title>

<glossentry id='var-GITDIR'><glossterm>GITDIR</glossterm>
<glossdef>
<para>
The directory in which a local copy of a Git repository
is stored when it is cloned.
</para>
</glossdef>
</glossentry>

</glossdiv>

+-->

<glossdiv id='var-glossary-h'><title>H</title>

-<glossentry id='var-HGDIR'><glossterm>HGDIR</glossterm>
-<glossdef>
-<para>
-The directory in which files checked out of a Mercurial
-system are stored.
-</para>
-</glossdef>
-</glossentry>

<glossentry id='var-HOMEPAGE'><glossterm>HOMEPAGE</glossterm>
<glossdef>
<para>Website where more information about the software the recipe is building
@@ -1597,16 +1560,8 @@
BitBake uses <filename>OVERRIDES</filename> to control
what variables are overridden after BitBake parses
recipes and configuration files.
</para>

<para>
-Following is a simple example that uses an overrides
-list based on machine architectures:
-<literallayout class='monospaced'>
-OVERRIDES = "arm:x86:mips:powerpc"
-</literallayout>
-You can find information on how to use
-<filename>OVERRIDES</filename> in the
+You can find more information on how overrides are handled
+in the
"<link linkend='conditional-syntax-overrides'>Conditional Syntax (Overrides)</link>"
section.
</para>
@@ -1789,28 +1744,16 @@
<glossentry id='var-PROVIDES'><glossterm>PROVIDES</glossterm>
<glossdef>
<para>
-A list of aliases by which a particular recipe can be
-known.
-By default, a recipe's own
-<filename><link linkend='var-PN'>PN</link></filename>
-is implicitly already in its <filename>PROVIDES</filename>
-list.
-If a recipe uses <filename>PROVIDES</filename>, the
-additional aliases are synonyms for the recipe and can
-be useful satisfying dependencies of other recipes during
-the build as specified by
-<filename><link linkend='var-DEPENDS'>DEPENDS</link></filename>.
-</para>
-
-<para>
-Consider the following example
-<filename>PROVIDES</filename> statement from a recipe
-file <filename>libav_0.8.11.bb</filename>:
-<literallayout class='monospaced'>
-PROVIDES += "libpostproc"
-</literallayout>
-The <filename>PROVIDES</filename> statement results in
-the "libav" recipe also being known as "libpostproc".
+A list of aliases that a recipe also provides.
+These aliases are useful for satisfying dependencies of
+other recipes during the build (as specified by
+<filename><link linkend='var-DEPENDS'>DEPENDS</link></filename>).
+<note>
+A recipe's own
+<filename><link linkend='var-PN'>PN</link></filename>
+is implicitly already in its
+<filename>PROVIDES</filename> list.
+</note>
</para>
</glossdef>
</glossentry>
@@ -2137,15 +2080,6 @@
</glossdef>
</glossentry>

-<glossentry id='var-SVNDIR'><glossterm>SVNDIR</glossterm>
-<glossdef>
-<para>
-The directory in which files checked out of a Subversion
-system are stored.
-</para>
-</glossdef>
-</glossentry>
-
</glossdiv>

<glossdiv id='var-glossary-t'><title>T</title>

@@ -313,13 +313,6 @@ a:hover {
/*font-weight: bold;*/
}

-/* This style defines how the permalink character
-   appears by itself and when hovered over with
-   the mouse. */
-
-[alt='Permalink'] { color: #eee; }
-[alt='Permalink']:hover { color: black; }
-
div.informalfigure,
div.informalexample,
@@ -800,6 +793,7 @@ div.sect2 .titlepage .title {

h1.title {
  background-color: transparent;
+ background-image: url("figures/yocto-project-bw.png");
  background-repeat: no-repeat;
  height: 256px;
  text-indent: -9000px;

@@ -56,7 +56,7 @@
-->

<copyright>
-<year>2004-2015</year>
+<year>2004-2014</year>
<holder>Richard Purdie</holder>
<holder>Chris Larson</holder>
<holder>and Phil Blundell</holder>

@@ -89,7 +89,7 @@ quit after parsing the BB files (developers only)
show current and preferred versions of all packages
.TP
.B \-e, \-\-environment
-show the global or per-recipe environment (this is what used to be bbread)
+show the global or per-package environment (this is what used to be bbread)
.TP
.B \-g, \-\-graphviz
emit the dependency trees of the specified packages in the dot syntax

bitbake/doc/template/component.title.xsl (vendored, 39 lines)
@@ -1,39 +0,0 @@
<xsl:stylesheet version="1.0"
  xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
  xmlns:d="http://docbook.org/ns/docbook"
  xmlns="http://www.w3.org/1999/xhtml"
  exclude-result-prefixes="d">

  <xsl:template name="component.title">
    <xsl:param name="node" select="."/>

    <xsl:variable name="level">
      <xsl:choose>
        <xsl:when test="ancestor::d:section">
          <xsl:value-of select="count(ancestor::d:section)+1"/>
        </xsl:when>
        <xsl:when test="ancestor::d:sect5">6</xsl:when>
        <xsl:when test="ancestor::d:sect4">5</xsl:when>
        <xsl:when test="ancestor::d:sect3">4</xsl:when>
        <xsl:when test="ancestor::d:sect2">3</xsl:when>
        <xsl:when test="ancestor::d:sect1">2</xsl:when>
        <xsl:otherwise>1</xsl:otherwise>
      </xsl:choose>
    </xsl:variable>
    <xsl:element name="h{$level+1}" namespace="http://www.w3.org/1999/xhtml">
      <xsl:attribute name="class">title</xsl:attribute>
      <xsl:if test="$generate.id.attributes = 0">
        <xsl:call-template name="anchor">
          <xsl:with-param name="node" select="$node"/>
          <xsl:with-param name="conditional" select="0"/>
        </xsl:call-template>
      </xsl:if>
      <xsl:apply-templates select="$node" mode="object.title.markup">
        <xsl:with-param name="allow-anchors" select="1"/>
      </xsl:apply-templates>
      <xsl:call-template name="permalink">
        <xsl:with-param name="node" select="$node"/>
      </xsl:call-template>
    </xsl:element>
  </xsl:template>
</xsl:stylesheet>
bitbake/doc/template/division.title.xsl (vendored, 25 lines)
@@ -1,25 +0,0 @@
<xsl:stylesheet version="1.0"
  xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
  xmlns:d="http://docbook.org/ns/docbook"
  xmlns="http://www.w3.org/1999/xhtml"
  exclude-result-prefixes="d">

  <xsl:template name="division.title">
    <xsl:param name="node" select="."/>

    <h1>
      <xsl:attribute name="class">title</xsl:attribute>
      <xsl:call-template name="anchor">
        <xsl:with-param name="node" select="$node"/>
        <xsl:with-param name="conditional" select="0"/>
      </xsl:call-template>
      <xsl:apply-templates select="$node" mode="object.title.markup">
        <xsl:with-param name="allow-anchors" select="1"/>
      </xsl:apply-templates>
      <xsl:call-template name="permalink">
        <xsl:with-param name="node" select="$node"/>
      </xsl:call-template>
    </h1>
  </xsl:template>
</xsl:stylesheet>

bitbake/doc/template/formal.object.heading.xsl (vendored, 21 lines)
@@ -1,21 +0,0 @@
<xsl:stylesheet version="1.0"
  xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
  xmlns:d="http://docbook.org/ns/docbook"
  xmlns="http://www.w3.org/1999/xhtml"
  exclude-result-prefixes="d">

  <xsl:template name="formal.object.heading">
    <xsl:param name="object" select="."/>
    <xsl:param name="title">
      <xsl:apply-templates select="$object" mode="object.title.markup">
        <xsl:with-param name="allow-anchors" select="1"/>
      </xsl:apply-templates>
    </xsl:param>
    <p class="title">
      <b><xsl:copy-of select="$title"/></b>
      <xsl:call-template name="permalink">
        <xsl:with-param name="node" select="$object"/>
      </xsl:call-template>
    </p>
  </xsl:template>
</xsl:stylesheet>
bitbake/doc/template/gloss-permalinks.xsl (vendored, 14 lines)
@@ -1,14 +0,0 @@
<xsl:stylesheet version="1.0"
  xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
  xmlns:d="http://docbook.org/ns/docbook"
  xmlns="http://www.w3.org/1999/xhtml">

  <xsl:template match="glossentry/glossterm">
    <xsl:apply-imports/>
    <xsl:if test="$generate.permalink != 0">
      <xsl:call-template name="permalink">
        <xsl:with-param name="node" select=".."/>
      </xsl:call-template>
    </xsl:if>
  </xsl:template>
</xsl:stylesheet>
bitbake/doc/template/permalinks.xsl (vendored, 25 lines)
@@ -1,25 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<xsl:stylesheet version="1.0"
  xmlns="http://www.w3.org/1999/xhtml"
  xmlns:xsl="http://www.w3.org/1999/XSL/Transform">

  <xsl:param name="generate.permalink" select="1"/>
  <xsl:param name="permalink.text">¶</xsl:param>

  <xsl:template name="permalink">
    <xsl:param name="node"/>

    <xsl:if test="$generate.permalink != '0'">
      <span class="permalink">
        <a alt="Permalink" title="Permalink">
          <xsl:attribute name="href">
            <xsl:call-template name="href.target">
              <xsl:with-param name="object" select="$node"/>
            </xsl:call-template>
          </xsl:attribute>
          <xsl:copy-of select="$permalink.text"/>
        </a>
      </span>
    </xsl:if>
  </xsl:template>
</xsl:stylesheet>
bitbake/doc/template/section.title.xsl (vendored, 55 lines)
@@ -1,55 +0,0 @@
<xsl:stylesheet version="1.0"
  xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
  xmlns:d="http://docbook.org/ns/docbook"
  xmlns="http://www.w3.org/1999/xhtml" exclude-result-prefixes="d">

  <xsl:template name="section.title">
    <xsl:variable name="section"
      select="(ancestor::section |
               ancestor::simplesect|
               ancestor::sect1|
               ancestor::sect2|
               ancestor::sect3|
               ancestor::sect4|
               ancestor::sect5)[last()]"/>

    <xsl:variable name="renderas">
      <xsl:choose>
        <xsl:when test="$section/@renderas = 'sect1'">1</xsl:when>
        <xsl:when test="$section/@renderas = 'sect2'">2</xsl:when>
        <xsl:when test="$section/@renderas = 'sect3'">3</xsl:when>
        <xsl:when test="$section/@renderas = 'sect4'">4</xsl:when>
        <xsl:when test="$section/@renderas = 'sect5'">5</xsl:when>
        <xsl:otherwise><xsl:value-of select="''"/></xsl:otherwise>
      </xsl:choose>
    </xsl:variable>

    <xsl:variable name="level">
      <xsl:choose>
        <xsl:when test="$renderas != ''">
          <xsl:value-of select="$renderas"/>
        </xsl:when>
        <xsl:otherwise>
          <xsl:call-template name="section.level">
            <xsl:with-param name="node" select="$section"/>
          </xsl:call-template>
        </xsl:otherwise>
      </xsl:choose>
    </xsl:variable>

    <xsl:call-template name="section.heading">
      <xsl:with-param name="section" select="$section"/>
      <xsl:with-param name="level" select="$level"/>
      <xsl:with-param name="title">
        <xsl:apply-templates select="$section" mode="object.title.markup">
          <xsl:with-param name="allow-anchors" select="1"/>
        </xsl:apply-templates>
        <xsl:if test="$level > 0">
          <xsl:call-template name="permalink">
            <xsl:with-param name="node" select="$section"/>
          </xsl:call-template>
        </xsl:if>
      </xsl:with-param>
    </xsl:call-template>
  </xsl:template>
</xsl:stylesheet>
@@ -21,7 +21,7 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

-__version__ = "1.26.0"
+__version__ = "1.22.0"

import sys
if sys.version_info < (2, 7, 3):
@@ -99,11 +99,12 @@ def error(*args):

def fatal(*args):
    logger.critical(''.join(args))
-    raise BBHandledException()
+    sys.exit(1)


def deprecated(func, name=None, advice=""):
    """This is a decorator which can be used to mark functions
-    as deprecated. It will result in a warning being emitted
+    as deprecated. It will result in a warning being emmitted
    when the function is used."""
    import warnings

@@ -23,7 +23,7 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+#Based on functions from the base bb module, Copyright 2003 Holger Schurig

import os
import sys
@@ -42,8 +42,9 @@ logger = logging.getLogger('BitBake.Build')

NULL = open(os.devnull, 'r+')

-# When we execute a Python function, we'd like certain things
-# in all namespaces, hence we add them to __builtins__.
+# When we execute a python function we'd like certain things
+# in all namespaces, hence we add them to __builtins__
# If we do not do this and use the exec globals, they will
# not be available to subfunctions.
__builtins__['bb'] = bb
@@ -142,7 +143,7 @@ class LogTee(object):
        self.outfile.flush()

def exec_func(func, d, dirs = None):
-    """Execute a BB 'function'"""
+    """Execute an BB 'function'"""

    body = d.getVar(func)
    if not body:
@@ -227,7 +228,7 @@ def exec_func_python(func, d, runfile, cwd=None):
    code = _functionfmt.format(function=func, body=d.getVar(func, True))
    bb.utils.mkdirhier(os.path.dirname(runfile))
    with open(runfile, 'w') as script:
-        bb.data.emit_func_python(func, script, d)
+        script.write(code)

    if cwd:
        try:
|
||||
try:
|
||||
comp = utils.better_compile(code, func, bbfile)
|
||||
utils.better_exec(comp, {"d": d}, code, bbfile)
|
||||
except (bb.parse.SkipRecipe, bb.build.FuncFailed):
|
||||
raise
|
||||
except:
|
||||
if sys.exc_info()[0] in (bb.parse.SkipPackage, bb.build.FuncFailed):
|
||||
raise
|
||||
|
||||
raise FuncFailed(func, None)
|
||||
finally:
|
||||
bb.debug(2, "Python function %s finished" % func)
|
||||
@@ -416,7 +418,7 @@ def _exec_task(fn, task, d, quieterr):
|
||||
os.dup2(logfile.fileno(), oso[1])
|
||||
os.dup2(logfile.fileno(), ose[1])
|
||||
|
||||
# Ensure Python logging goes to the logfile
|
||||
# Ensure python logging goes to the logfile
|
||||
handler = logging.StreamHandler(logfile)
|
||||
handler.setFormatter(logformatter)
|
||||
# Always enable full debug output into task logfiles
|
||||
@@ -505,7 +507,7 @@ def exec_task(fn, task, d, profile = False):
|
||||
event.fire(failedevent, d)
|
||||
return 1
|
||||
|
||||
def stamp_internal(taskname, d, file_name, baseonly=False):
|
||||
def stamp_internal(taskname, d, file_name):
|
||||
"""
|
||||
Internal stamp helper function
|
||||
Makes sure the stamp directory exists
|
||||
@@ -526,9 +528,6 @@ def stamp_internal(taskname, d, file_name, baseonly=False):
|
||||
file_name = d.getVar('BB_FILENAME', True)
|
||||
extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info', True) or ""
|
||||
|
||||
if baseonly:
|
||||
return stamp
|
||||
|
||||
if not stamp:
|
||||
return
|
||||
|
||||
@@ -593,9 +592,8 @@ def make_stamp(task, d, file_name = None):
|
||||
# If we're in task context, write out a signature file for each task
|
||||
# as it completes
|
||||
if not task.endswith("_setscene") and task != "do_setscene" and not file_name:
|
||||
stampbase = stamp_internal(task, d, None, True)
|
||||
file_name = d.getVar('BB_FILENAME', True)
|
||||
bb.parse.siggen.dump_sigtask(file_name, task, stampbase, True)
|
||||
bb.parse.siggen.dump_sigtask(file_name, task, d.getVar('STAMP', True), True)
|
||||
|
||||
def del_stamp(task, d, file_name = None):
|
||||
"""
|
||||
|
||||
@@ -43,7 +43,7 @@ except ImportError:
|
||||
logger.info("Importing cPickle failed. "
|
||||
"Falling back to a very slow implementation.")
|
||||
|
||||
__cache_version__ = "148"
|
||||
__cache_version__ = "147"
|
||||
|
||||
def getCacheFile(path, filename, data_hash):
|
||||
return os.path.join(path, filename + "." + data_hash)
|
||||
@@ -225,16 +225,14 @@ class CoreRecipeInfo(RecipeInfoCommon):
        for package in self.packages_dynamic:
            cachedata.packages_dynamic[package].append(fn)

-        # Build hash of runtime depends and recommends
+        # Build hash of runtime depends and rececommends
        for package in self.packages + [self.pn]:
            cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
            cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]

        # Collect files we may need for possible world-dep
        # calculations
-        if self.not_world:
-            logger.debug(1, "EXCLUDE FROM WORLD: %s", fn)
-        else:
+        if not self.not_world:
            cachedata.possible_world.append(fn)

        # create a collection of all targets for sanity checking
@@ -261,7 +259,7 @@ class Cache(object):

    def __init__(self, data, data_hash, caches_array):
        # Pass caches_array information into Cache Constructor
-        # It will be used later for deciding whether we
+        # It will be used in later for deciding whether we
        # need extra cache file dump/load support
        self.caches_array = caches_array
        self.cachedir = data.getVar("CACHE", True)
@@ -529,11 +527,8 @@ class Cache(object):
        if hasattr(info_array[0], 'file_checksums'):
            for _, fl in info_array[0].file_checksums.items():
                for f in fl.split():
-                    if "*" in f:
-                        continue
-                    f, exist = f.split(":")
-                    if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
-                        logger.debug(2, "Cache: %s's file checksum list file %s changed",
+                    if not ('*' in f or os.path.exists(f)):
+                        logger.debug(2, "Cache: %s's file checksum list file %s was removed",
                                     fn, f)
                        self.remove(fn)
                        return False
@@ -623,13 +618,10 @@ class Cache(object):
|
||||
def mtime(cachefile):
|
||||
return bb.parse.cached_mtime_noerror(cachefile)
|
||||
|
||||
def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
|
||||
def add_info(self, filename, info_array, cacheData, parsed=None):
|
||||
if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
|
||||
cacheData.add_from_recipeinfo(filename, info_array)
|
||||
|
||||
if watcher:
|
||||
watcher(info_array[0].file_depends)
|
||||
|
||||
if not self.has_cache:
|
||||
return
|
||||
|
||||
@@ -700,7 +692,7 @@ def init(cooker):
|
||||
|
||||
* Its mtime
|
||||
* The mtimes of all its dependencies
|
||||
* Whether it caused a parse.SkipRecipe exception
|
||||
* Whether it caused a parse.SkipPackage exception
|
||||
|
||||
Files causing parsing errors are evicted from the cache.
|
||||
|
||||
@@ -770,6 +762,16 @@ class MultiProcessCache(object):
|
||||
|
||||
self.cachedata = data
|
||||
|
||||
def internSet(self, items):
|
||||
new = set()
|
||||
for i in items:
|
||||
new.add(intern(i))
|
||||
return new
|
||||
|
||||
def compress_keys(self, data):
|
||||
# Override in subclasses if desired
|
||||
return
|
||||
|
||||
def create_cachedata(self):
|
||||
data = [{}]
|
||||
return data
|
||||
@@ -810,7 +812,15 @@ class MultiProcessCache(object):
|
||||
|
||||
glf = bb.utils.lockfile(self.cachefile + ".lock")
|
||||
|
||||
data = self.cachedata
|
||||
try:
|
||||
with open(self.cachefile, "rb") as f:
|
||||
p = pickle.Unpickler(f)
|
||||
data, version = p.load()
|
||||
except (IOError, EOFError):
|
||||
data, version = None, None
|
||||
|
||||
if version != self.__class__.CACHE_VERSION:
|
||||
data = self.create_cachedata()
|
||||
|
||||
for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
|
||||
f = os.path.join(os.path.dirname(self.cachefile), f)
|
||||
@@ -819,16 +829,16 @@ class MultiProcessCache(object):
|
||||
p = pickle.Unpickler(fd)
|
||||
extradata, version = p.load()
|
||||
except (IOError, EOFError):
|
||||
os.unlink(f)
|
||||
continue
|
||||
extradata, version = self.create_cachedata(), None
|
||||
|
||||
if version != self.__class__.CACHE_VERSION:
|
||||
os.unlink(f)
|
||||
continue
|
||||
|
||||
self.merge_data(extradata, data)
|
||||
os.unlink(f)
|
||||
|
||||
self.compress_keys(data)
|
||||
|
||||
with open(self.cachefile, "wb") as f:
|
||||
p = pickle.Pickler(f, -1)
|
||||
p.dump([data, self.__class__.CACHE_VERSION])
|
||||
|
||||
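The merge logic in this hunk pair follows one pattern either way: each process dumps its extra entries into a side file next to the main cache, and a lockfile serialises the final merge; the older ("+") side additionally reloads the on-disk cache under the lock instead of trusting the in-memory copy. Outside BitBake the pattern looks roughly like this sketch (fcntl stands in for bb.utils.lockfile; names are illustrative):

import fcntl, glob, os, pickle

def save_merge(cachefile, fresh_data, merge_data, version):
    # Serialise writers; bb.utils.lockfile() plays this role in BitBake.
    with open(cachefile + ".lock", "w") as lockf:
        fcntl.flock(lockf, fcntl.LOCK_EX)
        try:
            with open(cachefile, "rb") as f:
                data, fileversion = pickle.load(f)
        except (IOError, EOFError):
            data, fileversion = None, None
        if fileversion != version:
            data = fresh_data          # stale or missing cache: start over
        # Fold in each per-process side file ("<cachefile>-<pid>"), then drop it.
        for extra in glob.glob(cachefile + "-*"):
            try:
                with open(extra, "rb") as f:
                    extradata, extraversion = pickle.load(f)
                if extraversion == version:
                    merge_data(extradata, data)
            except (IOError, EOFError):
                pass
            os.unlink(extra)
        with open(cachefile, "wb") as f:
            pickle.dump([data, version], f, -1)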
@@ -33,82 +33,9 @@ def check_indent(codestr):
    return codestr


-# Basically pickle, in python 2.7.3 at least, does badly with data duplication
-# upon pickling and unpickling. Combine this with duplicate objects and things
-# are a mess.
-#
-# When the sets are originally created, python calls intern() on the set keys
-# which significantly improves memory usage. Sadly the pickle/unpickle process
-# doesn't call intern() on the keys and results in the same strings being duplicated
-# in memory. This also means pickle will save the same string multiple times in
-# the cache file.
-#
-# By having shell and python cacheline objects with setstate/getstate, we force
-# the object creation through our own routine where we can call intern (via internSet).
-#
-# We also use hashable frozensets and ensure we use references to these so that
-# duplicates can be removed, both in memory and in the resulting pickled data.
-#
-# By playing these games, the size of the cache file shrinks dramatically
-# meaning faster load times and the reloaded cache files also consume much less
-# memory. Smaller cache files, faster load times and lower memory usage is good.
-#
-# A custom getstate/setstate using tuples is actually worth 15% cachesize by
-# avoiding duplication of the attribute names!
-
-class SetCache(object):
-    def __init__(self):
-        self.setcache = {}
-
-    def internSet(self, items):
-
-        new = []
-        for i in items:
-            new.append(intern(i))
-        s = frozenset(new)
-        if hash(s) in self.setcache:
-            return self.setcache[hash(s)]
-        self.setcache[hash(s)] = s
-        return s
-
-codecache = SetCache()
-
-class pythonCacheLine(object):
-    def __init__(self, refs, execs, contains):
-        self.refs = codecache.internSet(refs)
-        self.execs = codecache.internSet(execs)
-        self.contains = {}
-        for c in contains:
-            self.contains[c] = codecache.internSet(contains[c])
-
-    def __getstate__(self):
-        return (self.refs, self.execs, self.contains)
-
-    def __setstate__(self, state):
-        (refs, execs, contains) = state
-        self.__init__(refs, execs, contains)
-    def __hash__(self):
-        l = (hash(self.refs), hash(self.execs))
-        for c in sorted(self.contains.keys()):
-            l = l + (c, hash(self.contains[c]))
-        return hash(l)
-
-class shellCacheLine(object):
-    def __init__(self, execs):
-        self.execs = codecache.internSet(execs)
-
-    def __getstate__(self):
-        return (self.execs)
-
-    def __setstate__(self, state):
-        (execs) = state
-        self.__init__(execs)
-    def __hash__(self):
-        return hash(self.execs)
-
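The removed comment block above states the trick tersely: intern the strings, freeze the set, and hand out one shared frozenset per distinct hash, so both in-memory data and the pickle stream deduplicate. The effect in miniature (Python 2, like the code above, where intern() is a builtin; this is a sketch of the idea, not BitBake's SetCache itself):

cache = {}

def intern_set(items):
    s = frozenset(intern(i) for i in items)
    # Return the already-cached object so every holder shares one frozenset.
    return cache.setdefault(hash(s), s)

a = intern_set(["bb.build.exec_func", "oe.utils.conditional"])
b = intern_set(["oe.utils.conditional", "bb.build.exec_func"])
assert a is b  # one object in memory, and pickle stores it only once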
class CodeParserCache(MultiProcessCache):
    cache_file_name = "bb_codeparser.dat"
-    CACHE_VERSION = 7
+    CACHE_VERSION = 6

    def __init__(self):
        MultiProcessCache.__init__(self)
@@ -117,27 +44,6 @@ class CodeParserCache(MultiProcessCache):
        self.pythoncacheextras = self.cachedata_extras[0]
        self.shellcacheextras = self.cachedata_extras[1]

-        # To avoid duplication in the codeparser cache, keep
-        # a lookup of hashes of objects we already have
-        self.pythoncachelines = {}
-        self.shellcachelines = {}
-
-    def newPythonCacheLine(self, refs, execs, contains):
-        cacheline = pythonCacheLine(refs, execs, contains)
-        h = hash(cacheline)
-        if h in self.pythoncachelines:
-            return self.pythoncachelines[h]
-        self.pythoncachelines[h] = cacheline
-        return cacheline
-
-    def newShellCacheLine(self, execs):
-        cacheline = shellCacheLine(execs)
-        h = hash(cacheline)
-        if h in self.shellcachelines:
-            return self.shellcachelines[h]
-        self.shellcachelines[h] = cacheline
-        return cacheline
-
    def init_cache(self, d):
        MultiProcessCache.init_cache(self, d)

@@ -145,6 +51,25 @@ class CodeParserCache(MultiProcessCache):
        self.pythoncache = self.cachedata[0]
        self.shellcache = self.cachedata[1]

+    def compress_keys(self, data):
+        # When the dicts are originally created, python calls intern() on the set keys
+        # which significantly improves memory usage. Sadly the pickle/unpickle process
+        # doesn't call intern() on the keys and results in the same strings being duplicated
+        # in memory. This also means pickle will save the same string multiple times in
+        # the cache file. By interning the data here, the cache file shrinks dramatically
+        # meaning faster load times and the reloaded cache files also consume much less
+        # memory. This is worth any performance hit from this loops and the use of the
+        # intern() data storage.
+        # Python 3.x may behave better in this area
+        for h in data[0]:
+            data[0][h]["refs"] = self.internSet(data[0][h]["refs"])
+            data[0][h]["execs"] = self.internSet(data[0][h]["execs"])
+            for k in data[0][h]["contains"]:
+                data[0][h]["contains"][k] = self.internSet(data[0][h]["contains"][k])
+        for h in data[1]:
+            data[1][h]["execs"] = self.internSet(data[1][h]["execs"])
+        return
+
    def create_cachedata(self):
        data = [{}, {}]
        return data
@@ -178,7 +103,7 @@ class BufferedLogger(Logger):

class PythonParser():
    getvars = (".getVar", ".appendVar", ".prependVar")
-    containsfuncs = ("bb.utils.contains", "base_contains", "bb.utils.contains_any")
+    containsfuncs = ("bb.utils.contains", "base_contains", "oe.utils.contains")
    execfuncs = ("bb.build.exec_func", "bb.build.exec_task")

    def warn(self, func, arg):
@@ -243,19 +168,15 @@ class PythonParser():
        h = hash(str(node))

        if h in codeparsercache.pythoncache:
-            self.references = set(codeparsercache.pythoncache[h].refs)
-            self.execs = set(codeparsercache.pythoncache[h].execs)
-            self.contains = {}
-            for i in codeparsercache.pythoncache[h].contains:
-                self.contains[i] = set(codeparsercache.pythoncache[h].contains[i])
+            self.references = codeparsercache.pythoncache[h]["refs"]
+            self.execs = codeparsercache.pythoncache[h]["execs"]
+            self.contains = codeparsercache.pythoncache[h]["contains"]
            return

        if h in codeparsercache.pythoncacheextras:
-            self.references = set(codeparsercache.pythoncacheextras[h].refs)
-            self.execs = set(codeparsercache.pythoncacheextras[h].execs)
-            self.contains = {}
-            for i in codeparsercache.pythoncacheextras[h].contains:
-                self.contains[i] = set(codeparsercache.pythoncacheextras[h].contains[i])
+            self.references = codeparsercache.pythoncacheextras[h]["refs"]
+            self.execs = codeparsercache.pythoncacheextras[h]["execs"]
+            self.contains = codeparsercache.pythoncacheextras[h]["contains"]
            return

        code = compile(check_indent(str(node)), "<string>", "exec",
@@ -267,7 +188,10 @@ class PythonParser():

        self.execs.update(self.var_execs)

-        codeparsercache.pythoncacheextras[h] = codeparsercache.newPythonCacheLine(self.references, self.execs, self.contains)
+        codeparsercache.pythoncacheextras[h] = {}
+        codeparsercache.pythoncacheextras[h]["refs"] = self.references
+        codeparsercache.pythoncacheextras[h]["execs"] = self.execs
+        codeparsercache.pythoncacheextras[h]["contains"] = self.contains

class ShellParser():
    def __init__(self, name, log):
@@ -286,17 +210,18 @@ class ShellParser():
        h = hash(str(value))

        if h in codeparsercache.shellcache:
-            self.execs = set(codeparsercache.shellcache[h].execs)
+            self.execs = codeparsercache.shellcache[h]["execs"]
            return self.execs

        if h in codeparsercache.shellcacheextras:
-            self.execs = set(codeparsercache.shellcacheextras[h].execs)
+            self.execs = codeparsercache.shellcacheextras[h]["execs"]
            return self.execs

        self._parse_shell(value)
        self.execs = set(cmd for cmd in self.allexecs if cmd not in self.funcdefs)

-        codeparsercache.shellcacheextras[h] = codeparsercache.newShellCacheLine(self.execs)
+        codeparsercache.shellcacheextras[h] = {}
+        codeparsercache.shellcacheextras[h]["execs"] = self.execs

        return self.execs
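On both sides of these hunks the parsers consult the cache the same way: hash the code fragment, check the read-only data loaded from disk, then check the extras collected during this run, and only parse on a double miss; the extras are persisted later by save_merge(). Schematically (names here are illustrative, not BitBake API):

def cached_parse(value, cache, cache_extras, parse):
    h = hash(str(value))
    if h in cache:            # populated from the on-disk cache at startup
        return cache[h]
    if h in cache_extras:     # results already computed in this session
        return cache_extras[h]
    result = parse(value)     # slow path: actually parse the fragment
    cache_extras[h] = result  # written back to disk later by save_merge()
    return result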
@@ -86,10 +86,7 @@ class Command:

    def runAsyncCommand(self):
        try:
-            if self.cooker.state in (bb.cooker.state.error, bb.cooker.state.shutdown, bb.cooker.state.forceshutdown):
-                # updateCache will trigger a shutdown of the parser
-                # and then raise BBHandledException triggering an exit
-                self.cooker.updateCache()
+            if self.cooker.state == bb.cooker.state.error:
                return False
            if self.currentAsyncCommand is not None:
                (command, options) = self.currentAsyncCommand
@@ -123,11 +120,11 @@ class Command:

    def finishAsyncCommand(self, msg=None, code=None):
        if msg or msg == "":
-            bb.event.fire(CommandFailed(msg), self.cooker.expanded_data)
+            bb.event.fire(CommandFailed(msg), self.cooker.event_data)
        elif code:
-            bb.event.fire(CommandExit(code), self.cooker.expanded_data)
+            bb.event.fire(CommandExit(code), self.cooker.event_data)
        else:
-            bb.event.fire(CommandCompleted(), self.cooker.expanded_data)
+            bb.event.fire(CommandCompleted(), self.cooker.event_data)
        self.currentAsyncCommand = None
        self.cooker.finishcommand()

@@ -271,11 +268,6 @@ class CommandsSync:
        # we always take and leave the cooker in state.initial
        setFeatures.readonly = True

-    def updateConfig(self, command, params):
-        options = params[0]
-        environment = params[1]
-        command.cooker.updateConfigOpts(options, environment)
-
class CommandsAsync:
    """
    A class of asynchronous commands

@@ -38,10 +38,7 @@ import bb, bb.exceptions, bb.command
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue
import Queue
import signal
-import subprocess
-import errno
import prserv.serv
-import pyinotify

logger = logging.getLogger("BitBake")
collectlog = logging.getLogger("BitBake.Collection")
@@ -123,34 +120,8 @@ class BBCooker:

        self.configuration = configuration

-        self.configwatcher = pyinotify.WatchManager()
-        self.configwatcher.bbseen = []
-        self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
-        self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
-                         pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
-                         pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
-        self.watcher = pyinotify.WatchManager()
-        self.watcher.bbseen = []
-        self.notifier = pyinotify.Notifier(self.watcher, self.notifications)
-

        self.initConfigurationData()

-        self.inotify_modified_files = []
-
-        def _process_inotify_updates(server, notifier_list, abort):
-            for n in notifier_list:
-                if n.check_events(timeout=0):
-                    # read notified events and enqeue them
-                    n.read_events()
-                    n.process_events()
-            return 1.0
-
-        self.configuration.server_register_idlecallback(_process_inotify_updates, [self.confignotifier, self.notifier])
-
-        self.baseconfig_valid = True
-        self.parsecache_valid = False
-
        # Take a lock so only one copy of bitbake can run against a given build
        # directory at a time
        lockfile = self.data.expand("${TOPDIR}/bitbake.lock")
@@ -182,62 +153,20 @@ class BBCooker:
        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
-        # Let SIGHUP exit as SIGTERM
-        signal.signal(signal.SIGHUP, self.sigterm_exception)
-
-    def config_notifications(self, event):
-        if not event.path in self.inotify_modified_files:
-            self.inotify_modified_files.append(event.path)
-        self.baseconfig_valid = False
-
-    def notifications(self, event):
-        if not event.path in self.inotify_modified_files:
-            self.inotify_modified_files.append(event.path)
-        self.parsecache_valid = False
-
-    def add_filewatch(self, deps, watcher=None):
-        if not watcher:
-            watcher = self.watcher
-        for i in deps:
-            f = os.path.dirname(i[0])
-            if f in watcher.bbseen:
-                continue
-            watcher.bbseen.append(f)
-            while True:
-                # We try and add watches for files that don't exist but if they did, would influence
-                # the parser. The parent directory of these files may not exist, in which case we need
-                # to watch any parent that does exist for changes.
-                try:
-                    watcher.add_watch(f, self.watchmask, quiet=False)
-                    break
-                except pyinotify.WatchManagerError as e:
-                    if 'ENOENT' in str(e):
-                        f = os.path.dirname(f)
-                        watcher.bbseen.append(f)
-                        continue
-                    if 'ENOSPC' in str(e):
-                        providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
-                        providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
-                        providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
-                        providerlog.error("Root privilege is required to modify max_user_watches.")
-                    raise

    def sigterm_exception(self, signum, stackframe):
-        if signum == signal.SIGTERM:
-            bb.warn("Cooker recieved SIGTERM, shutting down...")
-        elif signum == signal.SIGHUP:
-            bb.warn("Cooker recieved SIGHUP, shutting down...")
+        bb.warn("Cooker recieved SIGTERM, shutting down...")
        self.state = state.forceshutdown

    def setFeatures(self, features):
        # we only accept a new feature set if we're in state initial, so we can reset without problems
-        if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
+        if self.state != state.initial:
            raise Exception("Illegal state for feature set change")
        original_featureset = list(self.featureset)
        for feature in features:
            self.featureset.setFeature(feature)
        bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
-        if (original_featureset != list(self.featureset)) and self.state != state.error:
+        if (original_featureset != list(self.featureset)):
            self.reset()

    def initConfigurationData(self):
@@ -271,82 +200,12 @@ class BBCooker:
        self.data = self.databuilder.data
        self.data_hash = self.databuilder.data_hash

-
-        # we log all events to a file if so directed
-        if self.configuration.writeeventlog:
-            import json, pickle
-            DEFAULT_EVENTFILE = self.configuration.writeeventlog
-            class EventLogWriteHandler():
-
-                class EventWriter():
-                    def __init__(self, cooker):
-                        self.file_inited = None
-                        self.cooker = cooker
-                        self.event_queue = []
-
-                    def init_file(self):
-                        try:
-                            # delete the old log
-                            os.remove(DEFAULT_EVENTFILE)
-                        except:
-                            pass
-
-                        # write current configuration data
-                        with open(DEFAULT_EVENTFILE, "w") as f:
-                            f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))
-
-                    def write_event(self, event):
-                        with open(DEFAULT_EVENTFILE, "a") as f:
-                            try:
-                                f.write("%s\n" % json.dumps({"class":event.__module__ + "." + event.__class__.__name__, "vars":json.dumps(pickle.dumps(event)) }))
-                            except Exception as e:
-                                import traceback
-                                print(e, traceback.format_exc(e))
-
-
-                    def send(self, event):
-                        event_class = event.__module__ + "." + event.__class__.__name__
-
-                        # init on bb.event.BuildStarted
-                        if self.file_inited is None:
-                            if event_class == "bb.event.BuildStarted":
-                                self.init_file()
-                                self.file_inited = True
-
-                                # write pending events
-                                for e in self.event_queue:
-                                    self.write_event(e)
-
-                                # also write the current event
-                                self.write_event(event)
-
-                            else:
-                                # queue all events until the file is inited
-                                self.event_queue.append(event)
-
-                        else:
-                            # we have the file, just write the event
-                            self.write_event(event)
-
-                # set our handler's event processor
-                event = EventWriter(self)  # self is the cooker here
-
-
-            # set up cooker features for this mock UI handler
-
-            # we need to write the dependency tree in the log
-            self.featureset.setFeature(CookerFeatures.SEND_DEPENDS_TREE)
-            # register the log file writer as UI Handler
-            bb.event.register_UIHhandler(EventLogWriteHandler())
-
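Each line the removed EventWriter emits is a self-contained JSON object whose "vars" field holds a JSON-encoded pickle of the event. A hypothetical reader (not part of BitBake; Python 2 like the code above, and it assumes the pickled event classes are importable) would peel the two encodings back off in order:

import json, pickle

def read_eventlog(path):
    with open(path) as f:
        # First line is the configuration dump: {"allvariables": {...}}
        header = json.loads(f.readline())
        events = []
        for line in f:
            entry = json.loads(line)
            # "vars" was written as json.dumps(pickle.dumps(event)): decode twice.
            payload = json.loads(entry["vars"]).encode("utf-8")
            events.append((entry["class"], pickle.loads(payload)))
    return header, events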
        #
-        # Copy of the data store which has been expanded.
-        # Used for firing events and accessing variables where expansion needs to be accounted for
+        # Special updated configuration we use for firing events
        #
-        self.expanded_data = bb.data.createCopy(self.data)
-        bb.data.update_data(self.expanded_data)
-        bb.parse.init_parser(self.expanded_data)
+        self.event_data = bb.data.createCopy(self.data)
+        bb.data.update_data(self.event_data)
+        bb.parse.init_parser(self.event_data)

        if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
            self.disableDataTracking()
@@ -381,7 +240,7 @@ class BBCooker:
            f.write(total)

        #add to history
-        loginfo = {"op":"append", "file":default_file, "line":total.count("\n")}
+        loginfo = {"op":append, "file":default_file, "line":total.count("\n")}
        self.data.appendVar(var, val, **loginfo)

    def saveConfigurationVar(self, var, val, default_file, op):
@@ -450,7 +309,7 @@ class BBCooker:
            f.write(total)

        #add to history
-        loginfo = {"op":"set", "file":default_file, "line":total.count("\n")}
+        loginfo = {"op":set, "file":default_file, "line":total.count("\n")}
        self.data.setVar(var, val, **loginfo)

    def removeConfigurationVar(self, var):
@@ -512,30 +371,6 @@ class BBCooker:

        self.handleCollections( self.data.getVar("BBFILE_COLLECTIONS", True) )

-    def updateConfigOpts(self, options, environment):
-        for o in options:
-            setattr(self.configuration, o, options[o])
-        clean = True
-        for k in bb.utils.approved_variables():
-            if k in environment and k not in self.configuration.env:
-                logger.debug(1, "Updating environment variable %s to %s" % (k, environment[k]))
-                self.configuration.env[k] = environment[k]
-                clean = False
-            if k in self.configuration.env and k not in environment:
-                logger.debug(1, "Updating environment variable %s (deleted)" % (k))
-                del self.configuration.env[k]
-                clean = False
-            if k not in self.configuration.env and k not in environment:
-                continue
-            if environment[k] != self.configuration.env[k]:
-                logger.debug(1, "Updating environment variable %s to %s" % (k, environment[k]))
-                self.configuration.env[k] = environment[k]
-                clean = False
-        if not clean:
-            logger.debug(1, "Base environment change, triggering reparse")
-            self.baseconfig_valid = False
-            self.reset()
-
    def runCommands(self, server, data, abort):
        """
        Run any queued asynchronous command
@@ -567,7 +402,7 @@ class BBCooker:

    def showEnvironment(self, buildfile = None, pkgs_to_build = []):
        """
-        Show the outer or per-recipe environment
+        Show the outer or per-package environment
        """
        fn = None
        envdata = None
@@ -581,7 +416,7 @@ class BBCooker:
            fn = self.matchFile(fn)
            fn = bb.cache.Cache.realfn2virtual(fn, cls)
        elif len(pkgs_to_build) == 1:
-            ignore = self.expanded_data.getVar("ASSUME_PROVIDED", True) or ""
+            ignore = self.data.getVar("ASSUME_PROVIDED", True) or ""
            if pkgs_to_build[0] in set(ignore.split()):
                bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

@@ -661,7 +496,7 @@ class BBCooker:
        taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, task, False)

        return runlist, taskdata

+    ######## WARNING : this function requires cache_extra to be enabled ########

    def generateTaskDepTreeData(self, pkgs_to_build, task):
@@ -983,6 +818,7 @@ class BBCooker:
        or to find all machine configuration files one could call:
        findFilesMatchingInDir(self, 'conf/machines', 'conf')
        """
+        import re

        matches = []
        p = re.compile(re.escape(filepattern))
@@ -1113,30 +949,42 @@ class BBCooker:
            # Check dependencies and store information for priority calculation
            deps = self.data.getVar("LAYERDEPENDS_%s" % c, True)
            if deps:
-                try:
-                    deplist = bb.utils.explode_dep_versions2(deps)
-                except bb.utils.VersionStringException as vse:
-                    bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
-                for dep, oplist in deplist.iteritems():
+                depnamelist = []
+                deplist = deps.split()
+                for dep in deplist:
+                    depsplit = dep.split(':')
+                    if len(depsplit) > 1:
+                        try:
+                            depver = int(depsplit[1])
+                        except ValueError:
+                            parselog.error("invalid version value in LAYERDEPENDS_%s: \"%s\"", c, dep)
+                            errors = True
+                            continue
+                    else:
+                        depver = None
+                    dep = depsplit[0]
+                    depnamelist.append(dep)

                    if dep in collection_list:
-                        for opstr in oplist:
+                        if depver:
                            layerver = self.data.getVar("LAYERVERSION_%s" % dep, True)
-                            (op, depver) = opstr.split()
                            if layerver:
                                try:
-                                    res = bb.utils.vercmp_string_op(layerver, depver, op)
-                                except bb.utils.VersionStringException as vse:
-                                    bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
-                                if not res:
-                                    parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
+                                    lver = int(layerver)
+                                except ValueError:
+                                    parselog.error("invalid value for LAYERVERSION_%s: \"%s\"", c, layerver)
                                    errors = True
+                                    continue
+                                if lver != depver:
+                                    parselog.error("Layer '%s' depends on version %d of layer '%s', but version %d is enabled in your configuration", c, depver, dep, lver)
+                                    errors = True
                            else:
-                                parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
+                                parselog.error("Layer '%s' depends on version %d of layer '%s', which exists in your configuration but does not specify a version", c, depver, dep)
                                errors = True
                    else:
                        parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
                        errors = True
-                collection_depends[c] = deplist.keys()
+                collection_depends[c] = depnamelist
            else:
                collection_depends[c] = []
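On the removed (newer) side, the parsing is delegated to bb.utils.explode_dep_versions2(), which expands a LAYERDEPENDS-style string into a mapping from layer name to a list of version-constraint strings; each constraint then splits into an operator and a version for vercmp_string_op(). Roughly, inside a BitBake environment (the output shape is inferred from the usage above):

import bb.utils

deps = "core (>= 7) networking-layer"
deplist = bb.utils.explode_dep_versions2(deps)
# -> {"core": [">= 7"], "networking-layer": []}
for dep, oplist in deplist.items():
    for opstr in oplist:
        op, depver = opstr.split()   # e.g. ">=" and "7"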
@@ -1188,7 +1036,7 @@ class BBCooker:
        bf = os.path.abspath(bf)

        self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities)
-        filelist, masked = self.collection.collect_bbfiles(self.data, self.expanded_data)
+        filelist, masked = self.collection.collect_bbfiles(self.data, self.event_data)
        try:
            os.stat(bf)
            bf = os.path.abspath(bf)
@@ -1278,7 +1126,7 @@ class BBCooker:
        taskdata.add_provider(self.data, self.recipecache, item)

        buildname = self.data.getVar("BUILDNAME")
-        bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.expanded_data)
+        bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.event_data)

        # Execute the runqueue
        runlist = [[item, "do_%s" % task]]
@@ -1305,7 +1153,7 @@ class BBCooker:
            return False

        if not retval:
-            bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, item, failures), self.expanded_data)
+            bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, item, failures), self.event_data)
            self.command.finishAsyncCommand(msg)
            return False
        if retval is True:
@@ -1380,6 +1228,7 @@ class BBCooker:
        '''
        Create a new image with a "require"/"inherit" base_image statement
        '''
+        import re
        if timestamp:
            image_name = os.path.splitext(image)[0]
            timestr = time.strftime("-%Y%m%d-%H%M%S")
@@ -1430,45 +1279,29 @@ class BBCooker:
        if self.state == state.running:
            return

-        if self.state in (state.shutdown, state.forceshutdown, state.error):
+        if self.state in (state.shutdown, state.forceshutdown):
            if hasattr(self.parser, 'shutdown'):
                self.parser.shutdown(clean=False, force = True)
            raise bb.BBHandledException()

        if self.state != state.parsing:
-
-            # reload files for which we got notifications
-            for p in self.inotify_modified_files:
-                bb.parse.update_cache(p)
-            self.inotify_modified_files = []
-
-            if not self.baseconfig_valid:
-                logger.debug(1, "Reloading base configuration data")
-                self.initConfigurationData()
-                self.baseconfig_valid = True
-                self.parsecache_valid = False
-
-        if self.state != state.parsing and not self.parsecache_valid:
            self.parseConfiguration ()
            if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
                bb.event.fire(bb.event.SanityCheck(False), self.data)

-            ignore = self.expanded_data.getVar("ASSUME_PROVIDED", True) or ""
+            ignore = self.data.getVar("ASSUME_PROVIDED", True) or ""
            self.recipecache.ignored_dependencies = set(ignore.split())

            for dep in self.configuration.extra_assume_provided:
                self.recipecache.ignored_dependencies.add(dep)

            self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities)
-            (filelist, masked) = self.collection.collect_bbfiles(self.data, self.expanded_data)
+            (filelist, masked) = self.collection.collect_bbfiles(self.data, self.event_data)

-            self.data.renameVar("__depends", "__base_depends")
-            self.add_filewatch(self.data.getVar("__base_depends"), self.configwatcher)
-
            self.parser = CookerParser(self, filelist, masked)
-            self.parsecache_valid = True

-        self.state = state.parsing
+            self.state = state.parsing

        if not self.parser.parse_next():
            collectlog.debug(1, "parsing complete")
@@ -1476,7 +1309,7 @@ class BBCooker:
            raise bb.BBHandledException()
        self.show_appends_with_no_recipes()
        self.handlePrefProviders()
-        self.recipecache.bbfile_priority = self.collection.collection_priorities(self.recipecache.pkg_fn, self.data)
+        self.recipecache.bbfile_priority = self.collection.collection_priorities(self.recipecache.pkg_fn)
        self.state = state.running
        return None

@@ -1490,7 +1323,7 @@ class BBCooker:
        if len(pkgs_to_build) == 0:
            raise NothingToBuild

-        ignore = (self.expanded_data.getVar("ASSUME_PROVIDED", True) or "").split()
+        ignore = (self.data.getVar("ASSUME_PROVIDED", True) or "").split()
        for pkg in pkgs_to_build:
            if pkg in ignore:
                parselog.warn("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
@@ -1520,41 +1353,13 @@ class BBCooker:
        try:
            self.prhost = prserv.serv.auto_start(self.data)
        except prserv.serv.PRServiceConfigError:
-            bb.event.fire(CookerExit(), self.expanded_data)
+            bb.event.fire(CookerExit(), self.event_data)
            self.state = state.error
        return

    def post_serve(self):
        prserv.serv.auto_shutdown(self.data)
-        bb.event.fire(CookerExit(), self.expanded_data)
-        lockfile = self.lock.name
-        self.lock.close()
-        self.lock = None
-
-        while not self.lock:
-            with bb.utils.timeout(3):
-                self.lock = bb.utils.lockfile(lockfile, shared=False, retry=False, block=True)
-                if not self.lock:
-                    # Some systems may not have lsof available
-                    procs = None
-                    try:
-                        procs = subprocess.check_output(["lsof", '-w', lockfile], stderr=subprocess.STDOUT)
-                    except OSError as e:
-                        if e.errno != errno.ENOENT:
-                            raise
-                    if procs is None:
-                        # Fall back to fuser if lsof is unavailable
-                        try:
-                            procs = subprocess.check_output(["fuser", '-v', lockfile], stderr=subprocess.STDOUT)
-                        except OSError as e:
-                            if e.errno != errno.ENOENT:
-                                raise
-
-                    msg = "Delaying shutdown due to active processes which appear to be holding bitbake.lock"
-                    if procs:
-                        msg += ":\n%s" % str(procs)
-                    print(msg)
-
+        bb.event.fire(CookerExit(), self.event_data)

    def shutdown(self, force = False):
        if force:
@@ -1633,7 +1438,7 @@ class CookerCollectFiles(object):
                for ignored in ('SCCS', 'CVS', '.svn'):
                    if ignored in dirs:
                        dirs.remove(ignored)
-                found += [os.path.join(dir, f) for f in files if (f.endswith(['.bb', '.bbappend']))]
+                found += [os.path.join(dir, f) for f in files if (f.endswith('.bb') or f.endswith('.bbappend'))]

        return found

@@ -1731,7 +1536,7 @@ class CookerCollectFiles(object):
                filelist.append(filename)
        return filelist

-    def collection_priorities(self, pkgfns, d):
+    def collection_priorities(self, pkgfns):

        priorities = {}

@@ -1740,10 +1545,10 @@ class CookerCollectFiles(object):
        for p in pkgfns:
            realfn, cls = bb.cache.Cache.virtualfn2realfn(p)
            priorities[p] = self.calc_bbfile_priority(realfn, matched)

        # Don't show the warning if the BBFILE_PATTERN did match .bbappend files
        unmatched = set()
-        for _, _, regex, pri in self.bbfile_config_priorities:
+        for _, _, regex, pri in self.bbfile_config_priorities:
            if not regex in matched:
                unmatched.add(regex)

@@ -1760,8 +1565,7 @@ class CookerCollectFiles(object):

        for collection, pattern, regex, _ in self.bbfile_config_priorities:
            if regex in unmatched:
-                if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection, True) != '1':
-                    collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
+                collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))

        return priorities

@@ -2061,7 +1865,7 @@ class CookerParser(object):
            self.skipped += 1
            self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
        self.bb_cache.add_info(virtualfn, info_array, self.cooker.recipecache,
-                               parsed=parsed, watcher = self.cooker.add_filewatch)
+                               parsed=parsed)
        return True

    def reparse(self, filename):

@@ -33,8 +33,8 @@ logger = logging.getLogger("BitBake")
parselog = logging.getLogger("BitBake.Parsing")

class ConfigParameters(object):
-    def __init__(self, argv=sys.argv):
-        self.options, targets = self.parseCommandLine(argv)
+    def __init__(self):
+        self.options, targets = self.parseCommandLine()
        self.environment = self.parseEnvironment()

        self.options.pkgs_to_build = targets or []
@@ -46,7 +46,7 @@ class ConfigParameters(object):
        for key, val in self.options.__dict__.items():
            setattr(self, key, val)

-    def parseCommandLine(self, argv=sys.argv):
+    def parseCommandLine(self):
        raise Exception("Caller must implement commandline option parsing")

    def parseEnvironment(self):
@@ -69,17 +69,6 @@ class ConfigParameters(object):
        if bbpkgs:
            self.options.pkgs_to_build.extend(bbpkgs.split())

-    def updateToServer(self, server, environment):
-        options = {}
-        for o in ["abort", "tryaltconfigs", "force", "invalidate_stamp",
-                  "verbose", "debug", "dry_run", "dump_signatures",
-                  "debug_domains", "extra_assume_provided", "profile"]:
-            options[o] = getattr(self.options, o)
-
-        ret, error = server.runCommand(["updateConfig", options, environment])
-        if error:
-            raise Exception("Unable to update the server configuration with local parameters: %s" % error)
-
    def parseActions(self):
        # Parse any commandline into actions
        action = {'action':None, 'msg':None}
@@ -139,7 +128,6 @@ class CookerConfiguration(object):
        self.dry_run = False
        self.tracking = False
        self.interface = []
-        self.writeeventlog = False

        self.env = {}

@@ -239,13 +227,10 @@ class CookerDataBuilder(object):
        try:
            self.parseConfigurationFiles(self.prefiles, self.postfiles)
        except SyntaxError:
            raise bb.BBHandledException
        except bb.data_smart.ExpansionError as e:
            logger.error(str(e))
-            raise bb.BBHandledException
+            sys.exit(1)
        except Exception:
            logger.exception("Error parsing configuration files")
-            raise bb.BBHandledException
+            sys.exit(1)

    def _findLayerConf(self, data):
        return findConfigFile("bblayers.conf", data)

@@ -1,5 +1,5 @@
"""
-Python Daemonizing helper
+Python Deamonizing helper

Configurable daemon behaviors:

@@ -12,11 +12,8 @@ A failed call to fork() now raises an exception.

References:
    1) Advanced Programming in the Unix Environment: W. Richard Stevens
-       http://www.apuebook.com/apue3e.html
-    2) The Linux Programming Interface: Michael Kerrisk
-       http://man7.org/tlpi/index.html
-    3) Unix Programming Frequently Asked Questions:
-       http://www.faqs.org/faqs/unix-faq/programmer/faq/
+    2) Unix Programming Frequently Asked Questions:
+       http://www.erlenstar.demon.co.uk/unix/faq_toc.html

Modified to allow a function to be daemonized and return for
bitbake use by Richard Purdie
@@ -28,7 +25,7 @@ __version__ = "0.2"

# Standard Python modules.
import os    # Miscellaneous OS interfaces.
-import sys   # System-specific parameters and functions.
+import sys    # System-specific parameters and functions.

# Default daemon parameters.
# File mode creation mask of the daemon.
@@ -131,7 +128,7 @@ def createDaemon(function, logfile):
    # of methods to accomplish this task.  Three are listed below.
    #
    # Try the system configuration variable, SC_OPEN_MAX, to obtain the maximum
-    # number of open file descriptors to close.  If it doesn't exist, use
+    # number of open file descriptors to close.  If it doesn't exists, use
    # the default value (configurable).
    #
    # try:
@@ -149,7 +146,7 @@ def createDaemon(function, logfile):
    # OR
    #
    # Use the getrlimit method to retrieve the maximum file descriptor number
-    # that can be opened by this process.  If there is no limit on the
+    # that can be opened by this process.  If there is not limit on the
    # resource, use the default value.
    #
    import resource    # Resource usage information.

@@ -6,7 +6,7 @@ BitBake 'Data' implementations
Functions for interacting with the data structure used by the
BitBake build tools.

-The expandKeys and update_data are the most expensive
+The expandData and update_data are the most expensive
operations. At night the cookie monster came by and
suggested 'give me cookies on setting the variables and
things will work out'. Taking this suggestion into account
@@ -15,7 +15,7 @@ Analyse von Algorithmen' lecture and the cookie
monster seems to be right. We will track setVar more carefully
to have faster update_data and expandKeys operations.

-This is a trade-off between speed and memory again but
+This is a treade-off between speed and memory again but
the speed is more critical here.
"""

@@ -35,7 +35,7 @@ the speed is more critical here.
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+#Based on functions from the base bb module, Copyright 2003 Holger Schurig

import sys, os, re
if sys.argv[0][-5:] == "pydoc":
@@ -219,13 +219,6 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):

    val = str(val)

-    if varExpanded.startswith("BASH_FUNC_"):
-        varExpanded = varExpanded[10:-2]
-        val = val[3:] # Strip off "() "
-        o.write("%s() %s\n" % (varExpanded, val))
-        o.write("export -f %s\n" % (varExpanded))
-        return 1
-
    if func:
        # NOTE: should probably check for unbalanced {} within the var
        o.write("%s() {\n%s\n}\n" % (varExpanded, val))
@@ -289,41 +282,6 @@ def emit_func(func, o=sys.__stdout__, d = init()):
        newdeps |= set((d.getVarFlag(dep, "vardeps", True) or "").split())
        newdeps -= seen

-_functionfmt = """
-def {function}(d):
-{body}"""
-
-def emit_func_python(func, o=sys.__stdout__, d = init()):
-    """Emits all items in the data store in a format such that it can be sourced by a shell."""
-
-    def write_func(func, o, call = False):
-        body = d.getVar(func, True)
-        if not body.startswith("def"):
-            body = _functionfmt.format(function=func, body=body)
-
-        o.write(body.strip() + "\n\n")
-        if call:
-            o.write(func + "(d)" + "\n\n")
-
-    write_func(func, o, True)
-    pp = bb.codeparser.PythonParser(func, logger)
-    pp.parse_python(d.getVar(func, True))
-    newdeps = pp.execs
-    newdeps |= set((d.getVarFlag(func, "vardeps", True) or "").split())
-    seen = set()
-    while newdeps:
-        deps = newdeps
-        seen |= deps
-        newdeps = set()
-        for dep in deps:
-            if d.getVarFlag(dep, "func") and d.getVarFlag(dep, "python"):
-                write_func(dep, o)
-                pp = bb.codeparser.PythonParser(dep, logger)
-                pp.parse_python(d.getVar(dep, True))
-                newdeps |= pp.execs
-            newdeps |= set((d.getVarFlag(dep, "vardeps", True) or "").split())
-        newdeps -= seen
-
def update_data(d):
    """Performs final steps upon the datastore, including application of overrides"""
    d.finalize(parent = True)

@@ -263,7 +263,7 @@ class VariableHistory(object):
                    flag = ''
                o.write("# %s %s:%s%s\n# %s\"%s\"\n" % (event['op'], event['file'], event['line'], display_func, flag, re.sub('\n', '\n# ', event['detail'])))
            if len(history) > 1:
-                o.write("# pre-expansion value:\n")
+                o.write("# computed:\n")
                o.write('# "%s"\n' % (commentVal))
        else:
            o.write("#\n# $%s\n# [no history recorded]\n#\n" % var)
@@ -296,14 +296,9 @@ class VariableHistory(object):
            self.variables[var] = []

class DataSmart(MutableMapping):
-    def __init__(self, special = None, seen = None ):
+    def __init__(self, special = COWDictBase.copy(), seen = COWDictBase.copy() ):
        self.dict = {}

-        if special is None:
-            special = COWDictBase.copy()
-        if seen is None:
-            seen = COWDictBase.copy()
-
        self.inchistory = IncludeHistory()
        self.varhistory = VariableHistory(self)
        self._tracking = False
@@ -339,7 +334,7 @@ class DataSmart(MutableMapping):
                    break
            except ExpansionError:
                raise
-            except bb.parse.SkipRecipe:
+            except bb.parse.SkipPackage:
                raise
            except Exception as exc:
                raise ExpansionError(varname, s, exc)
@@ -518,15 +513,10 @@ class DataSmart(MutableMapping):
    def _setvar_update_overrides(self, var):
        # aka pay the cookie monster
        override = var[var.rfind('_')+1:]
-        shortvar = var[:var.rfind('_')]
-        while override:
+        if len(override) > 0:
            if override not in self._seen_overrides:
                self._seen_overrides[override] = set()
            self._seen_overrides[override].add( var )
-            override = None
-            if "_" in shortvar:
-                override = var[shortvar.rfind('_')+1:]
-                shortvar = var[:shortvar.rfind('_')]

    def getVar(self, var, expand=False, noweakdefault=False):
        return self.getVarFlag(var, "_content", expand, noweakdefault)
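The removed while loop (the newer side) registers every '_'-separated suffix of the variable name, not just the final component as the older if does. Pulled out as a hypothetical standalone helper that mirrors the loop:

def override_suffixes(var):
    # e.g. "FOO_linux_arm" -> ["arm", "linux_arm"]
    # (like the original, this assumes var contains at least one underscore)
    seen = []
    override = var[var.rfind('_')+1:]
    shortvar = var[:var.rfind('_')]
    while override:
        seen.append(override)
        override = None
        if "_" in shortvar:
            override = var[shortvar.rfind('_')+1:]
            shortvar = var[:shortvar.rfind('_')]
    return seen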
@@ -594,7 +584,7 @@ class DataSmart(MutableMapping):
            self._makeShadowCopy(var)
        self.dict[var][flag] = value

-        if flag == "_defaultval" and '_' in var:
+        if flag == "defaultval" and '_' in var:
            self._setvar_update_overrides(var)

        if flag == "unexport" or flag == "export":
@@ -610,8 +600,8 @@ class DataSmart(MutableMapping):
        if local_var is not None:
            if flag in local_var:
                value = copy.copy(local_var[flag])
-            elif flag == "_content" and "_defaultval" in local_var and not noweakdefault:
-                value = copy.copy(local_var["_defaultval"])
+            elif flag == "_content" and "defaultval" in local_var and not noweakdefault:
+                value = copy.copy(local_var["defaultval"])
        if expand and value:
            # Only getvar (flag == _content) hits the expand cache
            cachename = None
@@ -621,10 +611,8 @@ class DataSmart(MutableMapping):
                cachename = var + "[" + flag + "]"
            value = self.expand(value, cachename)
        if value and flag == "_content" and local_var is not None and "_removeactive" in local_var:
-            removes = [self.expand(r).split() for r in local_var["_removeactive"]]
-            removes = reduce(lambda a, b: a+b, removes, [])
-            filtered = filter(lambda v: v not in removes,
-                              value.split())
+            filtered = filter(lambda v: v not in local_var["_removeactive"],
+                              value.split(" "))
            value = " ".join(filtered)
        if expand:
            # We need to ensure the expand cache has the correct value
@@ -746,16 +734,12 @@ class DataSmart(MutableMapping):
                yield key

    def __iter__(self):
-        deleted = set()
        def keylist(d):
            klist = set()
            for key in d:
                if key == "_data":
                    continue
-                if key in deleted:
-                    continue
-                if not d[key]:
-                    deleted.add(key)
-                    continue
                klist.add(key)

@@ -55,7 +55,6 @@ def get_class_handlers():
    return _handlers

def set_class_handlers(h):
-    global _handlers
    _handlers = h

def clean_class_handlers():
@@ -68,13 +67,12 @@ _ui_logfilters = {}
_ui_handler_seq = 0
_event_handler_map = {}
_catchall_handlers = {}
-_eventfilter = None

def execute_handler(name, handler, event, d):
    event.data = d
    try:
        ret = handler(event)
-    except (bb.parse.SkipRecipe, bb.BBHandledException):
+    except bb.parse.SkipPackage:
        raise
    except Exception:
        etype, value, tb = sys.exc_info()
@@ -96,10 +94,10 @@ def fire_class_handlers(event, d):
    evt_hmap = _event_handler_map.get(eid, {})
    for name, handler in _handlers.iteritems():
        if name in _catchall_handlers or name in evt_hmap:
-            if _eventfilter:
-                if not _eventfilter(name, handler, event, d):
-                    continue
-            execute_handler(name, handler, event, d)
+            try:
+                execute_handler(name, handler, event, d)
+            except Exception:
+                continue

ui_queue = []
@atexit.register
@@ -209,10 +207,6 @@ def remove(name, handler):
    """Remove an Event handler"""
    _handlers.pop(name)

-def set_eventfilter(func):
-    global _eventfilter
-    _eventfilter = func
-
def register_UIHhandler(handler):
    bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1
    _ui_handlers[_ui_handler_seq] = handler
@@ -603,11 +597,11 @@ class MetadataEvent(Event):
    def __init__(self, eventtype, eventdata):
        Event.__init__(self)
        self.type = eventtype
-        self._localdata = eventdata
+        self.data = eventdata

class SanityCheck(Event):
    """
-    Event to run sanity checks, either raise errors or generate events as return status.
+    Event to runs sanity checks, either raise errors or generate events as return status.
    """
    def __init__(self, generateevents = True):
        Event.__init__(self)
@@ -615,7 +609,7 @@ class SanityCheck(Event):

class SanityCheckPassed(Event):
    """
-    Event to indicate sanity check has passed
+    Event to indicate sanity check is passed
    """

class SanityCheckFailed(Event):

@@ -45,13 +45,6 @@ _checksum_cache = bb.checksum.FileChecksumCache()

logger = logging.getLogger("BitBake.Fetcher")

-try:
-    import cPickle as pickle
-except ImportError:
-    import pickle
-    logger.info("Importing cPickle failed. "
-                "Falling back to a very slow implementation.")
-
class BBFetchException(Exception):
    """Class all fetch exceptions inherit from"""
    def __init__(self, message):
@@ -63,11 +56,8 @@ class BBFetchException(Exception):

class MalformedUrl(BBFetchException):
    """Exception raised when encountering an invalid url"""
-    def __init__(self, url, message=''):
-        if message:
-            msg = message
-        else:
-            msg = "The URL: '%s' is invalid and cannot be interpreted" % url
+    def __init__(self, url):
+        msg = "The URL: '%s' is invalid and cannot be interpreted" % url
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (url,)
@@ -381,11 +371,8 @@ def decodeurl(url):
    p = {}
    if parm:
        for s in parm.split(';'):
-            if s:
-                if not '=' in s:
-                    raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s))
-                s1, s2 = s.split('=')
-                p[s1] = s2
+            s1, s2 = s.split('=')
+            p[s1] = s2

    return type, host, urllib.unquote(path), user, pswd, p
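The removed guard (newer side) turns the bare ValueError that s.split('=') would otherwise raise into a MalformedUrl naming the offending parameter. The parameter grammar is small enough to sketch standalone (illustrative only, not the full decodeurl()):

def decode_params(parm):
    # parm is the trailing ";a=b;c=d" portion of a BitBake fetch URL
    p = {}
    for s in parm.split(';'):
        if not s:
            continue
        if '=' not in s:
            raise ValueError("parameter %r does not specify a value (missing '=')" % s)
        s1, s2 = s.split('=', 1)
        p[s1] = s2
    return p

assert decode_params("protocol=git;branch=master") == {"protocol": "git", "branch": "master"}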
@@ -532,7 +519,7 @@ def fetcher_compare_revisions(d):
def mirror_from_string(data):
    return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ]

-def verify_checksum(ud, d, precomputed={}):
+def verify_checksum(ud, d):
    """
    verify the MD5 and SHA256 checksum for downloaded src

@@ -540,33 +527,18 @@ def verify_checksum(ud, d, precomputed={}):
    the downloaded file, or if BB_STRICT_CHECKSUM is set and there are no
    checksums specified.

-    Returns a dict of checksums that can be stored in a done stamp file and
-    passed in as precomputed parameter in a later call to avoid re-computing
-    the checksums from the file. This allows verifying the checksums of the
-    file against those in the recipe each time, rather than only after
-    downloading. See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571.
    """

-    _MD5_KEY = "md5"
-    _SHA256_KEY = "sha256"
+    if not ud.method.supports_checksum(ud):
+        return

-    if ud.ignore_checksums or not ud.method.supports_checksum(ud):
-        return {}
-
-    if _MD5_KEY in precomputed:
-        md5data = precomputed[_MD5_KEY]
-    else:
-        md5data = bb.utils.md5_file(ud.localpath)
-
-    if _SHA256_KEY in precomputed:
-        sha256data = precomputed[_SHA256_KEY]
-    else:
-        sha256data = bb.utils.sha256_file(ud.localpath)
+    md5data = bb.utils.md5_file(ud.localpath)
+    sha256data = bb.utils.sha256_file(ud.localpath)

    if ud.method.recommends_checksum(ud):
        # If strict checking enabled and neither sum defined, raise error
-        strict = d.getVar("BB_STRICT_CHECKSUM", True) or "0"
-        if (strict == "1") and not (ud.md5_expected or ud.sha256_expected):
+        strict = d.getVar("BB_STRICT_CHECKSUM", True) or None
+        if strict and not (ud.md5_expected or ud.sha256_expected):
            logger.error('No checksum specified for %s, please add at least one to the recipe:\n'
                         'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
                         (ud.localpath, ud.md5_name, md5data,
@@ -611,72 +583,6 @@ def verify_checksum(ud, d, precomputed={}):
    if len(msg):
        raise ChecksumError('Checksum mismatch!%s' % msg, ud.url, md5data)

-    return {
-        _MD5_KEY: md5data,
-        _SHA256_KEY: sha256data
-    }
-
-
-def verify_donestamp(ud, d):
-    """
-    Check whether the done stamp file has the right checksums (if the fetch
-    method supports them). If it doesn't, delete the done stamp and force
-    a re-download.
-
-    Returns True, if the donestamp exists and is valid, False otherwise. When
-    returning False, any existing done stamps are removed.
-    """
-    if not os.path.exists(ud.donestamp):
-        return False
-
-    if not ud.method.supports_checksum(ud):
-        # done stamp exists, checksums not supported; assume the local file is
-        # current
-        return True
-
-    if not os.path.exists(ud.localpath):
-        # done stamp exists, but the downloaded file does not; the done stamp
-        # must be incorrect, re-trigger the download
-        bb.utils.remove(ud.donestamp)
-        return False
-
-    precomputed_checksums = {}
-    # Only re-use the precomputed checksums if the donestamp is newer than the
-    # file. Do not rely on the mtime of directories, though. If ud.localpath is
-    # a directory, there will probably not be any checksums anyway.
-    if (os.path.isdir(ud.localpath) or
-            os.path.getmtime(ud.localpath) < os.path.getmtime(ud.donestamp)):
-        try:
-            with open(ud.donestamp, "rb") as cachefile:
-                pickled = pickle.Unpickler(cachefile)
-                precomputed_checksums.update(pickled.load())
-        except Exception as e:
-            # Avoid the warnings on the upgrade path from emtpy done stamp
-            # files to those containing the checksums.
-            if not isinstance(e, EOFError):
-                # Ignore errors, they aren't fatal
-                logger.warn("Couldn't load checksums from donestamp %s: %s "
-                            "(msg: %s)" % (ud.donestamp, type(e).__name__,
-                                           str(e)))
-
-    try:
-        checksums = verify_checksum(ud, d, precomputed_checksums)
-        # If the cache file did not have the checksums, compute and store them
-        # as an upgrade path from the previous done stamp file format.
-        if checksums != precomputed_checksums:
-            with open(ud.donestamp, "wb") as cachefile:
-                p = pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL)
-                p.dump(checksums)
-        return True
-    except ChecksumError as e:
-        # Checksums failed to verify, trigger re-download and remove the
-        # incorrect stamp file.
-        logger.warn("Checksum mismatch for local file %s\n"
-                    "Cleaning and trying again." % ud.localpath)
-        rename_bad_checksum(ud, e.checksum)
-        bb.utils.remove(ud.donestamp)
-        return False
-
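The contract removed here is worth spelling out: on the newer side the done stamp is no longer an empty marker file but a pickled dict of checksums, with empty legacy stamps still tolerated on the upgrade path. A toy version of that store/load round trip (file layout and keys are illustrative):

import pickle

def write_donestamp(path, md5data, sha256data):
    with open(path, "wb") as f:
        pickle.dump({"md5": md5data, "sha256": sha256data}, f, pickle.HIGHEST_PROTOCOL)

def read_donestamp(path):
    # An empty old-format stamp yields EOFError: treat it as "no checksums yet".
    try:
        with open(path, "rb") as f:
            return pickle.load(f)
    except (IOError, EOFError):
        return {}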
def update_stamp(ud, d):
|
||||
"""
|
||||
@@ -691,11 +597,8 @@ def update_stamp(ud, d):
|
||||
# Errors aren't fatal here
|
||||
pass
|
||||
else:
|
||||
checksums = verify_checksum(ud, d)
|
||||
# Store the checksums for later re-verification against the recipe
|
||||
with open(ud.donestamp, "wb") as cachefile:
|
||||
p = pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL)
|
||||
p.dump(checksums)
|
||||
verify_checksum(ud, d)
|
||||
open(ud.donestamp, 'w').close()
|
||||
|
||||
def subprocess_setup():
|
||||
# Python installs a SIGPIPE handler by default. This is usually not what
|
||||
@@ -711,13 +614,11 @@ def get_autorev(d):
|
||||
|
||||
def get_srcrev(d):
|
||||
"""
|
||||
Return the revsion string, usually for use in the version string (PV) of the current package
|
||||
Return the version string for the current package
|
||||
(usually to be used as PV)
|
||||
Most packages usually only have one SCM so we just pass on the call.
|
||||
In the multi SCM case, we build a value based on SRCREV_FORMAT which must
|
||||
have been set.
|
||||
|
||||
The idea here is that we put the string "AUTOINC+" into return value if the revisions are not
|
||||
incremental, other code is then responsible for turning that into an increasing value (if needed)
|
||||
"""
|
||||
|
||||
scms = []
|
||||
@@ -896,7 +797,7 @@ def try_mirror_url(origud, ud, ld, check = False):

        os.chdir(ld.getVar("DL_DIR", True))

        if not verify_donestamp(ud, ld) or ud.method.need_update(ud, ld):
        if not os.path.exists(ud.donestamp) or ud.method.need_update(ud, ld):
            ud.method.download(ud, ld)
            if hasattr(ud.method,"build_mirror_data"):
                ud.method.build_mirror_data(ud, ld)
@@ -912,13 +813,12 @@ def try_mirror_url(origud, ud, ld, check = False):
            dldir = ld.getVar("DL_DIR", True)
            if origud.mirrortarball and os.path.basename(ud.localpath) == os.path.basename(origud.mirrortarball) \
                    and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
                # Create donestamp in old format to avoid triggering a re-download
                bb.utils.mkdirhier(os.path.dirname(ud.donestamp))
                open(ud.donestamp, 'w').close()
                dest = os.path.join(dldir, os.path.basename(ud.localpath))
                if not os.path.exists(dest):
                    os.symlink(ud.localpath, dest)
                if not verify_donestamp(origud, ld) or origud.method.need_update(origud, ld):
                if not os.path.exists(origud.donestamp) or origud.method.need_update(origud, ld):
                    origud.method.download(origud, ld)
                    if hasattr(origud.method,"build_mirror_data"):
                        origud.method.build_mirror_data(origud, ld)
@@ -1030,21 +930,22 @@ def get_checksum_file_list(d):
        ud = fetch.ud[u]

        if ud and isinstance(ud.method, local.Local):
            paths = ud.method.localpaths(ud, d)
            for f in paths:
                pth = ud.decodedurl
                if '*' in pth:
                    f = os.path.join(os.path.abspath(f), pth)
                if f.startswith(dl_dir):
                    # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
                    if os.path.exists(f):
                        bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN', True), os.path.basename(f)))
                    else:
                        bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN', True), os.path.basename(f)))
                filelist.append(f + ":" + str(os.path.exists(f)))
            ud.setup_localpath(d)
            f = ud.localpath
            pth = ud.decodedurl
            if '*' in pth:
                f = os.path.join(os.path.abspath(f), pth)
            if f.startswith(dl_dir):
                # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
                if os.path.exists(f):
                    bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN', True), os.path.basename(f)))
                else:
                    bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN', True), os.path.basename(f)))
            filelist.append(f)

    return " ".join(filelist)


def get_file_checksums(filelist, pn):
    """Get a list of the checksums for a list of local files

@@ -1074,10 +975,7 @@ def get_file_checksums(filelist, pn):

    checksums = []
    for pth in filelist.split():
        exist = pth.split(":")[1]
        if exist == "False":
            continue
        pth = pth.split(":")[0]
        checksum = None
        if '*' in pth:
            # Handle globs
            for f in glob.glob(pth):
@@ -1085,11 +983,16 @@ def get_file_checksums(filelist, pn):
                    checksums.extend(checksum_dir(f))
                else:
                    checksum = checksum_file(f)
                    checksums.append((f, checksum))
                    if checksum:
                        checksums.append((f, checksum))
            continue
        elif os.path.isdir(pth):
            checksums.extend(checksum_dir(pth))
            continue
        else:
            checksum = checksum_file(pth)

        if checksum:
            checksums.append((pth, checksum))

    checksums.sort(key=operator.itemgetter(1))
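
The two hunks above introduce a "path:exists" encoding: get_checksum_file_list appends each candidate path with its existence flag, and get_file_checksums skips the entries flagged False. A tiny standalone sketch of both sides (paths invented):

import os

# Encoding side, as in get_checksum_file_list
filelist = []
for f in ["/srv/files/defconfig", "/srv/files/missing.patch"]:  # hypothetical paths
    filelist.append(f + ":" + str(os.path.exists(f)))
encoded = " ".join(filelist)

# Decoding side, as in get_file_checksums
for entry in encoded.split():
    pth, exist = entry.split(":")[0], entry.split(":")[1]
    if exist == "False":
        continue
    print("would checksum", pth)
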
@@ -1136,7 +1039,6 @@ class FetchData(object):
                self.sha256_expected = None
            else:
                self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)
        self.ignore_checksums = False

        self.names = self.parm.get("name",'default').split(',')

@@ -1293,9 +1195,9 @@ class FetchMethod(object):
            bb.fatal("Invalid value for 'unpack' parameter for %s: %s" %
                     (file, urldata.parm.get('unpack')))

        base, ext = os.path.splitext(file)
        if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz']:
            efile = os.path.join(rootdir, os.path.basename(base))
        dots = file.split(".")
        if dots[-1] in ['gz', 'bz2', 'Z', 'xz']:
            efile = os.path.join(rootdir, os.path.basename('.'.join(dots[0:-1])))
        else:
            efile = file
        cmd = None
@@ -1315,10 +1217,6 @@ class FetchMethod(object):
                cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.xz'):
                cmd = 'xz -dc %s > %s' % (file, efile)
            elif file.endswith('.tar.lz'):
                cmd = 'lzip -dc %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.lz'):
                cmd = 'lzip -dc %s > %s' % (file, efile)
            elif file.endswith('.zip') or file.endswith('.jar'):
                try:
                    dos = bb.utils.to_boolean(urldata.parm.get('dos'), False)
@@ -1364,13 +1262,8 @@ class FetchMethod(object):
                # items. So, only do so for file:// entries.
                if urldata.type == "file" and urldata.path.find("/") != -1:
                    destdir = urldata.path.rsplit("/", 1)[0]
                    if urldata.parm.get('subdir') != None:
                        destdir = urldata.parm.get('subdir') + "/" + destdir
                else:
                    if urldata.parm.get('subdir') != None:
                        destdir = urldata.parm.get('subdir')
                    else:
                        destdir = "."
                    destdir = "."
                bb.utils.mkdirhier("%s/%s" % (rootdir, destdir))
                cmd = 'cp -f %s %s/%s/' % (file, rootdir, destdir)

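The first hunk above replaces a manual dot-split with os.path.splitext when deriving the decompressed output name. Both forms on a sample archive name, for comparison (paths invented):

import os

file = "/downloads/example-1.0.tar.gz"  # hypothetical archive path
rootdir = "/unpack"

# New form: splitext peels off exactly the final extension
base, ext = os.path.splitext(file)
if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz']:
    efile = os.path.join(rootdir, os.path.basename(base))

# Old form: split on dots and rejoin all but the last component
dots = file.split(".")
if dots[-1] in ['gz', 'bz2', 'Z', 'xz']:
    efile_old = os.path.join(rootdir, os.path.basename('.'.join(dots[0:-1])))

# Both yield "/unpack/example-1.0.tar"; the new form also covers ".lz"
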
@@ -1514,7 +1407,7 @@ class Fetch(object):
            try:
                self.d.setVar("BB_NO_NETWORK", network)

                if verify_donestamp(ud, self.d) and not m.need_update(ud, self.d):
                if os.path.exists(ud.donestamp) and not m.need_update(ud, self.d):
                    localpath = ud.localpath
                elif m.try_premirror(ud, self.d):
                    logger.debug(1, "Trying PREMIRRORS")
@@ -1527,7 +1420,7 @@ class Fetch(object):
                os.chdir(self.d.getVar("DL_DIR", True))

                firsterr = None
                if not localpath and ((not verify_donestamp(ud, self.d)) or m.need_update(ud, self.d)):
                if not localpath and ((not os.path.exists(ud.donestamp)) or m.need_update(ud, self.d)):
                    try:
                        logger.debug(1, "Trying Upstream")
                        m.download(ud, self.d)
@@ -1665,7 +1558,6 @@ from . import bzr
from . import hg
from . import osc
from . import repo
from . import clearcase

methods.append(local.Local())
methods.append(wget.Wget())
@@ -1681,4 +1573,3 @@ methods.append(bzr.Bzr())
methods.append(hg.Hg())
methods.append(osc.Osc())
methods.append(repo.Repo())
methods.append(clearcase.ClearCase())

@@ -1,263 +0,0 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' clearcase implementation

The clearcase fetcher is used to retrieve files from a ClearCase repository.

Usage in the recipe:

    SRC_URI = "ccrc://cc.example.org/ccrc;vob=/example_vob;module=/example_module"
    SRCREV = "EXAMPLE_CLEARCASE_TAG"
    PV = "${@d.getVar("SRCREV").replace("/", "+")}"

The fetcher uses the rcleartool or cleartool remote client, depending on which one is available.

Supported SRC_URI options are:

- vob
    (required) The name of the clearcase VOB (with prepending "/")

- module
    The module in the selected VOB (with prepending "/")

    The module and vob parameters are combined to create
    the following load rule in the view config spec:
                load <vob><module>

- proto
    http or https

Related variables:

    CCASE_CUSTOM_CONFIG_SPEC
            Write a config spec to this variable in your recipe to use it instead
            of the default config spec generated by this fetcher.
            Please note that the SRCREV loses its functionality if you specify
            this variable. SRCREV is still used to label the archive after a fetch,
            but it doesn't define what's fetched.

User credentials:
    cleartool:
            The login of cleartool is handled by the system. No special steps needed.

    rcleartool:
            In order to use rcleartool with authenticated users an `rcleartool login` is
            necessary before using the fetcher.
"""
# Copyright (C) 2014 Siemens AG
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#

import os
import sys
import shutil
import bb
from bb import data
from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import runfetchcmd
from bb.fetch2 import logger
from distutils import spawn

class ClearCase(FetchMethod):
    """Class to fetch urls via 'clearcase'"""
    def init(self, d):
        pass

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with Clearcase.
        """
        return ud.type in ['ccrc']

    def debug(self, msg):
        logger.debug(1, "ClearCase: %s", msg)

    def urldata_init(self, ud, d):
        """
        init ClearCase specific variable within url data
        """
        ud.proto = "https"
        if 'protocol' in ud.parm:
            ud.proto = ud.parm['protocol']
        if not ud.proto in ('http', 'https'):
            raise fetch2.ParameterError("Invalid protocol type", ud.url)

        ud.vob = ''
        if 'vob' in ud.parm:
            ud.vob = ud.parm['vob']
        else:
            msg = ud.url+": vob must be defined so the fetcher knows what to get."
            raise MissingParameterError('vob', msg)

        if 'module' in ud.parm:
            ud.module = ud.parm['module']
        else:
            ud.module = ""

        ud.basecmd = d.getVar("FETCHCMD_ccrc", True) or spawn.find_executable("cleartool") or spawn.find_executable("rcleartool")

        if data.getVar("SRCREV", d, True) == "INVALID":
            raise FetchError("Set a valid SRCREV for the clearcase fetcher in your recipe, e.g. SRCREV = \"/main/LATEST\" or any other label of your choice.")

        ud.label = d.getVar("SRCREV")
        ud.customspec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC", True)

        ud.server = "%s://%s%s" % (ud.proto, ud.host, ud.path)

        ud.identifier = "clearcase-%s%s-%s" % ( ud.vob.replace("/", ""),
                                                ud.module.replace("/", "."),
                                                ud.label.replace("/", "."))

        ud.viewname = "%s-view%s" % (ud.identifier, d.getVar("DATETIME", d, True))
        ud.csname = "%s-config-spec" % (ud.identifier)
        ud.ccasedir = os.path.join(data.getVar("DL_DIR", d, True), ud.type)
        ud.viewdir = os.path.join(ud.ccasedir, ud.viewname)
        ud.configspecfile = os.path.join(ud.ccasedir, ud.csname)
        ud.localfile = "%s.tar.gz" % (ud.identifier)

        self.debug("host = %s" % ud.host)
        self.debug("path = %s" % ud.path)
        self.debug("server = %s" % ud.server)
        self.debug("proto = %s" % ud.proto)
        self.debug("type = %s" % ud.type)
        self.debug("vob = %s" % ud.vob)
        self.debug("module = %s" % ud.module)
        self.debug("basecmd = %s" % ud.basecmd)
        self.debug("label = %s" % ud.label)
        self.debug("ccasedir = %s" % ud.ccasedir)
        self.debug("viewdir = %s" % ud.viewdir)
        self.debug("viewname = %s" % ud.viewname)
        self.debug("configspecfile = %s" % ud.configspecfile)
        self.debug("localfile = %s" % ud.localfile)

        ud.localfile = os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)

    def _build_ccase_command(self, ud, command):
        """
        Build up a commandline based on ud
        command is: mkview, setcs, rmview
        """
        options = []

        if "rcleartool" in ud.basecmd:
            options.append("-server %s" % ud.server)

        basecmd = "%s %s" % (ud.basecmd, command)

        if command is 'mkview':
            if not "rcleartool" in ud.basecmd:
                # Cleartool needs a -snapshot view
                options.append("-snapshot")
            options.append("-tag %s" % ud.viewname)
            options.append(ud.viewdir)

        elif command is 'rmview':
            options.append("-force")
            options.append("%s" % ud.viewdir)

        elif command is 'setcs':
            options.append("-overwrite")
            options.append(ud.configspecfile)

        else:
            raise FetchError("Invalid ccase command %s" % command)

        ccasecmd = "%s %s" % (basecmd, " ".join(options))
        self.debug("ccasecmd = %s" % ccasecmd)
        return ccasecmd

    def _write_configspec(self, ud, d):
        """
        Create config spec file (ud.configspecfile) for ccase view
        """
        config_spec = ""
        custom_config_spec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC", d)
        if custom_config_spec is not None:
            for line in custom_config_spec.split("\\n"):
                config_spec += line+"\n"
            bb.warn("A custom config spec has been set, SRCREV is only relevant for the tarball name.")
        else:
            config_spec += "element * CHECKEDOUT\n"
            config_spec += "element * %s\n" % ud.label
            config_spec += "load %s%s\n" % (ud.vob, ud.module)

        logger.info("Using config spec: \n%s" % config_spec)

        with open(ud.configspecfile, 'w') as f:
            f.write(config_spec)

    def _remove_view(self, ud, d):
        if os.path.exists(ud.viewdir):
            os.chdir(ud.ccasedir)
            cmd = self._build_ccase_command(ud, 'rmview');
            logger.info("cleaning up [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
            bb.fetch2.check_network_access(d, cmd, ud.url)
            output = runfetchcmd(cmd, d)
            logger.info("rmview output: %s", output)

    def need_update(self, ud, d):
        if ("LATEST" in ud.label) or (ud.customspec and "LATEST" in ud.customspec):
            ud.identifier += "-%s" % d.getVar("DATETIME",d, True)
            return True
        if os.path.exists(ud.localpath):
            return False
        return True

    def supports_srcrev(self):
        return True

    def sortable_revision(self, ud, d, name):
        return False, ud.identifier

    def download(self, ud, d):
        """Fetch url"""

        # Make a fresh view
        bb.utils.mkdirhier(ud.ccasedir)
        self._write_configspec(ud, d)
        cmd = self._build_ccase_command(ud, 'mkview')
        logger.info("creating view [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
        bb.fetch2.check_network_access(d, cmd, ud.url)
        try:
            runfetchcmd(cmd, d)
        except FetchError as e:
            if "CRCLI2008E" in e.msg:
                raise FetchError("%s\n%s\n" % (e.msg, "Call `rcleartool login` in your console to authenticate to the clearcase server before running bitbake."))
            else:
                raise e

        # Set configspec: Setting the configspec effectively fetches the files as defined in the configspec
        os.chdir(ud.viewdir)
        cmd = self._build_ccase_command(ud, 'setcs');
        logger.info("fetching data [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
        bb.fetch2.check_network_access(d, cmd, ud.url)
        output = runfetchcmd(cmd, d)
        logger.info("%s", output)

        # Copy the configspec to the viewdir so we have it in our source tarball later
        shutil.copyfile(ud.configspecfile, os.path.join(ud.viewdir, ud.csname))

        # Clean clearcase meta-data before tar

        runfetchcmd('tar -czf "%s" .' % (ud.localpath), d, cleanup = [ud.localpath])

        # Clean up so we can create a new view next time
        self.clean(ud, d);

    def clean(self, ud, d):
        self._remove_view(ud, d)
        bb.utils.remove(ud.configspecfile)

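To make the default behaviour concrete: with the docstring's example values (SRCREV = "EXAMPLE_CLEARCASE_TAG", vob=/example_vob, module=/example_module), the config spec that _write_configspec generates would be:

element * CHECKEDOUT
element * EXAMPLE_CLEARCASE_TAG
load /example_vob/example_module
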
@@ -67,7 +67,6 @@ Supported SRC_URI options are:
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os
import re
import bb
from bb import data
from bb.fetch2 import FetchMethod
@@ -124,7 +123,7 @@ class Git(FetchMethod):
                ud.branches[name] = branch
                ud.unresolvedrev[name] = branch

        ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git -c core.fsyncobjectfiles=0"
        ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git"

        ud.write_tarballs = ((data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) or "0") != "0") or ud.rebaseable

@@ -276,7 +275,7 @@ class Git(FetchMethod):
            os.symlink(ud.clonedir, indirectiondir)
            clonedir = indirectiondir

        runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, cloneflags, clonedir, destdir), d)
        runfetchcmd("git clone %s %s/ %s" % (cloneflags, clonedir, destdir), d)
        if not ud.nocheckout:
            os.chdir(destdir)
            if subdir != "":
@@ -340,63 +339,17 @@ class Git(FetchMethod):
        """
        Compute the HEAD revision for the url
        """
        output = self._lsremote(ud, d, "")
        # Tags of the form ^{} may not work, need to fallback to other form
        if ud.unresolvedrev[name][:5] == "refs/":
            head = ud.unresolvedrev[name]
            tag = ud.unresolvedrev[name]
        else:
            head = "refs/heads/%s" % ud.unresolvedrev[name]
            tag = "refs/tags/%s" % ud.unresolvedrev[name]
        for s in [head, tag + "^{}", tag]:
            for l in output.split('\n'):
                if s in l:
                    return l.split()[0]
        raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output" % ud.unresolvedrev[name])

    def latest_versionstring(self, ud, d):
        """
        Compute the latest release name like "x.y.x" in "x.y.x+gitHASH"
        by searching through the tags output of ls-remote, comparing
        versions and returning the highest match.
        """
        verstring = ""
        tagregex = re.compile(d.getVar('GITTAGREGEX', True) or "(?P<pver>([0-9][\.|_]?)+)")
        try:
            output = self._lsremote(ud, d, "refs/tags/*^{}")
        except bb.fetch2.FetchError or bb.fetch2.NetworkAccess:
            return ""

        for line in output.split("\n"):
            if not line:
                break

            line = line.split("/")[-1]
            # Ignore non-released branches
            m = re.search("(alpha|beta|rc|final)+", line)
            if m:
                continue

            # search for version in the line
            tag = tagregex.search(line)
            if tag == None:
                continue

            tag = tag.group('pver')
            tag = tag.replace("_", ".")

            if verstring and bb.utils.vercmp(("0", tag, ""), ("0", verstring, "")) < 0:
                continue
            verstring = tag

        return verstring
        search = "refs/heads/%s refs/tags/%s^{}" % (ud.unresolvedrev[name], ud.unresolvedrev[name])
        output = self._lsremote(ud, d, search)
        return output.split()[0]

    def _build_revision(self, ud, d, name):
        return ud.revisions[name]

    def checkstatus(self, ud, d):
        fetchcmd = "%s ls-remote %s" % (ud.basecmd, ud.url)
        try:
            self._lsremote(ud, d, "")
            runfetchcmd(fetchcmd, d, quiet=True)
            return True
        except FetchError:
            return False

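The new latest_versionstring above filters ls-remote tag names through GITTAGREGEX, falling back to a digits-and-separators pattern. A small standalone illustration of that extraction and normalization (tag names invented):

import re

# Fallback pattern from the hunk above; recipes can override it via GITTAGREGEX
tagregex = re.compile("(?P<pver>([0-9][\.|_]?)+)")

for line in ["refs/tags/v1.8.3^{}", "refs/tags/v1.9-rc1^{}"]:
    name = line.split("/")[-1]
    if re.search("(alpha|beta|rc|final)+", name):
        continue  # pre-release tags are ignored, as in the fetcher
    m = tagregex.search(name)
    if m:
        print(m.group('pver').replace("_", "."))  # prints: 1.8.3
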
@@ -2,16 +2,6 @@
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' git submodules implementation

Inherits from and extends the Git fetcher to retrieve submodules of a git repository
after cloning.

SRC_URI = "gitsm://<see Git fetcher for syntax>"

See the Git fetcher, git://, for usage documentation.

NOTE: Switching a SRC_URI from "git://" to "gitsm://" requires a clean of your recipe.

"""

# Copyright (C) 2013 Richard Purdie

@@ -123,10 +123,7 @@ class Hg(FetchMethod):
            else:
                cmd = "%s pull" % (basecmd)
        elif command == "update":
            if ud.user and ud.pswd:
                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" update -C %s" % (basecmd, ud.user, ud.pswd, proto, " ".join(options))
            else:
                cmd = "%s update -C %s" % (basecmd, " ".join(options))
            cmd = "%s update --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" -C %s" % (basecmd, ud.user, ud.pswd, proto, " ".join(options))
        else:
            raise FetchError("Invalid hg command %s" % command, ud.url)


@@ -51,41 +51,29 @@ class Local(FetchMethod):
        """
        Return the local filename of a given url assuming a successful fetch.
        """
        return self.localpaths(urldata, d)[-1]

    def localpaths(self, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        """
        searched = []
        path = urldata.decodedurl
        newpath = path
        if path[0] == "/":
            return [path]
        filespath = data.getVar('FILESPATH', d, True)
        if filespath:
            logger.debug(2, "Searching for %s in paths:\n    %s" % (path, "\n    ".join(filespath.split(":"))))
            newpath, hist = bb.utils.which(filespath, path, history=True)
            searched.extend(hist)
        if not newpath:
            filesdir = data.getVar('FILESDIR', d, True)
            if filesdir:
                logger.debug(2, "Searching for %s in path: %s" % (path, filesdir))
                newpath = os.path.join(filesdir, path)
                searched.append(newpath)
        if (not newpath or not os.path.exists(newpath)) and path.find("*") != -1:
            # For expressions using '*', best we can do is take the first directory in FILESPATH that exists
            newpath, hist = bb.utils.which(filespath, ".", history=True)
            searched.extend(hist)
            logger.debug(2, "Searching for %s in path: %s" % (path, newpath))
            return searched
        if not os.path.exists(newpath):
            dldirfile = os.path.join(d.getVar("DL_DIR", True), path)
            logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
            bb.utils.mkdirhier(os.path.dirname(dldirfile))
            searched.append(dldirfile)
            return searched
        return searched
        if path[0] != "/":
            filespath = data.getVar('FILESPATH', d, True)
            if filespath:
                logger.debug(2, "Searching for %s in paths: \n%s" % (path, "\n    ".join(filespath.split(":"))))
                newpath = bb.utils.which(filespath, path)
            if not newpath:
                filesdir = data.getVar('FILESDIR', d, True)
                if filesdir:
                    logger.debug(2, "Searching for %s in path: %s" % (path, filesdir))
                    newpath = os.path.join(filesdir, path)
            if (not newpath or not os.path.exists(newpath)) and path.find("*") != -1:
                # For expressions using '*', best we can do is take the first directory in FILESPATH that exists
                newpath = bb.utils.which(filespath, ".")
                logger.debug(2, "Searching for %s in path: %s" % (path, newpath))
                return newpath
            if not os.path.exists(newpath):
                dldirfile = os.path.join(d.getVar("DL_DIR", True), path)
                logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
                bb.utils.mkdirhier(os.path.dirname(dldirfile))
                return dldirfile
        return newpath

    def need_update(self, ud, d):
        if ud.url.find("*") != -1:

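The reworked localpaths above records every location it tried (FILESPATH entries, then FILESDIR, then a DL_DIR fallback) instead of returning a single path. A condensed standalone sketch of that search order (paths invented, glob case omitted):

import os

def find_local_file(path, filespath, dl_dir):
    searched = []
    for directory in filespath.split(":"):
        candidate = os.path.join(directory, path)
        searched.append(candidate)
        if os.path.exists(candidate):
            return candidate, searched
    # Nothing found: default to a path under DL_DIR, as the local fetcher does
    fallback = os.path.join(dl_dir, path)
    searched.append(fallback)
    return fallback, searched

found, history = find_local_file("defconfig", "/layer/recipe/files:/layer/files", "/downloads")
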
@@ -103,15 +103,22 @@ class Perforce(FetchMethod):
    def urldata_init(self, ud, d):
        (host, path, user, pswd, parm) = Perforce.doparse(ud.url, d)

        base_path = path.replace('/...', '')
        base_path = self._strip_leading_slashes(base_path)

        if "label" in parm:
            version = parm["label"]
        else:
            version = Perforce.getcset(d, path, host, user, pswd, parm)
        # If a label is specified, we use that as our filename

        ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base_path.replace('/', '.'), version), d)
        if "label" in parm:
            ud.localfile = "%s.tar.gz" % (parm["label"])
            return

        base = path
        which = path.find('/...')
        if which != -1:
            base = path[:which-1]

        base = self._strip_leading_slashes(base)

        cset = Perforce.getcset(d, path, host, user, pswd, parm)

        ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base.replace('/', '.'), cset), d)

    def download(self, ud, d):
        """

@@ -87,8 +87,7 @@ class SSH(FetchMethod):
        m = __pattern__.match(urldata.url)
        path = m.group('path')
        host = m.group('host')
        urldata.localpath = os.path.join(d.getVar('DL_DIR', True),
                os.path.basename(os.path.normpath(path)))
        urldata.localpath = os.path.join(d.getVar('DL_DIR', True), os.path.basename(path))

    def download(self, urldata, d):
        dldir = d.getVar('DL_DIR', True)

@@ -101,8 +101,7 @@ class Svn(FetchMethod):
            suffix = "@%s" % (ud.revision)

        if command == "fetch":
            transportuser = ud.parm.get("transportuser", "")
            svncmd = "%s co %s %s://%s%s/%s%s %s" % (ud.basecmd, " ".join(options), proto, transportuser, svnroot, ud.module, suffix, ud.module)
            svncmd = "%s co %s %s://%s/%s%s %s" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module, suffix, ud.module)
        elif command == "update":
            svncmd = "%s update %s" % (ud.basecmd, " ".join(options))
        else:

@@ -25,9 +25,6 @@ BitBake build tools.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig

import re
import tempfile
import subprocess
import os
import logging
import bb
@@ -37,7 +34,6 @@ from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import logger
from bb.fetch2 import runfetchcmd
from bs4 import BeautifulSoup

class Wget(FetchMethod):
    """Class to fetch urls via 'wget'"""
@@ -108,274 +104,3 @@ class Wget(FetchMethod):
        self._runwget(ud, d, fetchcmd, True)

        return True

|
||||
"""
|
||||
Find and group name, version and archive type in the given string s
|
||||
"""
|
||||
|
||||
m = regex.search(s)
|
||||
if m:
|
||||
pname = ''
|
||||
pver = ''
|
||||
ptype = ''
|
||||
|
||||
mdict = m.groupdict()
|
||||
if 'name' in mdict.keys():
|
||||
pname = mdict['name']
|
||||
if 'pver' in mdict.keys():
|
||||
pver = mdict['pver']
|
||||
if 'type' in mdict.keys():
|
||||
ptype = mdict['type']
|
||||
|
||||
bb.debug(3, "_parse_path: %s, %s, %s" % (pname, pver, ptype))
|
||||
|
||||
return (pname, pver, ptype)
|
||||
|
||||
return None
|
||||
|
||||
def _modelate_version(self, version):
|
||||
if version[0] in ['.', '-']:
|
||||
if version[1].isdigit():
|
||||
version = version[1] + version[0] + version[2:len(version)]
|
||||
else:
|
||||
version = version[1:len(version)]
|
||||
|
||||
version = re.sub('-', '.', version)
|
||||
version = re.sub('_', '.', version)
|
||||
version = re.sub('(rc)+', '.1000.', version)
|
||||
version = re.sub('(beta)+', '.100.', version)
|
||||
version = re.sub('(alpha)+', '.10.', version)
|
||||
if version[0] == 'v':
|
||||
version = version[1:len(version)]
|
||||
return version
|
||||
|
||||
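_modelate_version maps pre-release markers onto comparable numeric segments so bb.utils.vercmp can order them; a few hand-worked examples of the substitutions above:

# "1.2rc1"   -> "1.2.1000.1"  (rc   becomes .1000.)
# "1.2beta3" -> "1.2.100.3"   (beta becomes .100.)
# "v1_2-4"   -> "1.2.4"       (v prefix dropped; _ and - become .)
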
    def _vercmp(self, old, new):
        """
        Check whether 'new' is newer than 'old' version. We use existing vercmp() for the
        purpose. PE is cleared in comparison as it's not for build, and PR is cleared too
        for simplicity as it's somehow difficult to get from various upstream format
        """

        (oldpn, oldpv, oldsuffix) = old
        (newpn, newpv, newsuffix) = new

        """
        Check for a new suffix type that we have never heard of before
        """
        if (newsuffix):
            m = self.suffix_regex_comp.search(newsuffix)
            if not m:
                bb.warn("%s has a possible unknown suffix: %s" % (newpn, newsuffix))
                return False

        """
        Not our package so ignore it
        """
        if oldpn != newpn:
            return False

        oldpv = self._modelate_version(oldpv)
        newpv = self._modelate_version(newpv)

        return bb.utils.vercmp(("0", oldpv, ""), ("0", newpv, ""))

    def _fetch_index(self, uri, ud, d):
        """
        Run fetch checkstatus to get directory information
        """
        f = tempfile.NamedTemporaryFile()

        agent = "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12"
        fetchcmd = self.basecmd
        fetchcmd += " -O " + f.name + " --user-agent='" + agent + "' '" + uri + "'"
        try:
            self._runwget(ud, d, fetchcmd, True)
            fetchresult = f.read()
        except bb.fetch2.BBFetchException:
            fetchresult = ""

        f.close()
        return fetchresult

    def _check_latest_version(self, url, package, package_regex, current_version, ud, d):
        """
        Return the latest version of a package inside a given directory path
        If error or no version, return ""
        """
        valid = 0
        version = ['', '', '']

        bb.debug(3, "VersionURL: %s" % (url))
        soup = BeautifulSoup(self._fetch_index(url, ud, d))
        if not soup:
            bb.debug(3, "*** %s NO SOUP" % (url))
            return ""

        for line in soup.find_all('a', href=True):
            bb.debug(3, "line['href'] = '%s'" % (line['href']))
            bb.debug(3, "line = '%s'" % (str(line)))

            newver = self._parse_path(package_regex, line['href'])
            if not newver:
                newver = self._parse_path(package_regex, str(line))

            if newver:
                bb.debug(3, "Upstream version found: %s" % newver[1])
                if valid == 0:
                    version = newver
                    valid = 1
                elif self._vercmp(version, newver) < 0:
                    version = newver

        pupver = re.sub('_', '.', version[1])

        bb.debug(3, "*** %s -> UpstreamVersion = %s (CurrentVersion = %s)" %
                (package, pupver or "N/A", current_version[1]))

        if valid:
            return pupver

        return ""

    def _check_latest_version_by_dir(self, dirver, package, package_regex,
            current_version, ud, d):
        """
        Scan every directory in order to get upstream version.
        """
        version_dir = ['', '', '']
        version = ['', '', '']

        dirver_regex = re.compile("(\D*)((\d+[\.-_])+(\d+))")
        s = dirver_regex.search(dirver)
        if s:
            version_dir[1] = s.group(2)
        else:
            version_dir[1] = dirver

        dirs_uri = bb.fetch.encodeurl([ud.type, ud.host,
                ud.path.split(dirver)[0], ud.user, ud.pswd, {}])
        bb.debug(3, "DirURL: %s, %s" % (dirs_uri, package))

        soup = BeautifulSoup(self._fetch_index(dirs_uri, ud, d))
        if not soup:
            return version[1]

        for line in soup.find_all('a', href=True):
            s = dirver_regex.search(line['href'].strip("/"))
            if s:
                version_dir_new = ['', s.group(2), '']
                if self._vercmp(version_dir, version_dir_new) <= 0:
                    dirver_new = s.group(1) + s.group(2)
                    path = ud.path.replace(dirver, dirver_new, True) \
                        .split(package)[0]
                    uri = bb.fetch.encodeurl([ud.type, ud.host, path,
                        ud.user, ud.pswd, {}])

                    pupver = self._check_latest_version(uri,
                            package, package_regex, current_version, ud, d)
                    if pupver:
                        version[1] = pupver

                    version_dir = version_dir_new

        return version[1]

    def _init_regexes(self, package, ud, d):
        """
        Match as many patterns as possible such as:
                gnome-common-2.20.0.tar.gz (most common format)
                gtk+-2.90.1.tar.gz
                xf86-input-synaptics-12.6.9.tar.gz
                dri2proto-2.3.tar.gz
                blktool_4.orig.tar.gz
                libid3tag-0.15.1b.tar.gz
                unzip552.tar.gz
                icu4c-3_6-src.tgz
                genext2fs_1.3.orig.tar.gz
                gst-fluendo-mp3
        """
        # match most patterns which uses "-" as separator to version digits
        pn_prefix1 = "[a-zA-Z][a-zA-Z0-9]*([-_][a-zA-Z]\w+)*\+?[-_]"
        # a loose pattern such as for unzip552.tar.gz
        pn_prefix2 = "[a-zA-Z]+"
        # a loose pattern such as for 80325-quicky-0.4.tar.gz
        pn_prefix3 = "[0-9]+[-]?[a-zA-Z]+"
        # Save the Package Name (pn) Regex for use later
        pn_regex = "(%s|%s|%s)" % (pn_prefix1, pn_prefix2, pn_prefix3)

        # match version
        pver_regex = "(([A-Z]*\d+[a-zA-Z]*[\.-_]*)+)"

        # match arch
        parch_regex = "-source|_all_"

        # src.rpm extension was added only for rpm package. Can be removed if the rpm
        # packaged will always be considered as having to be manually upgraded
        psuffix_regex = "(tar\.gz|tgz|tar\.bz2|zip|xz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)"

        # match name, version and archive type of a package
        package_regex_comp = re.compile("(?P<name>%s?\.?v?)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s$)"
                % (pn_regex, pver_regex, parch_regex, psuffix_regex))
        self.suffix_regex_comp = re.compile(psuffix_regex)

        # compile regex, can be specific by package or generic regex
        pn_regex = d.getVar('REGEX', True)
        if pn_regex:
            package_custom_regex_comp = re.compile(pn_regex)
        else:
            version = self._parse_path(package_regex_comp, package)
            if version:
                package_custom_regex_comp = re.compile(
                    "(?P<name>%s)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s)" %
                    (re.escape(version[0]), pver_regex, parch_regex, psuffix_regex))
            else:
                package_custom_regex_comp = None

        return package_custom_regex_comp

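A quick standalone check of the composite pattern built above against one of the docstring's sample names (suffix list shortened here for brevity):

import re

pn_regex = "([a-zA-Z][a-zA-Z0-9]*([-_][a-zA-Z]\w+)*\+?[-_]|[a-zA-Z]+|[0-9]+[-]?[a-zA-Z]+)"
pver_regex = "(([A-Z]*\d+[a-zA-Z]*[\.-_]*)+)"
parch_regex = "-source|_all_"
psuffix_regex = "(tar\.gz|tgz|tar\.bz2|zip|xz|rpm|bz2)"

package_regex = re.compile("(?P<name>%s?\.?v?)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s$)"
        % (pn_regex, pver_regex, parch_regex, psuffix_regex))

m = package_regex.search("gnome-common-2.20.0.tar.gz")
print(m.group('name'), m.group('pver'), m.group('type'))
# -> gnome-common- 2.20.0 tar.gz (the name group keeps its trailing separator)
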
    def latest_versionstring(self, ud, d):
        """
        Manipulate the URL and try to obtain the latest package version

        sanity check to ensure same name and type.
        """
        package = ud.path.split("/")[-1]
        current_version = ['', d.getVar('PV', True), '']

        """possible to have no version in pkg name, such as spectrum-fw"""
        if not re.search("\d+", package):
            current_version[1] = re.sub('_', '.', current_version[1])
            current_version[1] = re.sub('-', '.', current_version[1])
            return current_version[1]

        package_regex = self._init_regexes(package, ud, d)
        if package_regex is None:
            bb.warn("latest_versionstring: package %s don't match pattern" % (package))
            return ""
        bb.debug(3, "latest_versionstring, regex: %s" % (package_regex.pattern))

        uri = ""
        regex_uri = d.getVar("REGEX_URI", True)
        if not regex_uri:
            path = ud.path.split(package)[0]

            # search for version matches on folders inside the path, like:
            # "5.7" in http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
            dirver_regex = re.compile("(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
            m = dirver_regex.search(path)
            if m:
                pn = d.getVar('PN', True)
                dirver = m.group('dirver')

                dirver_pn_regex = re.compile("%s\d?" % (re.escape(pn)))
                if not dirver_pn_regex.search(dirver):
                    return self._check_latest_version_by_dir(dirver,
                            package, package_regex, current_version, ud, d)

            uri = bb.fetch.encodeurl([ud.type, ud.host, path, ud.user, ud.pswd, {}])
        else:
            uri = regex_uri

        return self._check_latest_version(uri, package, package_regex,
                current_version, ud, d)

@@ -1,391 +0,0 @@
#!/usr/bin/env python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
# Copyright (C) 2006 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os
import sys
import logging
import optparse
import warnings

import bb
from bb import event
import bb.msg
from bb import cooker
from bb import ui
from bb import server
from bb import cookerdata

__version__ = "1.26.0"
logger = logging.getLogger("BitBake")

class BBMainException(bb.BBHandledException):
    pass

def get_ui(config):
    if not config.ui:
        # modify 'ui' attribute because it is also read by cooker
        config.ui = os.environ.get('BITBAKE_UI', 'knotty')

    interface = config.ui

    try:
        # Dynamically load the UI based on the ui name. Although we
        # suggest a fixed set this allows you to have flexibility in which
        # ones are available.
        module = __import__("bb.ui", fromlist = [interface])
        return getattr(module, interface)
    except AttributeError:
        raise BBMainException("FATAL: Invalid user interface '%s' specified.\n"
                              "Valid interfaces: depexp, goggle, ncurses, hob, knotty [default]." % interface)


# Display bitbake/OE warnings via the BitBake.Warnings logger, ignoring others"""
warnlog = logging.getLogger("BitBake.Warnings")
_warnings_showwarning = warnings.showwarning
def _showwarning(message, category, filename, lineno, file=None, line=None):
    if file is not None:
        if _warnings_showwarning is not None:
            _warnings_showwarning(message, category, filename, lineno, file, line)
    else:
        s = warnings.formatwarning(message, category, filename, lineno)
        warnlog.warn(s)

warnings.showwarning = _showwarning
warnings.filterwarnings("ignore")
warnings.filterwarnings("default", module="(<string>$|(oe|bb)\.)")
warnings.filterwarnings("ignore", category=PendingDeprecationWarning)
warnings.filterwarnings("ignore", category=ImportWarning)
warnings.filterwarnings("ignore", category=DeprecationWarning, module="<string>$")
warnings.filterwarnings("ignore", message="With-statements now directly support multiple context managers")

class BitBakeConfigParameters(cookerdata.ConfigParameters):

    def parseCommandLine(self, argv=sys.argv):
        parser = optparse.OptionParser(
            version = "BitBake Build Tool Core version %s, %%prog version %s" % (bb.__version__, __version__),
            usage = """%prog [options] [recipename/target ...]

    Executes the specified task (default is 'build') for a given set of target recipes (.bb files).
    It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which
    will provide the layer, BBFILES and other configuration information.""")

        parser.add_option("-b", "--buildfile", help = "Execute tasks from a specific .bb recipe directly. WARNING: Does not handle any dependencies from other recipes.",
                   action = "store", dest = "buildfile", default = None)

        parser.add_option("-k", "--continue", help = "Continue as much as possible after an error. While the target that failed and anything depending on it cannot be built, as much as possible will be built before stopping.",
                   action = "store_false", dest = "abort", default = True)

        parser.add_option("-a", "--tryaltconfigs", help = "Continue with builds by trying to use alternative providers where possible.",
                   action = "store_true", dest = "tryaltconfigs", default = False)

        parser.add_option("-f", "--force", help = "Force the specified targets/task to run (invalidating any existing stamp file).",
                   action = "store_true", dest = "force", default = False)

        parser.add_option("-c", "--cmd", help = "Specify the task to execute. The exact options available depend on the metadata. Some examples might be 'compile' or 'populate_sysroot' or 'listtasks' may give a list of the tasks available.",
                   action = "store", dest = "cmd")

        parser.add_option("-C", "--clear-stamp", help = "Invalidate the stamp for the specified task such as 'compile' and then run the default task for the specified target(s).",
                   action = "store", dest = "invalidate_stamp")

        parser.add_option("-r", "--read", help = "Read the specified file before bitbake.conf.",
                   action = "append", dest = "prefile", default = [])

        parser.add_option("-R", "--postread", help = "Read the specified file after bitbake.conf.",
                   action = "append", dest = "postfile", default = [])

        parser.add_option("-v", "--verbose", help = "Output more log message data to the terminal.",
                   action = "store_true", dest = "verbose", default = False)

        parser.add_option("-D", "--debug", help = "Increase the debug level. You can specify this more than once.",
                   action = "count", dest="debug", default = 0)

        parser.add_option("-n", "--dry-run", help = "Don't execute, just go through the motions.",
                   action = "store_true", dest = "dry_run", default = False)

        parser.add_option("-S", "--dump-signatures", help = "Dump out the signature construction information, with no task execution. The SIGNATURE_HANDLER parameter is passed to the handler. Two common values are none and printdiff but the handler may define more/less. none means only dump the signature, printdiff means compare the dumped signature with the cached one.",
                   action = "append", dest = "dump_signatures", default = [], metavar="SIGNATURE_HANDLER")

        parser.add_option("-p", "--parse-only", help = "Quit after parsing the BB recipes.",
                   action = "store_true", dest = "parse_only", default = False)

        parser.add_option("-s", "--show-versions", help = "Show current and preferred versions of all recipes.",
                   action = "store_true", dest = "show_versions", default = False)

        parser.add_option("-e", "--environment", help = "Show the global or per-recipe environment complete with information about where variables were set/changed.",
                   action = "store_true", dest = "show_environment", default = False)

        parser.add_option("-g", "--graphviz", help = "Save dependency tree information for the specified targets in the dot syntax.",
                   action = "store_true", dest = "dot_graph", default = False)

        parser.add_option("-I", "--ignore-deps", help = """Assume these dependencies don't exist and are already provided (equivalent to ASSUME_PROVIDED). Useful to make dependency graphs more appealing""",
                   action = "append", dest = "extra_assume_provided", default = [])

        parser.add_option("-l", "--log-domains", help = """Show debug logging for the specified logging domains""",
                   action = "append", dest = "debug_domains", default = [])

        parser.add_option("-P", "--profile", help = "Profile the command and save reports.",
                   action = "store_true", dest = "profile", default = False)

        parser.add_option("-u", "--ui", help = "The user interface to use (e.g. knotty, hob, depexp).",
                   action = "store", dest = "ui")

        parser.add_option("-t", "--servertype", help = "Choose which server to use, process or xmlrpc.",
                   action = "store", dest = "servertype")

        parser.add_option("", "--token", help = "Specify the connection token to be used when connecting to a remote server.",
                   action = "store", dest = "xmlrpctoken")

        parser.add_option("", "--revisions-changed", help = "Set the exit code depending on whether upstream floating revisions have changed or not.",
                   action = "store_true", dest = "revisions_changed", default = False)

        parser.add_option("", "--server-only", help = "Run bitbake without a UI, only starting a server (cooker) process.",
                   action = "store_true", dest = "server_only", default = False)

        parser.add_option("-B", "--bind", help = "The name/address for the bitbake server to bind to.",
                   action = "store", dest = "bind", default = False)

        parser.add_option("", "--no-setscene", help = "Do not run any setscene tasks. sstate will be ignored and everything needed, built.",
                   action = "store_true", dest = "nosetscene", default = False)

        parser.add_option("", "--remote-server", help = "Connect to the specified server.",
                   action = "store", dest = "remote_server", default = False)

        parser.add_option("-m", "--kill-server", help = "Terminate the remote server.",
                   action = "store_true", dest = "kill_server", default = False)

        parser.add_option("", "--observe-only", help = "Connect to a server as an observing-only client.",
                   action = "store_true", dest = "observe_only", default = False)

        parser.add_option("", "--status-only", help = "Check the status of the remote bitbake server.",
                   action = "store_true", dest = "status_only", default = False)

        parser.add_option("-w", "--write-log", help = "Writes the event log of the build to a bitbake event json file. Use '' (empty string) to assign the name automatically.",
                   action = "store", dest = "writeeventlog")

        options, targets = parser.parse_args(argv)

        # some environmental variables set also configuration options
        if "BBSERVER" in os.environ:
            options.servertype = "xmlrpc"
            options.remote_server = os.environ["BBSERVER"]

        if "BBTOKEN" in os.environ:
            options.xmlrpctoken = os.environ["BBTOKEN"]

        if "BBEVENTLOG" is os.environ:
            options.writeeventlog = os.environ["BBEVENTLOG"]

        # fill in proper log name if not supplied
        if options.writeeventlog is not None and len(options.writeeventlog) == 0:
            import datetime
            options.writeeventlog = "bitbake_eventlog_%s.json" % datetime.datetime.now().strftime("%Y%m%d%H%M%S")

        # if BBSERVER says to autodetect, let's do that
        if options.remote_server:
            [host, port] = options.remote_server.split(":", 2)
            port = int(port)
            # use automatic port if port set to -1, means read it from
            # the bitbake.lock file; this is a bit tricky, but we always expect
            # to be in the base of the build directory if we need to have a
            # chance to start the server later, anyway
            if port == -1:
                lock_location = "./bitbake.lock"
                # we try to read the address at all times; if the server is not started,
                # we'll try to start it after the first connect fails, below
                try:
                    lf = open(lock_location, 'r')
                    remotedef = lf.readline()
                    [host, port] = remotedef.split(":")
                    port = int(port)
                    lf.close()
                    options.remote_server = remotedef
                except Exception as e:
                    raise BBMainException("Failed to read bitbake.lock (%s), invalid port" % str(e))

        return options, targets[1:]

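The port == -1 branch above recovers the server address from bitbake.lock; the parse itself is just a host:port split on the first line (lock contents invented here):

# bitbake.lock's first line holds "host:port" while a server is running
remotedef = "localhost:12345\n"   # what lf.readline() might return

host, port = remotedef.strip().split(":")
port = int(port)
print(host, port)  # -> localhost 12345
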
def start_server(servermodule, configParams, configuration, features):
    server = servermodule.BitBakeServer()
    if configParams.bind:
        (host, port) = configParams.bind.split(':')
        server.initServer((host, int(port)))
        configuration.interface = [ server.serverImpl.host, server.serverImpl.port ]
    else:
        server.initServer()
        configuration.interface = []

    try:
        configuration.setServerRegIdleCallback(server.getServerIdleCB())

        cooker = bb.cooker.BBCooker(configuration, features)

        server.addcooker(cooker)
        server.saveConnectionDetails()
    except Exception as e:
        exc_info = sys.exc_info()
        while hasattr(server, "event_queue"):
            try:
                import queue
            except ImportError:
                import Queue as queue
            try:
                event = server.event_queue.get(block=False)
            except (queue.Empty, IOError):
                break
            if isinstance(event, logging.LogRecord):
                logger.handle(event)
        raise exc_info[1], None, exc_info[2]
    server.detach()
    cooker.lock.close()
    return server


def bitbake_main(configParams, configuration):

    # Python multiprocessing requires /dev/shm on Linux
    if sys.platform.startswith('linux') and not os.access('/dev/shm', os.W_OK | os.X_OK):
        raise BBMainException("FATAL: /dev/shm does not exist or is not writable")

    # Unbuffer stdout to avoid log truncation in the event
    # of an unorderly exit as well as to provide timely
    # updates to log files for use with tail
    try:
        if sys.stdout.name == '<stdout>':
            sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
    except:
        pass


    configuration.setConfigParameters(configParams)

    ui_module = get_ui(configParams)

    # Server type can be xmlrpc or process currently, if nothing is specified,
    # the default server is process
    if configParams.servertype:
        server_type = configParams.servertype
    else:
        server_type = 'process'

    try:
        module = __import__("bb.server", fromlist = [server_type])
        servermodule = getattr(module, server_type)
    except AttributeError:
        raise BBMainException("FATAL: Invalid server type '%s' specified.\n"
                              "Valid interfaces: xmlrpc, process [default]." % server_type)

    if configParams.server_only:
        if configParams.servertype != "xmlrpc":
            raise BBMainException("FATAL: If '--server-only' is defined, we must set the "
                                  "servertype as 'xmlrpc'.\n")
        if not configParams.bind:
            raise BBMainException("FATAL: The '--server-only' option requires a name/address "
                                  "to bind to with the -B option.\n")
        if configParams.remote_server:
            raise BBMainException("FATAL: The '--server-only' option conflicts with %s.\n" %
                                  ("the BBSERVER environment variable" if "BBSERVER" in os.environ \
                                   else "the '--remote-server' option" ))

    if configParams.bind and configParams.servertype != "xmlrpc":
        raise BBMainException("FATAL: If '-B' or '--bind' is defined, we must "
                              "set the servertype as 'xmlrpc'.\n")

    if configParams.remote_server and configParams.servertype != "xmlrpc":
        raise BBMainException("FATAL: If '--remote-server' is defined, we must "
                              "set the servertype as 'xmlrpc'.\n")

    if configParams.observe_only and (not configParams.remote_server or configParams.bind):
        raise BBMainException("FATAL: '--observe-only' can only be used by UI clients "
                              "connecting to a server.\n")

    if configParams.kill_server and not configParams.remote_server:
        raise BBMainException("FATAL: '--kill-server' can only be used to terminate a remote server")

    if "BBDEBUG" in os.environ:
        level = int(os.environ["BBDEBUG"])
        if level > configuration.debug:
            configuration.debug = level

    bb.msg.init_msgconfig(configParams.verbose, configuration.debug,
                          configuration.debug_domains)

    # Ensure logging messages get sent to the UI as events
    handler = bb.event.LogHandler()
    if not configParams.status_only:
        # In status only mode there are no logs and no UI
        logger.addHandler(handler)

    # Clear away any spurious environment variables while we stoke up the cooker
    cleanedvars = bb.utils.clean_environment()

    featureset = []
    if not configParams.server_only:
        # Collect the feature set for the UI
        featureset = getattr(ui_module, "featureSet", [])

    if not configParams.remote_server:
        # we start a server with a given configuration
        server = start_server(servermodule, configParams, configuration, featureset)
        bb.event.ui_queue = []
    else:
        # we start a stub server that is actually a XMLRPClient that connects to a real server
        server = servermodule.BitBakeXMLRPCClient(configParams.observe_only, configParams.xmlrpctoken)
        server.saveConnectionDetails(configParams.remote_server)


    if not configParams.server_only:
        try:
            server_connection = server.establishConnection(featureset)
        except Exception as e:
            if configParams.kill_server:
                return 0
            bb.fatal("Could not connect to server %s: %s" % (configParams.remote_server, str(e)))

        # Restore the environment in case the UI needs it
        for k in cleanedvars:
            os.environ[k] = cleanedvars[k]

        logger.removeHandler(handler)


        if configParams.status_only:
            server_connection.terminate()
            return 0

        if configParams.kill_server:
            server_connection.connection.terminateServer()
            bb.event.ui_queue = []
            return 0

        try:
            return ui_module.main(server_connection.connection, server_connection.events, configParams)
        finally:
            bb.event.ui_queue = []
            server_connection.terminate()
    else:
        print("Bitbake server address: %s, server port: %s" % (server.serverImpl.host, server.serverImpl.port))
        return 0

    return 1
@@ -52,10 +52,10 @@ def getMountedDev(path):
        parentDev = os.stat(path).st_dev
        currentDev = parentDev
        # When the current directory's device is different from the
        # parent's, then the current directory is a mount point
        # parrent's, then the current directory is a mount point
        while parentDev == currentDev:
            mountPoint = path
            # Use dirname to get the parent's directory
            # Use dirname to get the parrent's directory
            path = os.path.dirname(path)
            # Reach the "/"
            if path == mountPoint:
@@ -77,7 +77,7 @@ def getDiskData(BBDirs, configuration):
    """Prepare disk data for disk space monitor"""

    # Save the device IDs, need the ID to be unique (the dictionary's key is
    # unique), so that when more than one directory is located on the same
    # unique), so that when more than one directories are located in the same
    # device, we just monitor it once
    devDict = {}
    for pathSpaceInode in BBDirs.split():
@@ -187,11 +187,11 @@ class diskMonitor:
        if self.spaceInterval and self.inodeInterval:
            self.enableMonitor = True
            # These are for saving the previous disk free space and inode, we
            # use them to avoid printing too many warning messages
            # use them to avoid print too many warning messages
            self.preFreeS = {}
            self.preFreeI = {}
            # This is for STOPTASKS and ABORT, to avoid printing the message
            # repeatedly while waiting for the tasks to finish
            # This is for STOPTASKS and ABORT, to avoid print the message repeatly
            # during waiting the tasks to finish
            self.checked = {}
            for k in self.devDict:
                self.preFreeS[k] = 0
@@ -239,9 +239,11 @@ class diskMonitor:
                freeInode = st.f_favail

                if minInode and freeInode < minInode:
                    # Some filesystems use dynamic inodes so can't run out
                    # (e.g. btrfs). This is reported by the inode count being 0.
                    # Some fs formats' (e.g., btrfs) statvfs.f_files (inodes) is
                    # zero, this is a feature of the fs, we disable the inode
                    # checking for such a fs.
                    if st.f_files == 0:
                        logger.info("Inode check for %s is unavaliable, will remove it from disk monitor" % path)
                        self.devDict[k][2] = None
                        continue
                # Always show warning, the self.checked would always be False if the action is WARN

@@ -202,8 +202,8 @@ if __name__ == '__main__':
|
||||
print(rec5._replace(k=222)._my_custom_method()) # MyMixIn's
|
||||
print(rec5._replace(k=222).count(2)) # MyMixIn's
|
||||
|
||||
# Note that behavior: the standard namedtuple methods cannot be
|
||||
# overridden by a foreign mix-in -- even if the mix-in is declared
|
||||
# None that behavior: the standard namedtuple methods cannot be
|
||||
# overriden by a foreign mix-in -- even if the mix-in is declared
|
||||
# as the leftmost base class (but, obviously, you can override them
|
||||
# in the defined class or its subclasses):
|
||||
|
||||
|
||||
@@ -49,11 +49,8 @@ class ParseError(Exception):
        else:
            return "ParseError in %s: %s" % (self.filename, self.msg)

class SkipRecipe(Exception):
    """Exception raised to skip this recipe"""

class SkipPackage(SkipRecipe):
    """Exception raised to skip this recipe (use SkipRecipe in new code)"""
class SkipPackage(Exception):
    """Exception raised to skip this package"""

__mtime_cache = {}
def cached_mtime(f):

@@ -73,11 +70,6 @@ def update_mtime(f):
    __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
    return __mtime_cache[f]

def update_cache(f):
    if f in __mtime_cache:
        logger.debug(1, "Updating mtime cache for %s" % f)
        update_mtime(f)

def mark_dependency(d, f):
    if f.startswith('./'):
        f = "%s/%s" % (os.getcwd(), f[2:])

@@ -129,6 +121,7 @@ def resolve_file(fn, d):
    if not os.path.isfile(fn):
        raise IOError("file %s not found" % fn)

    logger.debug(2, "LOAD %s", fn)
    return fn

# Used by OpenEmbedded metadata

@@ -128,7 +128,7 @@ class DataNode(AstNode):
        if 'flag' in groupd and groupd['flag'] != None:
            flag = groupd['flag']
        elif groupd["lazyques"]:
            flag = "_defaultval"
            flag = "defaultval"

        loginfo['op'] = op
        loginfo['detail'] = groupd["value"]

@@ -139,7 +139,7 @@ class DataNode(AstNode):
        data.setVar(key, val, **loginfo)

class MethodNode(AstNode):
    tr_tbl = string.maketrans('/.+-@%&', '_______')
    tr_tbl = string.maketrans('/.+-@%', '______')

    def __init__(self, filename, lineno, func_name, body):
        AstNode.__init__(self, filename, lineno)

@@ -226,8 +226,6 @@ class ExportFuncsNode(AstNode):
            if data.getVarFlag(calledfunc, "python"):
                data.setVar(func, "    bb.build.exec_func('" + calledfunc + "', d)\n")
            else:
                if "-" in self.classname:
                    bb.fatal("The classname %s contains a dash character and is calling an sh function %s using EXPORT_FUNCTIONS. Since a dash is illegal in sh function names, this cannot work, please rename the class or don't use EXPORT_FUNCTIONS." % (self.classname, calledfunc))
                data.setVar(func, "    " + calledfunc + "\n")
            data.setVarFlag(func, 'export_func', '1')

@@ -339,10 +337,8 @@ def finalize(fn, d, variant = None):

    bb.event.fire(bb.event.RecipeParsed(fn), d)

def _create_variants(datastores, names, function, onlyfinalise):
def _create_variants(datastores, names, function):
    def create_variant(name, orig_d, arg = None):
        if onlyfinalise and name not in onlyfinalise:
            return
        new_d = bb.data.createCopy(orig_d)
        function(arg or name, new_d)
        datastores[name] = new_d

@@ -379,7 +375,7 @@ def _expand_versions(versions):
def multi_finalize(fn, d):
    appends = (d.getVar("__BBAPPEND", True) or "").split()
    for append in appends:
        logger.debug(1, "Appending .bbappend file %s to %s", append, fn)
        logger.debug(2, "Appending .bbappend file %s to %s", append, fn)
        bb.parse.BBHandler.handle(append, d, True)

    onlyfinalise = d.getVar("__ONLYFINALISE", False)

@@ -388,7 +384,7 @@ def multi_finalize(fn, d):
    d = bb.data.createCopy(safe_d)
    try:
        finalize(fn, d)
    except bb.parse.SkipRecipe as e:
    except bb.parse.SkipPackage as e:
        d.setVar("__SKIPPED", e.args[0])
    datastores = {"": safe_d}

@@ -431,10 +427,10 @@ def multi_finalize(fn, d):
            verfunc(pv, d, safe_d)
            try:
                finalize(fn, d)
            except bb.parse.SkipRecipe as e:
            except bb.parse.SkipPackage as e:
                d.setVar("__SKIPPED", e.args[0])

        _create_variants(datastores, versions, verfunc, onlyfinalise)
        _create_variants(datastores, versions, verfunc)

    extended = d.getVar("BBCLASSEXTEND", True) or ""
    if extended:

@@ -464,14 +460,14 @@ def multi_finalize(fn, d):
            bb.parse.BBHandler.inherit(extendedmap[name], fn, 0, d)

        safe_d.setVar("BBCLASSEXTEND", extended)
        _create_variants(datastores, extendedmap.keys(), extendfunc, onlyfinalise)
        _create_variants(datastores, extendedmap.keys(), extendfunc)

    for variant, variant_d in datastores.iteritems():
        if variant:
            try:
                if not onlyfinalise or variant in onlyfinalise:
                    finalize(fn, variant_d, variant)
            except bb.parse.SkipRecipe as e:
            except bb.parse.SkipPackage as e:
                variant_d.setVar("__SKIPPED", e.args[0])

    if len(datastores) > 1:

@@ -86,7 +86,7 @@ def inherit(files, fn, lineno, d):
            file = abs_fn

        if not file in __inherit_cache:
            logger.debug(1, "Inheriting %s (from %s:%d)" % (file, fn, lineno))
            logger.log(logging.DEBUG -1, "BB %s:%d: inheriting %s", fn, lineno, file)
            __inherit_cache.append( file )
            d.setVar('__inherit_cache', __inherit_cache)
            include(fn, file, lineno, d, "inherit")

@@ -124,6 +124,12 @@ def handle(fn, d, include):
    __classname__ = ""
    __residue__ = []

    if include == 0:
        logger.debug(2, "BB %s: handle(data)", fn)
    else:
        logger.debug(2, "BB %s: handle(data, include)", fn)

    base_name = os.path.basename(fn)
    (root, ext) = os.path.splitext(base_name)
    init(d)

@@ -154,7 +160,7 @@ def handle(fn, d, include):

    try:
        statements.eval(d)
    except bb.parse.SkipRecipe:
    except bb.parse.SkipPackage:
        bb.data.setVar("__SKIPPED", True, d)
    if include == 0:
        return { "" : d }

@@ -27,7 +27,7 @@
import re, os
import logging
import bb.utils
from bb.parse import ParseError, resolve_file, ast, logger, handle
from bb.parse import ParseError, resolve_file, ast, logger

__config_regexp__ = re.compile( r"""
    ^

@@ -66,36 +66,38 @@ def init(data):
def supports(fn, d):
    return fn[-5:] == ".conf"

def include(parentfn, fn, lineno, data, error_out):
def include(oldfn, fn, lineno, data, error_out):
    """
    error_out: A string indicating the verb (e.g. "include", "inherit") to be
    used in a ParseError that will be raised if the file to be included could
    not be included. Specify False to avoid raising an error in this case.
    """
    if parentfn == fn: # prevent infinite recursion
    if oldfn == fn: # prevent infinite recursion
        return None

    import bb
    fn = data.expand(fn)
    parentfn = data.expand(parentfn)
    oldfn = data.expand(oldfn)

    if not os.path.isabs(fn):
        dname = os.path.dirname(parentfn)
        dname = os.path.dirname(oldfn)
        bbpath = "%s:%s" % (dname, data.getVar("BBPATH", True))
        abs_fn, attempts = bb.utils.which(bbpath, fn, history=True)
        if abs_fn and bb.parse.check_dependency(data, abs_fn):
            logger.warn("Duplicate inclusion for %s in %s" % (abs_fn, data.getVar('FILE', True)))
            bb.warn("Duplicate inclusion for %s in %s" % (abs_fn, data.getVar('FILE', True)))
        for af in attempts:
            bb.parse.mark_dependency(data, af)
        if abs_fn:
            fn = abs_fn
    elif bb.parse.check_dependency(data, fn):
        logger.warn("Duplicate inclusion for %s in %s" % (fn, data.getVar('FILE', True)))
        bb.warn("Duplicate inclusion for %s in %s" % (fn, data.getVar('FILE', True)))

    from bb.parse import handle
    try:
        ret = bb.parse.handle(fn, data, True)
        ret = handle(fn, data, True)
    except (IOError, OSError):
        if error_out:
            raise ParseError("Could not %(error_out)s file %(fn)s" % vars(), parentfn, lineno)
            raise ParseError("Could not %(error_out)s file %(fn)s" % vars(), oldfn, lineno)
        logger.debug(2, "CONF file '%s' not found", fn)
        bb.parse.mark_dependency(data, fn)

@@ -199,9 +199,7 @@ class PersistData(object):
        del self.data[domain][key]

def connect(database):
    connection = sqlite3.connect(database, timeout=5, isolation_level=None)
    connection.execute("pragma synchronous = off;")
    return connection
    return sqlite3.connect(database, timeout=5, isolation_level=None)

def persist(domain, d):
    """Convenience factory for SQLTable objects based upon metadata"""

@@ -38,4 +38,4 @@ class ExitSignal(ShellError):

class ReturnSignal(ShellError):
    """Exit signal."""
    pass
    pass
@@ -97,7 +97,7 @@ class RunQueueScheduler(object):
    def __init__(self, runqueue, rqdata):
        """
        The default scheduler just returns the first buildable task (the
        priority map is sorted by task number)
        priority map is sorted by task numer)
        """
        self.rq = runqueue
        self.rqdata = rqdata

@@ -186,7 +186,7 @@ class RunQueueSchedulerCompletion(RunQueueSchedulerSpeed):
    """
    A scheduler optimised to complete .bb files are quickly as possible. The
    priority map is sorted by task weight, but then reordered so once a given
    .bb file starts to build, it's completed as quickly as possible. This works
    .bb file starts to build, its completed as quickly as possible. This works
    well where disk space is at a premium and classes like OE's rm_work are in
    force.
    """

@@ -430,7 +430,7 @@ class RunQueueData:
            # Nothing to do
            return 0

        logger.info("Preparing RunQueue")
        logger.info("Preparing runqueue")

        # Step A - Work out a list of tasks to run
        #

@@ -795,7 +795,7 @@ class RunQueueData:
            for st in self.cooker.configuration.invalidate_stamp.split(','):
                invalidate_task(fn, "do_%s" % st, True)

        # Iterate over the task list and call into the siggen code
        # Interate over the task list and call into the siggen code
        dealtwith = set()
        todeal = set(range(len(self.runq_fnid)))
        while len(todeal) > 0:

@@ -859,18 +859,15 @@ class RunQueue:

    def _start_worker(self, fakeroot = False, rqexec = None):
        logger.debug(1, "Starting bitbake-worker")
        magic = "decafbad"
        if self.cooker.configuration.profile:
            magic = "decafbadbad"
        if fakeroot:
            fakerootcmd = self.cfgData.getVar("FAKEROOTCMD", True)
            fakerootenv = (self.cfgData.getVar("FAKEROOTBASEENV", True) or "").split()
            env = os.environ.copy()
            for key, value in (var.split('=') for var in fakerootenv):
                env[key] = value
            worker = subprocess.Popen([fakerootcmd, "bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE, env=env)
            worker = subprocess.Popen([fakerootcmd, "bitbake-worker", "decafbad"], stdout=subprocess.PIPE, stdin=subprocess.PIPE, env=env)
        else:
            worker = subprocess.Popen(["bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
            worker = subprocess.Popen(["bitbake-worker", "decafbad"], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
        bb.utils.nonblockingfd(worker.stdout)
        workerpipe = runQueuePipe(worker.stdout, None, self.cfgData, self, rqexec)

@@ -879,7 +876,9 @@ class RunQueue:
            "fakerootenv" : self.rqdata.dataCache.fakerootenv,
            "fakerootdirs" : self.rqdata.dataCache.fakerootdirs,
            "fakerootnoenv" : self.rqdata.dataCache.fakerootnoenv,
            "sigdata" : bb.parse.siggen.get_taskdata(),
            "hashes" : bb.parse.siggen.taskhash,
            "hash_deps" : bb.parse.siggen.runtaskdeps,
            "sigchecksums" : bb.parse.siggen.file_checksum_values,
            "runq_hash" : self.rqdata.runq_hash,
            "logdefaultdebug" : bb.msg.loggerDefaultDebugLevel,
            "logdefaultverbose" : bb.msg.loggerDefaultVerbose,

@@ -968,11 +967,11 @@ class RunQueue:

        stampfile = bb.build.stampfile(taskname, self.rqdata.dataCache, fn)

        # If the stamp is missing, it's not current
        # If the stamp is missing its not current
        if not os.access(stampfile, os.F_OK):
            logger.debug(2, "Stampfile %s not available", stampfile)
            return False
        # If it's a 'nostamp' task, it's not current
        # If its a 'nostamp' task, it's not current
        taskdep = self.rqdata.dataCache.task_deps[fn]
        if 'nostamp' in taskdep and taskname in taskdep['nostamp']:
            logger.debug(2, "%s.%s is nostamp\n", fn, taskname)

@@ -1064,9 +1063,9 @@ class RunQueue:
            retval = self.rqexe.execute()

        if self.state is runQueueCleanUp:
            retval = self.rqexe.finish()
            self.rqexe.finish()

        if (self.state is runQueueComplete or self.state is runQueueFailed) and self.rqexe:
        if self.state is runQueueComplete or self.state is runQueueFailed:
            self.teardown_workers()
            if self.rqexe.stats.failed:
                logger.info("Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and %d failed.", self.rqexe.stats.completed + self.rqexe.stats.failed, self.rqexe.stats.skipped, self.rqexe.stats.failed)

@@ -1107,7 +1106,6 @@ class RunQueue:

    def finish_runqueue(self, now = False):
        if not self.rqexe:
            self.state = runQueueComplete
            return

        if now:

@@ -1155,7 +1153,7 @@ class RunQueue:
                sq_taskname.append(taskname)
                sq_task.append(task)
            call = self.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d)"
            locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.expanded_data }
            locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.data }
            valid = bb.utils.better_eval(call, locs)
            for v in valid:
                valid_new.add(sq_task[v])

@@ -1242,8 +1240,6 @@ class RunQueue:
                    prevh = __find_md5__.search(latestmatch).group(0)
                    output = bb.siggen.compare_sigfiles(latestmatch, match, recursecb)
                    bb.plain("\nTask %s:%s couldn't be used from the cache because:\n We need hash %s, closest matching task was %s\n " % (pn, taskname, h, prevh) + '\n '.join(output))
                else:
                    bb.plain("Error, can't find multiple tasks at divergence point? Was there a previously run task?")

class RunQueueExecute:

@@ -1308,14 +1304,15 @@ class RunQueueExecute:
        if self.stats.active > 0:
            bb.event.fire(runQueueExitWait(self.stats.active), self.cfgData)
            self.rq.read_workers()
            return self.rq.active_fds()

            return

        if len(self.failed_fnids) != 0:
            self.rq.state = runQueueFailed
            return True
            return

        self.rq.state = runQueueComplete
        return True
        return

    def check_dependencies(self, task, taskdeps, setscene = False):
        if not self.rq.depvalidate:

@@ -1333,7 +1330,7 @@ class RunQueueExecute:
            taskname = self.rqdata.runq_task[depid]
            taskdata[dep] = [pn, taskname, fn]
        call = self.rq.depvalidate + "(task, taskdata, notneeded, d)"
        locs = { "task" : task, "taskdata" : taskdata, "notneeded" : self.scenequeue_notneeded, "d" : self.cooker.expanded_data }
        locs = { "task" : task, "taskdata" : taskdata, "notneeded" : self.scenequeue_notneeded, "d" : self.cooker.data }
        valid = bb.utils.better_eval(call, locs)
        return valid

@@ -1402,7 +1399,7 @@ class RunQueueExecuteTasks(RunQueueExecute):

            call = self.rq.setsceneverify + "(covered, tasknames, fnids, fns, d, invalidtasks=invalidtasks)"
            call2 = self.rq.setsceneverify + "(covered, tasknames, fnids, fns, d)"
            locs = { "covered" : self.rq.scenequeue_covered, "tasknames" : self.rqdata.runq_task, "fnids" : self.rqdata.runq_fnid, "fns" : self.rqdata.taskData.fn_index, "d" : self.cooker.expanded_data, "invalidtasks" : invalidtasks }
            locs = { "covered" : self.rq.scenequeue_covered, "tasknames" : self.rqdata.runq_task, "fnids" : self.rqdata.runq_fnid, "fns" : self.rqdata.taskData.fn_index, "d" : self.cooker.data, "invalidtasks" : invalidtasks }
            # Backwards compatibility with older versions without invalidtasks
            try:
                covered_remove = bb.utils.better_eval(call, locs)

@@ -1554,8 +1551,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
                bb.event.fire(startevent, self.cfgData)
                self.runq_running[task] = 1
                self.stats.taskActive()
                if not self.cooker.configuration.dry_run:
                    bb.build.make_stamp(taskname, self.rqdata.dataCache, fn)
                bb.build.make_stamp(taskname, self.rqdata.dataCache, fn)
                self.task_complete(task)
                return True
            else:

@@ -1690,7 +1686,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute):

        process_endpoints(endpoints)

        # Build a list of setscene tasks which are "unskippable"
        # Build a list of setscene tasks which as "unskippable"
        # These are direct endpoints referenced by the build
        endpoints2 = {}
        sq_revdeps2 = []

@@ -1822,7 +1818,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
                sq_taskname.append(taskname)
                sq_task.append(task)
            call = self.rq.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d)"
            locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.expanded_data }
            locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.data }
            valid = bb.utils.better_eval(call, locs)

            valid_new = stamppresent

@@ -1846,10 +1842,6 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
                    realtask = self.rqdata.runq_setscene[task]
                    realdep = self.rqdata.runq_setscene[dep]
                    logger.debug(2, "%s was unavailable and is a hard dependency of %s so skipping" % (self.rqdata.get_user_idstring(realtask), self.rqdata.get_user_idstring(realdep)))
                    self.scenequeue_updatecounters(dep, fail)
                    continue
                if task not in self.sq_revdeps2[dep]:
                    # May already have been removed by the fail case above
                    continue
                self.sq_revdeps2[dep].remove(task)
                if len(self.sq_revdeps2[dep]) == 0:

@@ -1991,10 +1983,6 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
        logger.debug(1, 'We can skip tasks %s', sorted(self.rq.scenequeue_covered))

        self.rq.state = runQueueRunInit

        completeevent = sceneQueueComplete(self.stats, self.rq)
        bb.event.fire(completeevent, self.cfgData)

        return True

    def runqueue_process_waitpid(self, task, status):

@@ -2047,7 +2035,7 @@ class sceneQueueEvent(runQueueEvent):

class runQueueTaskStarted(runQueueEvent):
    """
    Event notifying a task was started
    Event notifing a task was started
    """
    def __init__(self, task, stats, rq, noexec=False):
        runQueueEvent.__init__(self, task, stats, rq)

@@ -2055,7 +2043,7 @@ class runQueueTaskStarted(runQueueEvent):

class sceneQueueTaskStarted(sceneQueueEvent):
    """
    Event notifying a setscene task was started
    Event notifing a setscene task was started
    """
    def __init__(self, task, stats, rq, noexec=False):
        sceneQueueEvent.__init__(self, task, stats, rq)

@@ -2063,7 +2051,7 @@ class sceneQueueTaskStarted(sceneQueueEvent):

class runQueueTaskFailed(runQueueEvent):
    """
    Event notifying a task failed
    Event notifing a task failed
    """
    def __init__(self, task, stats, exitcode, rq):
        runQueueEvent.__init__(self, task, stats, rq)

@@ -2071,33 +2059,25 @@ class runQueueTaskFailed(runQueueEvent):

class sceneQueueTaskFailed(sceneQueueEvent):
    """
    Event notifying a setscene task failed
    Event notifing a setscene task failed
    """
    def __init__(self, task, stats, exitcode, rq):
        sceneQueueEvent.__init__(self, task, stats, rq)
        self.exitcode = exitcode

class sceneQueueComplete(sceneQueueEvent):
    """
    Event when all the sceneQueue tasks are complete
    """
    def __init__(self, stats, rq):
        self.stats = stats.copy()
        bb.event.Event.__init__(self)

class runQueueTaskCompleted(runQueueEvent):
    """
    Event notifying a task completed
    Event notifing a task completed
    """

class sceneQueueTaskCompleted(sceneQueueEvent):
    """
    Event notifying a setscene task completed
    Event notifing a setscene task completed
    """

class runQueueTaskSkipped(runQueueEvent):
    """
    Event notifying a task was skipped
    Event notifing a task was skipped
    """
    def __init__(self, task, stats, rq, reason):
        runQueueEvent.__init__(self, task, stats, rq)

@@ -38,18 +38,14 @@ from . import BitBakeBaseServer, BitBakeBaseServerConnection, BaseImplServer
logger = logging.getLogger('BitBake')

class ServerCommunicator():
    def __init__(self, connection, event_handle, server):
    def __init__(self, connection, event_handle):
        self.connection = connection
        self.event_handle = event_handle
        self.server = server

    def runCommand(self, command):
        # @todo try/except
        self.connection.send(command)

        if not self.server.is_alive():
            raise SystemExit

        while True:
            # don't let the user ctrl-c while we're waiting for a response
            try:

@@ -115,7 +111,7 @@ class ProcessServer(Process, BaseImplServer):
                    self.quitout.recv()
                    self.quit = True

                self.idle_commands(.1, [self.command_channel, self.quitout])
                self.idle_commands(.1, [self.event_queue._reader, self.command_channel, self.quitout])
            except Exception:
                logger.exception('Running command %s', command)

@@ -135,9 +131,6 @@ class ProcessServer(Process, BaseImplServer):
                    nextsleep = None
                elif retval is True:
                    nextsleep = None
                elif isinstance(retval, float):
                    if (retval < nextsleep):
                        nextsleep = retval
                elif nextsleep is None:
                    continue
                else:

@@ -146,8 +139,6 @@ class ProcessServer(Process, BaseImplServer):
                raise
            except Exception:
                logger.exception('Running idle function')
                del self._idlefuns[function]
                self.quit = True

        if nextsleep is not None:
            select.select(fds,[],[],nextsleep)

@@ -167,7 +158,7 @@ class BitBakeProcessServerConnection(BitBakeBaseServerConnection):
        self.procserver = serverImpl
        self.ui_channel = ui_channel
        self.event_queue = event_queue
        self.connection = ServerCommunicator(self.ui_channel, self.procserver.event_handle, self.procserver)
        self.connection = ServerCommunicator(self.ui_channel, self.procserver.event_handle)
        self.events = self.event_queue

    def sigterm_terminate(self):

@@ -206,20 +197,14 @@ class ProcessEventQueue(multiprocessing.queues.Queue):

    def waitEvent(self, timeout):
        if self.exit:
            sys.exit(1)
            raise KeyboardInterrupt()
        try:
            if not self.server.is_alive():
                self.setexit()
                return None
            return self.get(True, timeout)
        except Empty:
            return None

    def getEvent(self):
        try:
            if not self.server.is_alive():
                self.setexit()
                return None
            return self.get(False)
        except Empty:
            return None

@@ -234,7 +219,6 @@ class BitBakeServer(BitBakeBaseServer):
        self.ui_channel, self.server_channel = Pipe()
        self.event_queue = ProcessEventQueue(0)
        self.serverImpl = ProcessServer(self.server_channel, self.event_queue, None)
        self.event_queue.server = self.serverImpl

    def detach(self):
        self.serverImpl.start()

@@ -80,7 +80,7 @@ class BBTransport(xmlrpclib.Transport):

def _create_server(host, port, timeout = 60):
    t = BBTransport(timeout)
    s = xmlrpclib.ServerProxy("http://%s:%d/" % (host, port), transport=t, allow_none=True)
    s = xmlrpclib.Server("http://%s:%d/" % (host, port), transport=t, allow_none=True)
    return s, t

class BitBakeServerCommands():

@@ -235,16 +235,12 @@ class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer):
            fds = [self]
            nextsleep = 0.1
            for function, data in self._idlefuns.items():
                retval = None
                try:
                    retval = function(self, data, False)
                    if retval is False:
                        del self._idlefuns[function]
                    elif retval is True:
                        nextsleep = 0
                    elif isinstance(retval, float):
                        if (retval < nextsleep):
                            nextsleep = retval
                    else:
                        fds = fds + retval
                except SystemExit:

@@ -252,21 +248,14 @@ class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer):
                except:
                    import traceback
                    traceback.print_exc()
                    if retval == None:
                        # the function execute failed; delete it
                        del self._idlefuns[function]
                    pass

            socktimeout = self.socket.gettimeout() or nextsleep
            socktimeout = min(socktimeout, nextsleep)
            # Mirror what BaseServer handle_request would do
            try:
                fd_sets = select.select(fds, [], [], socktimeout)
                if fd_sets[0] and self in fd_sets[0]:
                    self._handle_request_noblock()
            except IOError:
                # we ignore interrupted calls
                pass
            fd_sets = select.select(fds, [], [], socktimeout)
            if fd_sets[0] and self in fd_sets[0]:
                self._handle_request_noblock()

        # Tell idle functions we're exiting
        for function, data in self._idlefuns.items():

@@ -288,16 +277,13 @@ class BitBakeXMLRPCServerConnection(BitBakeBaseServerConnection):
        self.observer_only = observer_only
        self.featureset = featureset

    def connect(self, token = None):
        if token is None:
            if self.observer_only:
                token = "observer"
            else:
                token = self.connection.addClient()

    def connect(self):
        if not self.observer_only:
            token = self.connection.addClient()
        else:
            token = "observer"
        if token is None:
            return None

        self.transport.set_connection_token(token)

        self.events = uievent.BBUIEventQueue(self.connection, self.clientinfo)

@@ -306,8 +292,6 @@ class BitBakeXMLRPCServerConnection(BitBakeBaseServerConnection):

        _, error = self.connection.runCommand(["setFeatures", self.featureset])
        if error:
            # disconnect the client, we can't make the setFeature work
            self.connection.removeClient()
            # no need to log it here, the error shall be sent to the client
            raise BaseException(error)

@@ -348,9 +332,7 @@ class BitBakeServer(BitBakeBaseServer):

class BitBakeXMLRPCClient(BitBakeBaseServer):

    def __init__(self, observer_only = False, token = None):
        self.token = token

    def __init__(self, observer_only = False):
        self.observer_only = observer_only
        # if we need extra caches, just tell the server to load them all
        pass

@@ -358,14 +340,37 @@ class BitBakeXMLRPCClient(BitBakeBaseServer):
    def saveConnectionDetails(self, remote):
        self.remote = remote

    def saveConnectionConfigParams(self, configParams):
        self.configParams = configParams

    def establishConnection(self, featureset):
        # The format of "remote" must be "server:port"
        try:
            [host, port] = self.remote.split(":")
            port = int(port)
        except Exception as e:
            bb.warn("Failed to read remote definition (%s)" % str(e))
            raise e
            bb.fatal("Failed to read remote definition (%s)" % str(e))

        # use automatic port if port set to -1, meaning read it from
        # the bitbake.lock file
        if port == -1:
            lock_location = "%s/bitbake.lock" % self.configParams.environment.get('BUILDDIR')
            lock = bb.utils.lockfile(lock_location, False, False)
            if lock:
                # This means there is no server running which we can
                # connect to on the local system.
                bb.utils.unlockfile(lock)
                return None

            try:
                lf = open(lock_location, 'r')
                remotedef = lf.readline()
                [host, port] = remotedef.split(":")
                port = int(port)
                lf.close()
                self.remote = remotedef
            except Exception as e:
                bb.fatal("Failed to read bitbake.lock (%s)" % str(e))

        # We need our IP for the server connection. We get the IP
        # by trying to connect with the server

@@ -375,15 +380,13 @@ class BitBakeXMLRPCClient(BitBakeBaseServer):
            ip = s.getsockname()[0]
            s.close()
        except Exception as e:
            bb.warn("Could not create socket for %s:%s (%s)" % (host, port, str(e)))
            raise e
            bb.fatal("Could not create socket for %s:%s (%s)" % (host, port, str(e)))
        try:
            self.serverImpl = XMLRPCProxyServer(host, port)
            self.connection = BitBakeXMLRPCServerConnection(self.serverImpl, (ip, 0), self.observer_only, featureset)
            return self.connection.connect(self.token)
            return self.connection.connect()
        except Exception as e:
            bb.warn("Could not connect to server at %s:%s (%s)" % (host, port, str(e)))
            raise e
            bb.fatal("Could not connect to server at %s:%s (%s)" % (host, port, str(e)))

    def endSession(self):
        self.connection.removeClient()

@@ -62,13 +62,6 @@ class SignatureGenerator(object):
    def dump_sigs(self, dataCache, options):
        return

    def get_taskdata(self):
        return (self.runtaskdeps, self.taskhash, self.file_checksum_values)

    def set_taskdata(self, data):
        self.runtaskdeps, self.taskhash, self.file_checksum_values = data


class SignatureGeneratorBasic(SignatureGenerator):
    """
    """

@@ -192,25 +185,22 @@ class SignatureGeneratorBasic(SignatureGenerator):
            checksums = bb.fetch2.get_file_checksums(dataCache.file_checksums[fn][task], recipename)
            for (f,cs) in checksums:
                self.file_checksum_values[k][f] = cs
                if cs:
                    data = data + cs

        taskdep = dataCache.task_deps[fn]
        if 'nostamp' in taskdep and task in taskdep['nostamp']:
            # Nostamp tasks need an implicit taint so that they force any dependent tasks to run
            import uuid
            data = data + str(uuid.uuid4())
                data = data + cs

        taint = self.read_taint(fn, task, dataCache.stamp[fn])
        if taint:
            data = data + taint
            logger.warn("%s is tainted from a forced run" % k)

        h = hashlib.md5(data).hexdigest()
        self.taskhash[k] = h
        #d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task])
        return h

    def set_taskdata(self, hashes, deps, checksums):
        self.runtaskdeps = deps
        self.taskhash = hashes
        self.file_checksum_values = checksums

    def dump_sigtask(self, fn, task, stampbase, runtime):
        k = fn + "." + task
        if runtime == "customfile":

@@ -303,9 +293,10 @@ def dump_this_task(outfile, d):
    bb.parse.siggen.dump_sigtask(fn, task, outfile, "customfile")

def clean_basepath(a):
    b = a.rsplit("/", 2)[1] + a.rsplit("/", 2)[2]
    if a.startswith("virtual:"):
        b = b + ":" + a.rsplit(":", 1)[0]
        b = a.rsplit(":", 1)[0] + ":" + a.rsplit("/", 1)[1]
    else:
        b = a.rsplit("/", 1)[1]
    return b

def clean_basepaths(a):

@@ -314,12 +305,6 @@ def clean_basepaths(a):
        b[clean_basepath(x)] = a[x]
    return b

def clean_basepaths_list(a):
    b = []
    for x in a:
        b.append(clean_basepath(x))
    return b

def compare_sigfiles(a, b, recursecb = None):
    output = []

@@ -419,17 +404,6 @@ def compare_sigfiles(a, b, recursecb = None):
        for f in removed:
            output.append("Dependency on checksum of file %s was removed" % (f))

    changed = []
    for idx, task in enumerate(a_data['runtaskdeps']):
        a = a_data['runtaskdeps'][idx]
        b = b_data['runtaskdeps'][idx]
        if a_data['runtaskhashes'][a] != b_data['runtaskhashes'][b]:
            changed.append("%s with hash %s\n changed to\n%s with hash %s" % (a, a_data['runtaskhashes'][a], b, b_data['runtaskhashes'][b]))

    if changed:
        output.append("runtaskdeps changed from %s to %s" % (clean_basepaths_list(a_data['runtaskdeps']), clean_basepaths_list(b_data['runtaskdeps'])))
        output.append("\n".join(changed))


    if 'runtaskhashes' in a_data and 'runtaskhashes' in b_data:
        a = a_data['runtaskhashes']

@@ -506,17 +480,4 @@ def dump_sigfile(a):
    if 'taint' in a_data:
        output.append("Tainted (by forced/invalidated task): %s" % a_data['taint'])

    data = a_data['basehash']
    for dep in a_data['runtaskdeps']:
        data = data + a_data['runtaskhashes'][dep]

    for c in a_data['file_checksum_values']:
        data = data + c[1]

    if 'taint' in a_data:
        data = data + a_data['taint']

    h = hashlib.md5(data).hexdigest()
    output.append("Computed Hash is %s" % h)

    return output

@@ -121,12 +121,6 @@ class DataExpansions(unittest.TestCase):
        keys = self.d.keys()
        self.assertEqual(keys, ['value_of_foo', 'foo', 'bar'])

    def test_keys_deletion(self):
        newd = bb.data.createCopy(self.d)
        newd.delVar("bar")
        keys = newd.keys()
        self.assertEqual(keys, ['value_of_foo', 'foo'])

class TestNestedExpansions(unittest.TestCase):
    def setUp(self):
        self.d = bb.data.init()

@@ -265,20 +259,6 @@ class TestConcatOverride(unittest.TestCase):
        bb.data.update_data(self.d)
        self.assertEqual(self.d.getVar("TEST", True), "")

    def test_remove_expansion(self):
        self.d.setVar("BAR", "Z")
        self.d.setVar("TEST", "${BAR}/X Y")
        self.d.setVar("TEST_remove", "${BAR}/X")
        bb.data.update_data(self.d)
        self.assertEqual(self.d.getVar("TEST", True), "Y")

    def test_remove_expansion_items(self):
        self.d.setVar("TEST", "A B C D")
        self.d.setVar("BAR", "B D")
        self.d.setVar("TEST_remove", "${BAR}")
        bb.data.update_data(self.d)
        self.assertEqual(self.d.getVar("TEST", True), "A C")

class TestOverrides(unittest.TestCase):
    def setUp(self):
        self.d = bb.data.init()

@@ -319,39 +299,3 @@ class TestFlags(unittest.TestCase):
        self.assertEqual(self.d.getVarFlag("foo", "flag2"), None)


class Contains(unittest.TestCase):
    def setUp(self):
        self.d = bb.data.init()
        self.d.setVar("SOMEFLAG", "a b c")

    def test_contains(self):
        self.assertTrue(bb.utils.contains("SOMEFLAG", "a", True, False, self.d))
        self.assertTrue(bb.utils.contains("SOMEFLAG", "b", True, False, self.d))
        self.assertTrue(bb.utils.contains("SOMEFLAG", "c", True, False, self.d))

        self.assertTrue(bb.utils.contains("SOMEFLAG", "a b", True, False, self.d))
        self.assertTrue(bb.utils.contains("SOMEFLAG", "b c", True, False, self.d))
        self.assertTrue(bb.utils.contains("SOMEFLAG", "c a", True, False, self.d))

        self.assertTrue(bb.utils.contains("SOMEFLAG", "a b c", True, False, self.d))
        self.assertTrue(bb.utils.contains("SOMEFLAG", "c b a", True, False, self.d))

        self.assertFalse(bb.utils.contains("SOMEFLAG", "x", True, False, self.d))
        self.assertFalse(bb.utils.contains("SOMEFLAG", "a x", True, False, self.d))
        self.assertFalse(bb.utils.contains("SOMEFLAG", "x c b", True, False, self.d))
        self.assertFalse(bb.utils.contains("SOMEFLAG", "x c b a", True, False, self.d))

    def test_contains_any(self):
        self.assertTrue(bb.utils.contains_any("SOMEFLAG", "a", True, False, self.d))
        self.assertTrue(bb.utils.contains_any("SOMEFLAG", "b", True, False, self.d))
        self.assertTrue(bb.utils.contains_any("SOMEFLAG", "c", True, False, self.d))

        self.assertTrue(bb.utils.contains_any("SOMEFLAG", "a b", True, False, self.d))
        self.assertTrue(bb.utils.contains_any("SOMEFLAG", "b c", True, False, self.d))
        self.assertTrue(bb.utils.contains_any("SOMEFLAG", "c a", True, False, self.d))

        self.assertTrue(bb.utils.contains_any("SOMEFLAG", "a x", True, False, self.d))
        self.assertTrue(bb.utils.contains_any("SOMEFLAG", "x c", True, False, self.d))

        self.assertFalse(bb.utils.contains_any("SOMEFLAG", "x", True, False, self.d))
        self.assertFalse(bb.utils.contains_any("SOMEFLAG", "x y z", True, False, self.d))

@@ -24,7 +24,6 @@ import tempfile
import subprocess
import os
from bb.fetch2 import URI
from bb.fetch2 import FetchMethod
import bb

class URITest(unittest.TestCase):

@@ -315,7 +314,6 @@
class FetcherTest(unittest.TestCase):

    def setUp(self):
        self.origdir = os.getcwd()
        self.d = bb.data.init()
        self.tempdir = tempfile.mkdtemp()
        self.dldir = os.path.join(self.tempdir, "download")

@@ -327,7 +325,6 @@
        self.d.setVar("PERSISTENT_DIR", persistdir)

    def tearDown(self):
        os.chdir(self.origdir)
        bb.utils.prunedir(self.tempdir)

class MirrorUriTest(FetcherTest):

@@ -447,13 +444,6 @@ class FetcherLocalTest(FetcherTest):
        tree = self.fetchUnpack(['file://dir/subdir/e'])
        self.assertEqual(tree, ['dir/subdir/e'])

    def test_local_subdirparam(self):
        tree = self.fetchUnpack(['file://a;subdir=bar'])
        self.assertEqual(tree, ['bar/a'])

    def test_local_deepsubdirparam(self):
        tree = self.fetchUnpack(['file://dir/subdir/e;subdir=bar'])
        self.assertEqual(tree, ['bar/dir/subdir/e'])

class FetcherNetworkTest(FetcherTest):

@@ -568,83 +558,5 @@ class URLHandle(unittest.TestCase):
            result = bb.fetch.encodeurl(v)
            self.assertEqual(result, k)

class FetchMethodTest(FetcherTest):

    test_git_uris = {
        # version pattern "X.Y.Z"
        ("mx-1.0", "git://github.com/clutter-project/mx.git;branch=mx-1.4", "9b1db6b8060bd00b121a692f942404a24ae2960f", "")
            : "1.99.4",
        # version pattern "vX.Y"
        ("mtd-utils", "git://git.infradead.org/mtd-utils.git", "ca39eb1d98e736109c64ff9c1aa2a6ecca222d8f", "")
            : "1.5.0",
        # version pattern "pkg_name-X.Y"
        ("presentproto", "git://anongit.freedesktop.org/git/xorg/proto/presentproto", "24f3a56e541b0a9e6c6ee76081f441221a120ef9", "")
            : "1.0",
        # version pattern "pkg_name-vX.Y.Z"
        ("dtc", "git://git.qemu.org/dtc.git", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "")
            : "1.4.0",
        # combination version pattern
        ("sysprof", "git://git.gnome.org/sysprof", "cd44ee6644c3641507fb53b8a2a69137f2971219", "")
            : "1.2.0",
        ("u-boot-mkimage", "git://git.denx.de/u-boot.git;branch=master;protocol=git", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "")
            : "2014.01",
        # version pattern "yyyymmdd"
        ("mobile-broadband-provider-info", "git://git.gnome.org/mobile-broadband-provider-info", "4ed19e11c2975105b71b956440acdb25d46a347d", "")
            : "20120614",
        # packages with a valid GITTAGREGEX
        ("xf86-video-omap", "git://anongit.freedesktop.org/xorg/driver/xf86-video-omap", "ae0394e687f1a77e966cf72f895da91840dffb8f", "(?P<pver>(\d+\.(\d\.?)*))")
            : "0.4.3",
        ("build-appliance-image", "git://git.yoctoproject.org/poky", "b37dd451a52622d5b570183a81583cc34c2ff555", "(?P<pver>(([0-9][\.|_]?)+[0-9]))")
            : "11.0.0",
        ("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot", "cd437ecbd8986c894442f8fce1e0061e20f04dee", "chkconfig\-(?P<pver>((\d+[\.\-_]*)+))")
            : "1.3.59",
        ("remake", "git://github.com/rocky/remake.git", "f05508e521987c8494c92d9c2871aec46307d51d", "(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))")
            : "3.82+dbg0.9",
    }

    test_wget_uris = {
        # packages with versions inside directory name
        ("util-linux", "http://kernel.org/pub/linux/utils/util-linux/v2.23/util-linux-2.24.2.tar.bz2", "", "")
            : "2.24.2",
        ("enchant", "http://www.abisource.com/downloads/enchant/1.6.0/enchant-1.6.0.tar.gz", "", "")
            : "1.6.0",
        ("cmake", "http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz", "", "")
            : "2.8.12.1",
        # packages with versions only in current directory
        ("eglic", "http://downloads.yoctoproject.org/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2", "", "")
            : "2.19",
        ("gnu-config", "http://downloads.yoctoproject.org/releases/gnu-config/gnu-config-20120814.tar.bz2", "", "")
            : "20120814",
        # packages with "99" in the name of possible version
        ("pulseaudio", "http://freedesktop.org/software/pulseaudio/releases/pulseaudio-4.0.tar.xz", "", "")
            : "5.0",
        ("xserver-xorg", "http://xorg.freedesktop.org/releases/individual/xserver/xorg-server-1.15.1.tar.bz2", "", "")
            : "1.15.1",
        # packages with valid UPSTREAM_CHECK_URI and UPSTREAM_CHECK_REGEX
        ("cups", "http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2", "https://github.com/apple/cups/releases", "(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz")
            : "2.0.0",
        ("db", "http://download.oracle.com/berkeley-db/db-5.3.21.tar.gz", "http://www.oracle.com/technetwork/products/berkeleydb/downloads/index-082944.html", "http://download.oracle.com/otn/berkeley-db/(?P<name>db-)(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz")
            : "6.1.19",
    }
    if os.environ.get("BB_SKIP_NETTESTS") == "yes":
        print("Unset BB_SKIP_NETTESTS to run network tests")
    else:
        def test_git_latest_versionstring(self):
            for k, v in self.test_git_uris.items():
                self.d.setVar("PN", k[0])
                self.d.setVar("SRCREV", k[2])
                self.d.setVar("GITTAGREGEX", k[3])
                ud = bb.fetch2.FetchData(k[1], self.d)
                verstring = ud.method.latest_versionstring(ud, self.d)
                r = bb.utils.vercmp_string(v, verstring)
                self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))

        def test_wget_latest_versionstring(self):
            for k, v in self.test_wget_uris.items():
                self.d.setVar("PN", k[0])
                self.d.setVar("REGEX_URI", k[2])
                self.d.setVar("REGEX", k[3])
                ud = bb.fetch2.FetchData(k[1], self.d)
                verstring = ud.method.latest_versionstring(ud, self.d)
                r = bb.utils.vercmp_string(v, verstring)
                self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))

@@ -36,10 +36,6 @@ class VerCmpString(unittest.TestCase):
        self.assertTrue(result < 0)
        result = bb.utils.vercmp_string('1.1', '1_p2')
        self.assertTrue(result < 0)
        result = bb.utils.vercmp_string('1.0', '1.0+1.1-beta1')
        self.assertTrue(result < 0)
        result = bb.utils.vercmp_string('1.1', '1.0+1.1-beta1')
        self.assertTrue(result > 0)

    def test_explode_dep_versions(self):
        correctresult = {"foo" : ["= 1.10"]}

@@ -25,12 +25,12 @@ import bb.cache
import bb.cooker
import bb.providers
import bb.utils
from bb.cooker import state, BBCooker, CookerFeatures
from bb.cooker import state, BBCooker
from bb.cookerdata import CookerConfiguration, ConfigParameters
import bb.fetch2

class Tinfoil:
    def __init__(self, output=sys.stdout, tracking=False):
    def __init__(self, output=sys.stdout):
        # Needed to avoid deprecation warnings with python 2.6
        warnings.filterwarnings("ignore", category=DeprecationWarning)

@@ -48,10 +48,7 @@ class Tinfoil:
        configparams = TinfoilConfigParameters(parse_only=True)
        self.config.setConfigParameters(configparams)
        self.config.setServerRegIdleCallback(self.register_idle_function)
        features = []
        if tracking:
            features.append(CookerFeatures.BASEDATASTORE_TRACKING)
        self.cooker = BBCooker(self.config, features)
        self.cooker = BBCooker(self.config)
        self.config_data = self.cooker.data
        bb.providers.logger.setLevel(logging.ERROR)
        self.cooker_data = None

@@ -84,18 +81,13 @@ class Tinfoil:
        else:
            self.parseRecipes()

    def shutdown(self):
        self.cooker.shutdown(force=True)
        self.cooker.post_serve()
        self.cooker.unlockBitbake()

class TinfoilConfigParameters(ConfigParameters):

    def __init__(self, **options):
        self.initial_options = options
        super(TinfoilConfigParameters, self).__init__()

    def parseCommandLine(self, argv=sys.argv):
    def parseCommandLine(self):
        class DummyOptions:
            def __init__(self, initial_options):
                for key, val in initial_options.items():

@@ -22,24 +22,16 @@ import bb
import re
import ast

os.environ["DJANGO_SETTINGS_MODULE"] = "toaster.toastermain.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "toaster.toastermain.settings")

import toaster.toastermain.settings as toaster_django_settings
from toaster.orm.models import Build, Task, Recipe, Layer_Version, Layer, Target, LogMessage, HelpText
from toaster.orm.models import Target_Image_File, BuildArtifact
from toaster.orm.models import Target_Image_File
from toaster.orm.models import Variable, VariableHistory
from toaster.orm.models import Package, Package_File, Target_Installed_Package, Target_File
from toaster.orm.models import Task_Dependency, Package_Dependency
from toaster.orm.models import Recipe_Dependency
from bb.msg import BBLogFormatter as format
from django.db import models
from pprint import pformat
import logging

from django.db import transaction, connection

logger = logging.getLogger("BitBake")


class NotExisting(Exception):
    pass
@@ -51,59 +43,10 @@ class ORMWrapper(object):
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.layer_version_objects = []
|
||||
self.task_objects = {}
|
||||
self.recipe_objects = {}
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
def _build_key(**kwargs):
|
||||
key = "0"
|
||||
for k in sorted(kwargs.keys()):
|
||||
if isinstance(kwargs[k], models.Model):
|
||||
key += "-%d" % kwargs[k].id
|
||||
else:
|
||||
key += "-%s" % str(kwargs[k])
|
||||
return key
|
||||
|
||||
|
||||
def _cached_get_or_create(self, clazz, **kwargs):
|
||||
""" This is a memory-cached get_or_create. We assume that the objects will not be created in the
|
||||
database through any other means.
|
||||
"""
|
||||
|
||||
assert issubclass(clazz, models.Model), "_cached_get_or_create needs to get the class as first argument"
|
||||
|
||||
key = ORMWrapper._build_key(**kwargs)
|
||||
dictname = "objects_%s" % clazz.__name__
|
||||
if not dictname in vars(self).keys():
|
||||
vars(self)[dictname] = {}
|
||||
|
||||
created = False
|
||||
if not key in vars(self)[dictname].keys():
|
||||
vars(self)[dictname][key] = clazz.objects.create(**kwargs)
|
||||
created = True
|
||||
|
||||
return (vars(self)[dictname][key], created)
|
||||
|
||||
|
||||
def _cached_get(self, clazz, **kwargs):
|
||||
""" This is a memory-cached get. We assume that the objects will not change in the database between gets.
|
||||
"""
|
||||
assert issubclass(clazz, models.Model), "_cached_get needs to get the class as first argument"
|
||||
|
||||
key = ORMWrapper._build_key(**kwargs)
|
||||
dictname = "objects_%s" % clazz.__name__
|
||||
|
||||
if not dictname in vars(self).keys():
|
||||
vars(self)[dictname] = {}
|
||||
|
||||
if not key in vars(self)[dictname].keys():
|
||||
vars(self)[dictname][key] = clazz.objects.get(**kwargs)
|
||||
|
||||
return vars(self)[dictname][key]
|
||||
|
||||
def create_build_object(self, build_info, brbe):
|
||||
def create_build_object(self, build_info):
|
||||
assert 'machine' in build_info
|
||||
assert 'distro' in build_info
|
||||
assert 'distro_version' in build_info
|
||||
@@ -122,18 +65,6 @@ class ORMWrapper(object):
|
||||
build_name=build_info['build_name'],
|
||||
bitbake_version=build_info['bitbake_version'])
|
||||
|
||||
logger.debug(1, "buildinfohelper: build is created %s" % build)
|
||||
if brbe is not None:
|
||||
logger.debug(1, "buildinfohelper: brbe is %s" % brbe)
|
||||
from bldcontrol.models import BuildEnvironment, BuildRequest
|
||||
br, be = brbe.split(":")
|
||||
|
||||
buildrequest = BuildRequest.objects.get(pk = br)
|
||||
buildrequest.build = build
|
||||
buildrequest.save()
|
||||
|
||||
build.project_id = buildrequest.project_id
|
||||
build.save()
|
||||
return build
|
||||
|
||||
def create_target_objects(self, target_info):
|
||||
@@ -145,7 +76,7 @@ class ORMWrapper(object):
|
||||
tgt_object = Target.objects.create( build = target_info['build'],
|
||||
target = tgt_name,
|
||||
is_image = False,
|
||||
)
|
||||
);
|
||||
targets.append(tgt_object)
|
||||
return targets
|
||||
|
||||
@@ -165,7 +96,8 @@ class ORMWrapper(object):
|
||||
build.outcome = outcome
|
||||
build.save()
|
||||
|
||||
def update_target_set_license_manifest(self, target, license_manifest_path):
|
||||
def update_target_object(self, target, license_manifest_path):
|
||||
|
||||
target.license_manifest_path = license_manifest_path
|
||||
target.save()
|
||||
|
||||
@@ -174,47 +106,39 @@ class ORMWrapper(object):
|
||||
assert 'recipe' in task_information
|
||||
assert 'task_name' in task_information
|
||||
|
||||
# we use must_exist info for database look-up optimization
|
||||
task_object, created = self._cached_get_or_create(Task,
|
||||
build=task_information['build'],
|
||||
recipe=task_information['recipe'],
|
||||
task_name=task_information['task_name']
|
||||
)
|
||||
if created and must_exist:
|
||||
task_information['debug'] = "build id %d, recipe id %d" % (task_information['build'].pk, task_information['recipe'].pk)
|
||||
raise NotExisting("Task object created when expected to exist", task_information)
|
||||
task_object, created = Task.objects.get_or_create(
|
||||
build=task_information['build'],
|
||||
recipe=task_information['recipe'],
|
||||
task_name=task_information['task_name'],
|
||||
)
|
||||
|
||||
if must_exist and created:
|
||||
task_information['debug'] = "build id %d, recipe id %d" % (task_information['build'].pk, task_information['recipe'].pk)
|
||||
task_object.delete()
|
||||
raise NotExisting("Task object created when expected to exist", task_information)
|
||||
|
||||
object_changed = False
|
||||
for v in vars(task_object):
|
||||
if v in task_information.keys():
|
||||
if vars(task_object)[v] != task_information[v]:
|
||||
vars(task_object)[v] = task_information[v]
|
||||
object_changed = True
|
||||
vars(task_object)[v] = task_information[v]
|
||||
|
||||
# update setscene-related information if the task has a setscene
|
||||
if task_object.outcome == Task.OUTCOME_COVERED and 1 == task_object.get_related_setscene().count():
|
||||
task_object.outcome = Task.OUTCOME_CACHED
|
||||
object_changed = True
|
||||
# update setscene-related information
|
||||
if 1 == Task.objects.related_setscene(task_object).count():
|
||||
if task_object.outcome == Task.OUTCOME_COVERED:
|
||||
task_object.outcome = Task.OUTCOME_CACHED
|
||||
|
||||
outcome_task_setscene = Task.objects.get(task_executed=True, build = task_object.build,
|
||||
recipe = task_object.recipe, task_name=task_object.task_name+"_setscene").outcome
|
||||
if outcome_task_setscene == Task.OUTCOME_SUCCESS:
|
||||
task_object.sstate_result = Task.SSTATE_RESTORED
|
||||
object_changed = True
|
||||
elif outcome_task_setscene == Task.OUTCOME_FAILED:
|
||||
task_object.sstate_result = Task.SSTATE_FAILED
|
||||
object_changed = True
|
||||
|
||||
# mark down duration if we have a start time and a current time
|
||||
if 'start_time' in task_information.keys() and 'end_time' in task_information.keys():
|
||||
duration = task_information['end_time'] - task_information['start_time']
|
||||
task_object.elapsed_time = duration
|
||||
object_changed = True
|
||||
del task_information['start_time']
|
||||
del task_information['end_time']
|
||||
|
||||
if object_changed:
|
||||
task_object.save()
|
||||
task_object.save()
|
||||
return task_object
|
||||
|
||||
|
||||
@@ -222,22 +146,20 @@ class ORMWrapper(object):
        assert 'layer_version' in recipe_information
        assert 'file_path' in recipe_information

        if recipe_information['file_path'].startswith(recipe_information['layer_version'].layer.local_path):
            recipe_information['file_path'] = recipe_information['file_path'][len(recipe_information['layer_version'].layer.local_path):].lstrip("/")

        recipe_object, created = self._cached_get_or_create(Recipe, layer_version=recipe_information['layer_version'],
                                                            file_path=recipe_information['file_path'])
        if created and must_exist:
        recipe_object, created = Recipe.objects.get_or_create(
                        layer_version=recipe_information['layer_version'],
                        file_path=recipe_information['file_path'])

        if must_exist and created:
            recipe_object.delete()
            raise NotExisting("Recipe object created when expected to exist", recipe_information)

        object_changed = False
        for v in vars(recipe_object):
            if v in recipe_information.keys():
                object_changed = True
                vars(recipe_object)[v] = recipe_information[v]

        if object_changed:
            recipe_object.save()
        recipe_object.save()

        return recipe_object

@@ -256,53 +178,19 @@ class ORMWrapper(object):
                        priority = layer_version_information['priority']
                        )

        self.layer_version_objects.append(layer_version_object)

        return layer_version_object

    def get_update_layer_object(self, layer_information, brbe):
    def get_update_layer_object(self, layer_information):
        assert 'name' in layer_information
        assert 'local_path' in layer_information
        assert 'layer_index_url' in layer_information

        if brbe is None:
            layer_object, created = Layer.objects.get_or_create(
        layer_object, created = Layer.objects.get_or_create(
                        name=layer_information['name'],
                        local_path=layer_information['local_path'],
                        layer_index_url=layer_information['layer_index_url'])
            return layer_object
        else:
            # we are under managed mode; we must match the layer used in the Project Layer
            from bldcontrol.models import BuildEnvironment, BuildRequest
            br_id, be_id = brbe.split(":")

            # find layer by checkout path;
            from bldcontrol import bbcontroller
            bc = bbcontroller.getBuildEnvironmentController(pk = be_id)

            # we might have a race condition here, as the project layers may change between the build trigger and the actual build execution
            # but we can only match on the layer name, so the worst thing can happen is a mis-identification of the layer, not a total failure

            # note that this is different
            buildrequest = BuildRequest.objects.get(pk = br_id)
            for brl in buildrequest.brlayer_set.all():
                localdirname = os.path.join(bc.getGitCloneDirectory(brl.giturl, brl.commit), brl.dirpath)
                # we get a relative path, unless running in HEAD mode where the path is absolute
                if not localdirname.startswith("/"):
                    localdirname = os.path.join(bc.be.sourcedir, localdirname)
                #logger.debug(1, "Localdirname %s local_path %s" % (localdirname, layer_information['local_path']))
                if localdirname.startswith(layer_information['local_path']):
                    # we matched the BRLayer, but we need the layer_version that generated this BR; reverse of the Project.schedule_build()
                    #logger.debug(1, "Matched %s to BRlayer %s" % (pformat(layer_information["local_path"]), localdirname))
                    for pl in buildrequest.project.projectlayer_set.filter(layercommit__layer__name = brl.name):
                        if pl.layercommit.layer.vcs_url == brl.giturl :
                            layer = pl.layercommit.layer
                            layer.local_path = layer_information['local_path']
                            layer.save()
                            return layer

            raise NotExisting("Unidentified layer %s" % pformat(layer_information))

        return layer_object

    def save_target_file_information(self, build_obj, target_obj, filedata):
        assert isinstance(build_obj, Build)
@@ -334,7 +222,7 @@ class ORMWrapper(object):
        parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
        if len(parent_path) == 0:
            parent_path = "/"
        parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
        parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
        tf_obj = Target_File.objects.create(
                        target = target_obj,
                        path = path,
@@ -368,7 +256,7 @@ class ORMWrapper(object):
                        permission = permission,
                        owner = user,
                        group = group)
        parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
        parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
        tf_obj.directory = parent_obj
        tf_obj.save()

@@ -419,16 +307,12 @@ class ORMWrapper(object):
        errormsg = ""
        for p in packagedict:
            searchname = p
            if p not in pkgpnmap:
                logger.warning("Image packages list contains %s, but is"
                               " missing from all packages list where the"
                               " metadata comes from. Skipping...", p)
                continue
            if 'OPKGN' in pkgpnmap[p].keys():
                searchname = pkgpnmap[p]['OPKGN']

            packagedict[p]['object'], created = Package.objects.get_or_create( build = build_obj, name = searchname )
            if created or packagedict[p]['object'].size == -1:    # save the data anyway we can, not just if it was not created here; bug [YOCTO #6887]
                if created:
                    # package was not built in the current build, but
                    # fill in everything we can from the runtime-reverse package data
                    try:
                        packagedict[p]['object'].recipe = recipes[pkgpnmap[p]['PN']]
@@ -442,14 +326,11 @@ class ORMWrapper(object):
                        packagedict[p]['object'].size = int(pkgpnmap[p]['PKGSIZE'])

                        # no files recorded for this package, so save files info
                        packagefile_objects = []
                        for targetpath in pkgpnmap[p]['FILES_INFO']:
                            targetfilesize = pkgpnmap[p]['FILES_INFO'][targetpath]
                            packagefile_objects.append(Package_File( package = packagedict[p]['object'],
                            Package_File.objects.create( package = packagedict[p]['object'],
                                                         path = targetpath,
                                                         size = targetfilesize))
                        if len(packagefile_objects):
                            Package_File.objects.bulk_create(packagefile_objects)
                                                         size = targetfilesize)
                    except KeyError as e:
                        errormsg += "  stpi: Key error, package %s key %s \n" % ( p, e )

@@ -459,7 +340,6 @@ class ORMWrapper(object):

            Target_Installed_Package.objects.create(target = target_obj, package = packagedict[p]['object'])

        packagedeps_objs = []
        for p in packagedict:
            for (px,deptype) in packagedict[p]['depends']:
                if deptype == 'depends':
@@ -467,39 +347,19 @@ class ORMWrapper(object):
                elif deptype == 'recommends':
                    tdeptype = Package_Dependency.TYPE_TRECOMMENDS

                try:
                    packagedeps_objs.append(Package_Dependency(
                        package = packagedict[p]['object'],
                        depends_on = packagedict[px]['object'],
                        dep_type = tdeptype,
                        target = target_obj))
                except KeyError as e:
                    logger.warn("Could not add dependency to the package %s "
                                "because %s is an unknown package", p, px)

        if len(packagedeps_objs) > 0:
            Package_Dependency.objects.bulk_create(packagedeps_objs)
        else:
            logger.info("No package dependencies created")
                Package_Dependency.objects.create( package = packagedict[p]['object'],
                        depends_on = packagedict[px]['object'],
                        dep_type = tdeptype,
                        target = target_obj);

        if (len(errormsg) > 0):
            logger.warn("buildinfohelper: target_package_info could not identify recipes: \n%s" % errormsg)
            raise Exception(errormsg)

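# Illustrative aside (not part of the patch): the lists built above exist so
# the Package_File rows can be written with a single bulk_create() instead of
# one INSERT per file, which matters for images containing many files. A
# minimal sketch, assuming a hypothetical 'some_package' object and a
# FILES_INFO-style dict:
files_info = {'/usr/bin/foo': 1234, '/etc/foo.conf': 56}   # hypothetical data
objs = [Package_File(package = some_package, path = p, size = s)   # some_package is assumed
        for (p, s) in files_info.items()]
if len(objs):
    Package_File.objects.bulk_create(objs)    # one query for all rows
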
    def save_target_image_file_information(self, target_obj, file_name, file_size):
        target_image_file = Target_Image_File.objects.create( target = target_obj,
                            file_name = file_name,
                            file_size = file_size)

    def save_artifact_information(self, build_obj, file_name, file_size):
        # we skip the image files from other builds
        if Target_Image_File.objects.filter(file_name = file_name).count() > 0:
            return

        # do not update artifacts found in other builds
        if BuildArtifact.objects.filter(file_name = file_name).count() > 0:
            return

        BuildArtifact.objects.create(build = build_obj, file_name = file_name, file_size = file_size)
        target_image_file.save()

    def create_logmessage(self, log_information):
        assert 'build' in log_information

@@ -541,13 +401,10 @@ class ORMWrapper(object):
        bp_object.save()

        # save any attached file information
        packagefile_objects = []
        for path in package_info['FILES_INFO']:
            packagefile_objects.append(Package_File( package = bp_object,
            fo = Package_File.objects.create( package = bp_object,
                                        path = path,
                                        size = package_info['FILES_INFO'][path] ))
        if len(packagefile_objects):
            Package_File.objects.bulk_create(packagefile_objects)
                                        size = package_info['FILES_INFO'][path] )

        def _po_byname(p):
            pkg, created = Package.objects.get_or_create(build = build_obj, name = p)
@@ -556,44 +413,39 @@ class ORMWrapper(object):
            pkg.save()
            return pkg

        packagedeps_objs = []
        # save soft dependency information
        if 'RDEPENDS' in package_info and package_info['RDEPENDS']:
            for p in bb.utils.explode_deps(package_info['RDEPENDS']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RDEPENDS))
                Package_Dependency.objects.get_or_create( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RDEPENDS)
        if 'RPROVIDES' in package_info and package_info['RPROVIDES']:
            for p in bb.utils.explode_deps(package_info['RPROVIDES']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RPROVIDES))
                Package_Dependency.objects.get_or_create( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RPROVIDES)
        if 'RRECOMMENDS' in package_info and package_info['RRECOMMENDS']:
            for p in bb.utils.explode_deps(package_info['RRECOMMENDS']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RRECOMMENDS))
                Package_Dependency.objects.get_or_create( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RRECOMMENDS)
        if 'RSUGGESTS' in package_info and package_info['RSUGGESTS']:
            for p in bb.utils.explode_deps(package_info['RSUGGESTS']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RSUGGESTS))
                Package_Dependency.objects.get_or_create( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RSUGGESTS)
        if 'RREPLACES' in package_info and package_info['RREPLACES']:
            for p in bb.utils.explode_deps(package_info['RREPLACES']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RREPLACES))
                Package_Dependency.objects.get_or_create( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RREPLACES)
        if 'RCONFLICTS' in package_info and package_info['RCONFLICTS']:
            for p in bb.utils.explode_deps(package_info['RCONFLICTS']):
                packagedeps_objs.append(Package_Dependency( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS))

        if len(packagedeps_objs) > 0:
            Package_Dependency.objects.bulk_create(packagedeps_objs)
                Package_Dependency.objects.get_or_create( package = bp_object,
                    depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS)

        return bp_object

    def save_build_variables(self, build_obj, vardump):
        assert isinstance(build_obj, Build)

        helptext_objects = []
        for k in vardump:
            desc = vardump[k]['doc']
            desc = vardump[k]['doc'];
            if desc is None:
                var_words = [word for word in k.split('_')]
                root_var = "_".join([word for word in var_words if word.isupper()])
@@ -601,33 +453,25 @@ class ORMWrapper(object):
                desc = vardump[root_var]['doc']
            if desc is None:
                desc = ''
            if len(desc):
                helptext_objects.append(HelpText(build=build_obj,
            if desc:
                helptext_obj = HelpText.objects.create(build=build_obj,
                                area=HelpText.VARIABLE,
                                key=k,
                                text=desc))
                                text=desc)
            if not bool(vardump[k]['func']):
                value = vardump[k]['v']
                value = vardump[k]['v'];
                if value is None:
                    value = ''
                variable_obj = Variable.objects.create( build = build_obj,
                    variable_name = k,
                    variable_value = value,
                    description = desc)

                varhist_objects = []
                for vh in vardump[k]['history']:
                    if not 'documentation.conf' in vh['file']:
                        varhist_objects.append(VariableHistory( variable = variable_obj,
                        VariableHistory.objects.create( variable = variable_obj,
                                file_name = vh['file'],
                                line_number = vh['line'],
                                operation = vh['op']))
                if len(varhist_objects):
                    VariableHistory.objects.bulk_create(varhist_objects)

        HelpText.objects.bulk_create(helptext_objects)

class MockEvent: pass        # sometimes we mock an event, declare it here
                                operation = vh['op'])

class BuildInfoHelper(object):
    """ This class gathers the build information from the server and sends it
@@ -636,23 +480,15 @@ class BuildInfoHelper(object):
    Keeps in memory all data that needs matching before writing it to the database
    """


    def __init__(self, server, has_build_history = False):
        self._configure_django()
        self.internal_state = {}
        self.internal_state['taskdata'] = {}
        self.task_order = 0
        self.autocommit_step = 1
        self.server = server
        # we use manual transactions if the database doesn't autocommit on us
        if not connection.features.autocommits_when_autocommit_is_off:
            transaction.set_autocommit(False)
        self.orm_wrapper = ORMWrapper()
        self.has_build_history = has_build_history
        self.tmp_dir = self.server.runCommand(["getVariable", "TMPDIR"])[0]
        self.brbe = self.server.runCommand(["getVariable", "TOASTER_BRBE"])[0]
        logger.debug(1, "buildinfohelper: Build info helper inited %s" % vars(self))

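# Illustrative sketch of the manual-transaction pattern used in __init__
# above, assuming the Django 1.6+ transaction API:
from django.db import connection, transaction

if not connection.features.autocommits_when_autocommit_is_off:
    transaction.set_autocommit(False)    # batch many ORM writes together
    # ... perform writes ...
    transaction.commit()                 # flush explicitly when needed
    transaction.set_autocommit(True)     # restore the default behaviour
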
    def _configure_django(self):
        # Add toaster to sys path for importing modules
@@ -696,47 +532,19 @@ class BuildInfoHelper(object):
        assert path.startswith("/")
        assert 'build' in self.internal_state

        if self.brbe is None:
            def _slkey_interactive(layer_version):
                assert isinstance(layer_version, Layer_Version)
                return len(layer_version.layer.local_path)
        def _slkey(layer_version):
            assert isinstance(layer_version, Layer_Version)
            return len(layer_version.layer.local_path)

            # Heuristics: we always match recipe to the deepest layer path in the discovered layers
            for lvo in sorted(self.orm_wrapper.layer_version_objects, reverse=True, key=_slkey_interactive):
                # we can match to the recipe file path
                if path.startswith(lvo.layer.local_path):
                    return lvo
        # Heuristics: we always match recipe to the deepest layer path that
        # we can match to the recipe file path
        for bl in sorted(Layer_Version.objects.filter(build = self.internal_state['build']), reverse=True, key=_slkey):
            if (path.startswith(bl.layer.local_path)):
                return bl

        else:
            br_id, be_id = self.brbe.split(":")
            from bldcontrol.bbcontroller import getBuildEnvironmentController
            from bldcontrol.models import BuildRequest
            bc = getBuildEnvironmentController(pk = be_id)

            def _slkey_managed(layer_version):
                return len(bc.getGitCloneDirectory(layer_version.giturl, layer_version.commit) + layer_version.dirpath)

            # Heuristics: we match the path to where the layers have been checked out
            for brl in sorted(BuildRequest.objects.get(pk = br_id).brlayer_set.all(), reverse = True, key = _slkey_managed):
                localdirname = os.path.join(bc.getGitCloneDirectory(brl.giturl, brl.commit), brl.dirpath)
                # we get a relative path, unless running in HEAD mode where the path is absolute
                if not localdirname.startswith("/"):
                    localdirname = os.path.join(bc.be.sourcedir, localdirname)
                if path.startswith(localdirname):
                    #logger.warn("-- managed: matched path %s with layer %s " % (path, localdirname))
                    # we matched the BRLayer, but we need the layer_version that generated this br
                    for lvo in self.orm_wrapper.layer_version_objects:
                        if brl.name == lvo.layer.name:
                            return lvo

        #if we get here, we didn't read layers correctly; dump whatever information we have on the error log
        logger.error("Could not match layer version for recipe path %s : %s" % (path, self.orm_wrapper.layer_version_objects))

        #mockup the new layer
        unknown_layer, created = Layer.objects.get_or_create(name="__FIXME__unidentified_layer", local_path="/", layer_index_url="")
        unknown_layer_version_obj, created = Layer_Version.objects.get_or_create(layer = unknown_layer, build = self.internal_state['build'])

        return unknown_layer_version_obj
        #TODO: if we get here, we didn't read layers correctly
        assert False
        return None

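# Illustrative sketch of the "deepest layer path wins" heuristic above,
# reduced to plain strings (recipe_path and layer_paths are hypothetical):
def match_deepest_layer(recipe_path, layer_paths):
    # try the longest (most specific) checkout path first
    for lp in sorted(layer_paths, key=len, reverse=True):
        if recipe_path.startswith(lp):
            return lp
    return None

# match_deepest_layer("/srv/poky/meta-extra/recipes/foo.bb",
#                     ["/srv/poky/meta", "/srv/poky/meta-extra"])
# -> "/srv/poky/meta-extra"
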
    def _get_recipe_information_from_taskfile(self, taskfile):
        localfilepath = taskfile.split(":")[-1]
@@ -779,43 +587,27 @@ class BuildInfoHelper(object):

    ################################
    ## externally available methods to store information
    @staticmethod
    def _get_data_from_event(event):
        evdata = None
        if '_localdata' in vars(event):
            evdata = event._localdata
        elif 'data' in vars(event):
            evdata = event.data
        else:
            raise Exception("Event with neither _localdata nor data properties")
        return evdata

    def store_layer_info(self, event):
        layerinfos = BuildInfoHelper._get_data_from_event(event)
        assert 'data' in vars(event)
        layerinfos = event.data
        self.internal_state['lvs'] = {}
        for layer in layerinfos:
            try:
                self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)] = layerinfos[layer]['version']
            except NotExisting as nee:
                logger.warn("buildinfohelper: cannot identify layer exception: %s " % nee)
            self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer])] = layerinfos[layer]['version']


    def store_started_build(self, event):
        assert '_pkgs' in vars(event)
        build_information = self._get_build_information()

        build_obj = self.orm_wrapper.create_build_object(build_information, self.brbe)

        build_obj = self.orm_wrapper.create_build_object(build_information)
        self.internal_state['build'] = build_obj

        # save layer version information for this build
        if not 'lvs' in self.internal_state:
            logger.error("Layer version information not found; check if the bitbake server was configured to inherit toaster.bbclass.")
        else:
            for layer_obj in self.internal_state['lvs']:
                self.orm_wrapper.get_update_layer_version_object(build_obj, layer_obj, self.internal_state['lvs'][layer_obj])
        for layer_obj in self.internal_state['lvs']:
            self.orm_wrapper.get_update_layer_version_object(build_obj, layer_obj, self.internal_state['lvs'][layer_obj])

            del self.internal_state['lvs']
        del self.internal_state['lvs']

        # create target information
        target_information = {}
@@ -825,27 +617,16 @@ class BuildInfoHelper(object):
        self.internal_state['targets'] = self.orm_wrapper.create_target_objects(target_information)

        # Save build configuration
        data = self.server.runCommand(["getAllKeysWithFlags", ["doc", "func"]])[0]
        self.orm_wrapper.save_build_variables(build_obj, data)

        return self.brbe

        self.orm_wrapper.save_build_variables(build_obj, self.server.runCommand(["getAllKeysWithFlags", ["doc", "func"]])[0])

    def update_target_image_file(self, event):
        image_fstypes = self.server.runCommand(["getVariable", "IMAGE_FSTYPES"])[0]
        evdata = BuildInfoHelper._get_data_from_event(event)

        for t in self.internal_state['targets']:
            if t.is_image == True:
                output_files = list(evdata.viewkeys())
                output_files = list(event.data.viewkeys())
                for output in output_files:
                    if t.target in output and 'rootfs' in output and not output.endswith(".manifest"):
                        self.orm_wrapper.save_target_image_file_information(t, output, evdata[output])

    def update_artifact_image_file(self, event):
        evdata = BuildInfoHelper._get_data_from_event(event)
        for artifact_path in evdata.keys():
            self.orm_wrapper.save_artifact_information(self.internal_state['build'], artifact_path, evdata[artifact_path])
                    if t.target in output and output.split('.rootfs.')[1] in image_fstypes:
                        self.orm_wrapper.save_target_image_file_information(t, output, event.data[output])

    def update_build_information(self, event, errors, warnings, taskfailures):
        if 'build' in self.internal_state:
@@ -853,12 +634,12 @@ class BuildInfoHelper(object):


    def store_license_manifest_path(self, event):
        deploy_dir = BuildInfoHelper._get_data_from_event(event)['deploy_dir']
        image_name = BuildInfoHelper._get_data_from_event(event)['image_name']
        path = deploy_dir + "/licenses/" + image_name + "/license.manifest"
        deploy_dir = event.data['deploy_dir']
        image_name = event.data['image_name']
        path = deploy_dir + "/licenses/" + image_name + "/"
        for target in self.internal_state['targets']:
            if target.target in image_name:
                self.orm_wrapper.update_target_set_license_manifest(target, path)
                self.orm_wrapper.update_target_object(target, path)


    def store_started_task(self, event):
@@ -902,21 +683,14 @@ class BuildInfoHelper(object):


    def store_tasks_stats(self, event):
        for (taskfile, taskname, taskstats, recipename) in BuildInfoHelper._get_data_from_event(event):
        for (taskfile, taskname, taskstats, recipename) in event.data:
            localfilepath = taskfile.split(":")[-1]
            assert localfilepath.startswith("/")

            recipe_information = self._get_recipe_information_from_taskfile(taskfile)
            try:
                if recipe_information['file_path'].startswith(recipe_information['layer_version'].layer.local_path):
                    recipe_information['file_path'] = recipe_information['file_path'][len(recipe_information['layer_version'].layer.local_path):].lstrip("/")

                recipe_object = Recipe.objects.get(layer_version = recipe_information['layer_version'],
            recipe_object = Recipe.objects.get(layer_version = recipe_information['layer_version'],
                        file_path__endswith = recipe_information['file_path'],
                        name = recipename)
            except Recipe.DoesNotExist:
                logger.error("Could not find recipe for recipe_information %s name %s" % (pformat(recipe_information), recipename))
                raise

            task_information = {}
            task_information['build'] = self.internal_state['build']
@@ -924,8 +698,6 @@ class BuildInfoHelper(object):
            task_information['task_name'] = taskname
            task_information['cpu_usage'] = taskstats['cpu_usage']
            task_information['disk_io'] = taskstats['disk_io']
            if 'elapsed_time' in taskstats:
                task_information['elapsed_time'] = taskstats['elapsed_time']
            task_obj = self.orm_wrapper.get_update_task_object(task_information, True) # must exist

    def update_and_store_task(self, event):
@@ -979,22 +751,16 @@ class BuildInfoHelper(object):
            task_information['outcome'] = Task.OUTCOME_FAILED
            del self.internal_state['taskdata'][identifier]

        if not connection.features.autocommits_when_autocommit_is_off:
            # we force a sync point here, to get the progress bar to show
            if self.autocommit_step % 3 == 0:
                transaction.set_autocommit(True)
                transaction.set_autocommit(False)
            self.autocommit_step += 1

        self.orm_wrapper.get_update_task_object(task_information, True) # must exist

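# Illustrative note (sketch): the sync point above relies on toggling
# autocommit, which commits the open transaction so readers (the progress
# bar) see fresh rows. An explicit equivalent, assuming autocommit is
# currently off and a local step counter:
autocommit_step = 1
if autocommit_step % 3 == 0:
    transaction.commit()    # periodic flush of batched writes
autocommit_step += 1
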
    def store_missed_state_tasks(self, event):
        for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['missed']:
        for (fn, taskname, taskhash, sstatefile) in event.data['missed']:

            identifier = fn + taskname + "_setscene"
            recipe_information = self._get_recipe_information_from_taskfile(fn)
            recipe = self.orm_wrapper.get_update_recipe_object(recipe_information)
            class MockEvent: pass
            mevent = MockEvent()
            mevent.taskname = taskname
            mevent.taskhash = taskhash
@@ -1008,11 +774,12 @@ class BuildInfoHelper(object):

            self.orm_wrapper.get_update_task_object(task_information)

        for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['found']:
        for (fn, taskname, taskhash, sstatefile) in event.data['found']:

            identifier = fn + taskname + "_setscene"
            recipe_information = self._get_recipe_information_from_taskfile(fn)
            recipe = self.orm_wrapper.get_update_recipe_object(recipe_information)
            class MockEvent: pass
            mevent = MockEvent()
            mevent.taskname = taskname
            mevent.taskhash = taskhash
@@ -1024,24 +791,21 @@ class BuildInfoHelper(object):


    def store_target_package_data(self, event):
        assert 'data' in vars(event)
        # for all image targets
        for target in self.internal_state['targets']:
            if target.is_image:
                pkgdata = BuildInfoHelper._get_data_from_event(event)['pkgdata']
                imgdata = BuildInfoHelper._get_data_from_event(event)['imgdata'][target.target]
                filedata = BuildInfoHelper._get_data_from_event(event)['filedata'][target.target]

                try:
                    pkgdata = event.data['pkgdata']
                    imgdata = event.data['imgdata'][target.target]
                    self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata, pkgdata, self.internal_state['recipes'])
                except KeyError as e:
                    logger.warn("KeyError in save_target_package_information"
                                " %s", e)

                try:
                    filedata = event.data['filedata'][target.target]
                    self.orm_wrapper.save_target_file_information(self.internal_state['build'], target, filedata)
                except KeyError as e:
                    logger.warn("KeyError in save_target_file_information"
                                " %s", e)
                except KeyError:
                    # we must have not got the data for this image, nothing to save
                    pass



    def store_dependency_information(self, event):
        assert '_depgraph' in vars(event)
@@ -1071,29 +835,14 @@ class BuildInfoHelper(object):

            recipe_info = {}
            recipe_info['name'] = pn
            recipe_info['version'] = event._depgraph['pn'][pn]['version'].lstrip(":")
            recipe_info['layer_version'] = layer_version_obj

            if 'version' in event._depgraph['pn'][pn]:
                recipe_info['version'] = event._depgraph['pn'][pn]['version'].lstrip(":")

            if 'summary' in event._depgraph['pn'][pn]:
                recipe_info['summary'] = event._depgraph['pn'][pn]['summary']

            if 'license' in event._depgraph['pn'][pn]:
                recipe_info['license'] = event._depgraph['pn'][pn]['license']

            if 'description' in event._depgraph['pn'][pn]:
                recipe_info['description'] = event._depgraph['pn'][pn]['description']

            if 'section' in event._depgraph['pn'][pn]:
                recipe_info['section'] = event._depgraph['pn'][pn]['section']

            if 'homepage' in event._depgraph['pn'][pn]:
                recipe_info['homepage'] = event._depgraph['pn'][pn]['homepage']

            if 'bugtracker' in event._depgraph['pn'][pn]:
                recipe_info['bugtracker'] = event._depgraph['pn'][pn]['bugtracker']

            recipe_info['summary'] = event._depgraph['pn'][pn]['summary']
            recipe_info['license'] = event._depgraph['pn'][pn]['license']
            recipe_info['description'] = event._depgraph['pn'][pn]['description']
            recipe_info['section'] = event._depgraph['pn'][pn]['section']
            recipe_info['homepage'] = event._depgraph['pn'][pn]['homepage']
            recipe_info['bugtracker'] = event._depgraph['pn'][pn]['bugtracker']
            recipe_info['file_path'] = file_name
            recipe = self.orm_wrapper.get_update_recipe_object(recipe_info)
            recipe.is_image = False
@@ -1115,22 +864,20 @@ class BuildInfoHelper(object):

        # save recipe dependency
        # buildtime
        recipedeps_objects = []
        for recipe in event._depgraph['depends']:
            try:
                target = self.internal_state['recipes'][recipe]
                for dep in event._depgraph['depends'][recipe]:
                    dependency = self.internal_state['recipes'][dep]
                    recipedeps_objects.append(Recipe_Dependency( recipe = target,
                        depends_on = dependency, dep_type = Recipe_Dependency.TYPE_DEPENDS))
                    Recipe_Dependency.objects.get_or_create( recipe = target,
                        depends_on = dependency, dep_type = Recipe_Dependency.TYPE_DEPENDS)
            except KeyError as e:
                if e not in assume_provided and not str(e).startswith("virtual/"):
                    errormsg += "  stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, e)
        Recipe_Dependency.objects.bulk_create(recipedeps_objects)

        # save all task information
        def _save_a_task(taskdesc):
            spec = re.split(r'\.', taskdesc)
            spec = re.split(r'\.', taskdesc);
            pn = ".".join(spec[0:-1])
            taskname = spec[-1]
            e = event
@@ -1147,7 +894,6 @@ class BuildInfoHelper(object):
            tasks[taskdesc] = _save_a_task(taskdesc)

        # create dependencies between tasks
        taskdeps_objects = []
        for taskdesc in event._depgraph['tdepends']:
            target = tasks[taskdesc]
            for taskdep in event._depgraph['tdepends'][taskdesc]:
@@ -1156,111 +902,63 @@ class BuildInfoHelper(object):
                    dep = _save_a_task(taskdep)
                else:
                    dep = tasks[taskdep]
                taskdeps_objects.append(Task_Dependency( task = target, depends_on = dep ))
        Task_Dependency.objects.bulk_create(taskdeps_objects)
                Task_Dependency.objects.get_or_create( task = target, depends_on = dep )

        if (len(errormsg) > 0):
            logger.warn("buildinfohelper: dependency info could not identify recipes: \n%s" % errormsg)
            raise Exception(errormsg)


    def store_build_package_information(self, event):
        package_info = BuildInfoHelper._get_data_from_event(event)
        assert 'data' in vars(event)
        package_info = event.data
        self.orm_wrapper.save_build_package_information(self.internal_state['build'],
                            package_info,
                            self.internal_state['recipes'],
                            )

    def _store_build_done(self, errorcode):
        br_id, be_id = self.brbe.split(":")
        from bldcontrol.models import BuildEnvironment, BuildRequest
        be = BuildEnvironment.objects.get(pk = be_id)
        be.lock = BuildEnvironment.LOCK_LOCK
        be.save()
        br = BuildRequest.objects.get(pk = br_id)
        if errorcode == 0:
            # request archival of the project artifacts
            br.state = BuildRequest.REQ_ARCHIVE
        else:
            br.state = BuildRequest.REQ_FAILED
        br.save()

    def _store_log_information(self, level, text):
        log_information = {}
        log_information['build'] = self.internal_state['build']
        log_information['level'] = level
        log_information['message'] = text
        self.orm_wrapper.create_logmessage(log_information)

    def store_log_info(self, text):
        self._store_log_information(LogMessage.INFO, text)

    def store_log_warn(self, text):
        self._store_log_information(LogMessage.WARNING, text)

    def store_log_error(self, text):
        mockevent = MockEvent()
        mockevent.levelno = format.ERROR
        mockevent.msg = text
        mockevent.pathname = '-- None'
        mockevent.lineno = -1
        self.store_log_event(mockevent)

    def store_log_exception(self, text, backtrace = ""):
        mockevent = MockEvent()
        mockevent.levelno = -1
        mockevent.msg = text
        mockevent.pathname = backtrace
        mockevent.lineno = -1
        self.store_log_event(mockevent)

        self._store_log_information(LogMessage.ERROR, text)

    def store_log_event(self, event):
        if 'build' in self.internal_state and 'backlog' in self.internal_state:
            if len(self.internal_state['backlog']):
                tempevent = self.internal_state['backlog'].pop()
                print "Saving stored event ", tempevent
                self.store_log_event(tempevent)
            else:
                del self.internal_state['backlog']

        if event.levelno < format.WARNING:
            return

        if 'args' in vars(event):
            event.msg = event.msg % event.args

        if not 'build' in self.internal_state:
            if self.brbe is None:
                if not 'backlog' in self.internal_state:
                    self.internal_state['backlog'] = []
                self.internal_state['backlog'].append(event)
            else: # we're under Toaster control, post the errors to the build request
                from bldcontrol.models import BuildRequest, BRError
                br, be = self.brbe.split(":")
                buildrequest = BuildRequest.objects.get(pk = br)
                brerror = BRError.objects.create(req = buildrequest, errtype="build", errmsg = event.msg)
            print "Save event for later"
            if not 'backlog' in self.internal_state:
                self.internal_state['backlog'] = []
            self.internal_state['backlog'].append(event)

            return

        if 'build' in self.internal_state and 'backlog' in self.internal_state:
            # if we have a backlog of events, do our best to save them here
            if len(self.internal_state['backlog']):
                tempevent = self.internal_state['backlog'].pop()
                logger.debug(1, "buildinfohelper: Saving stored event %s " % tempevent)
                self.store_log_event(tempevent)
            else:
                logger.error("buildinfohelper: Events not saved: %s" % self.internal_state['backlog'])
                del self.internal_state['backlog']

        log_information = {}
        log_information['build'] = self.internal_state['build']
        if event.levelno == format.ERROR:
        if event.levelno >= format.ERROR:
            log_information['level'] = LogMessage.ERROR
        elif event.levelno == format.WARNING:
            log_information['level'] = LogMessage.WARNING
        elif event.levelno == -1:   # toaster self-logging
            log_information['level'] = -1
        else:
            log_information['level'] = LogMessage.INFO

        log_information['message'] = event.msg
        log_information['pathname'] = event.pathname
        log_information['lineno'] = event.lineno
        self.orm_wrapper.create_logmessage(log_information)
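# Illustrative sketch of the backlog logic in store_log_event() above:
# events that arrive before a Build row exists are queued, then drained once
# the build is known. persist() stands in for the ORM write.
backlog = []

def on_log_event(state, event):
    if 'build' not in state:
        backlog.append(event)              # too early to persist
        return
    while backlog:
        persist(state, backlog.pop())      # drain queued events first
    persist(state, event)
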
    def close(self, errorcode):
        if self.brbe is not None:
            self._store_build_done(errorcode)

        if 'backlog' in self.internal_state:
            if 'build' in self.internal_state:
                # we save missed events in the database for the current build
                tempevent = self.internal_state['backlog'].pop()
                self.store_log_event(tempevent)
            else:
                # we have no build, and we still have events; something amazingly wrong happened
                for event in self.internal_state['backlog']:
                    logger.error("UNSAVED log: %s", event.msg)

        if not connection.features.autocommits_when_autocommit_is_off:
            transaction.set_autocommit(True)

@@ -230,7 +230,10 @@ class SimpleSettingsDialog (CrumbsDialog, SettingsUIHelper):
        self.configuration.sstatemirror = ""
        for mirror in self.sstatemirrors_list:
            if mirror[1] != "" and mirror[2].startswith("file://"):
                smirror = mirror[2] + " " + mirror[1] + " \\n "
                if mirror[1].endswith("\\1"):
                    smirror = mirror[2] + " " + mirror[1] + " \\n "
                else:
                    smirror = mirror[2] + " " + mirror[1] + "\\1 \\n "
                self.configuration.sstatemirror += smirror
        self.configuration.bbthread = self.bb_spinner.get_value_as_int()
        self.configuration.pmake = self.pmake_spinner.get_value_as_int()

@@ -17,7 +17,6 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import sys
import gobject
import gtk
import Queue
@@ -199,7 +198,7 @@ def main(server, eventHandler, params):
        print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
        return 1
    if 'msg' in cmdline and cmdline['msg']:
        print(cmdline['msg'])
        logger.error(cmdline['msg'])
        return 1
    cmdline = cmdline['action']
    if not cmdline or cmdline[0] != "generateDotGraph":
@@ -216,12 +215,6 @@ def main(server, eventHandler, params):
        print("XMLRPC Fault getting commandline:\n %s" % x)
        return

    try:
        gtk.init_check()
    except RuntimeError:
        sys.stderr.write("Please set DISPLAY variable before running this command \n")
        return

    shutdown = 0

    gtkgui = gtkthread(shutdown)
@@ -243,7 +236,7 @@ def main(server, eventHandler, params):
        try:
            event = eventHandler.waitEvent(0.25)
            if gtkthread.quit.isSet():
                _, error = server.runCommand(["stateForceShutdown"])
                _, error = server.runCommand(["stateStop"])
                if error:
                    print('Unable to cleanly stop: %s' % error)
                break

@@ -271,7 +271,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
        server.terminateServer()
        return

    if consolelogfile and not params.options.show_environment and not params.options.show_versions:
    if consolelogfile and not params.options.show_environment:
        bb.utils.mkdirhier(os.path.dirname(consolelogfile))
        conlogformat = bb.msg.BBLogFormatter(format_str)
        consolelog = logging.FileHandler(consolelogfile)
@@ -284,7 +284,6 @@ def main(server, eventHandler, params, tf = TerminalFilter):

    if not params.observe_only:
        params.updateFromServer(server)
        params.updateToServer(server, os.environ.copy())
        cmdline = params.parseActions()
        if not cmdline:
            print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
@@ -322,8 +321,8 @@ def main(server, eventHandler, params, tf = TerminalFilter):
                break
            termfilter.updateFooter()
            event = eventHandler.waitEvent(0.25)
            if event is None:
                continue
            if event is None:
                continue
            helper.eventHandler(event)
            if isinstance(event, bb.runqueue.runQueueExitWait):
                if not main.shutdown:
@@ -352,7 +351,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
                # For "normal" logging conditions, don't show note logs from tasks
                # but do show them if the user has changed the default log level to
                # include verbose/debug messages
                if event.taskpid != 0 and event.levelno <= format.NOTE and (event.levelno < llevel or (event.levelno == format.NOTE and llevel != format.VERBOSE)):
                if event.taskpid != 0 and event.levelno <= format.NOTE:
                    continue
                logger.handle(event)
                continue
@@ -508,11 +507,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
            termfilter.clearFooter()
            # ignore interrupted io
            if ioerror.args[0] == 4:
                continue
            sys.stderr.write(str(ioerror))
            if not params.observe_only:
                _, error = server.runCommand(["stateForceShutdown"])
            main.shutdown = 2
            pass
        except KeyboardInterrupt:
            termfilter.clearFooter()
            if params.observe_only:
@@ -531,34 +526,25 @@ def main(server, eventHandler, params, tf = TerminalFilter):
                logger.error("Unable to cleanly shutdown: %s" % error)
            main.shutdown = main.shutdown + 1
            pass
        except Exception as e:
            sys.stderr.write(str(e))
            if not params.observe_only:
                _, error = server.runCommand(["stateForceShutdown"])
            main.shutdown = 2
    try:
        summary = ""
        if taskfailures:
            summary += pluralise("\nSummary: %s task failed:",
                                 "\nSummary: %s tasks failed:", len(taskfailures))
            for failure in taskfailures:
                summary += "\n  %s" % failure
        if warnings:
            summary += pluralise("\nSummary: There was %s WARNING message shown.",
                                 "\nSummary: There were %s WARNING messages shown.", warnings)
        if return_value and errors:
            summary += pluralise("\nSummary: There was %s ERROR message shown, returning a non-zero exit code.",
                                 "\nSummary: There were %s ERROR messages shown, returning a non-zero exit code.", errors)
        if summary:
            print(summary)

        if interrupted:
            print("Execution was interrupted, returning a non-zero exit code.")
            if return_value == 0:
                return_value = 1
    except IOError as e:
        import errno
        if e.errno == errno.EPIPE:
            pass
    summary = ""
    if taskfailures:
        summary += pluralise("\nSummary: %s task failed:",
                             "\nSummary: %s tasks failed:", len(taskfailures))
        for failure in taskfailures:
            summary += "\n  %s" % failure
    if warnings:
        summary += pluralise("\nSummary: There was %s WARNING message shown.",
                             "\nSummary: There were %s WARNING messages shown.", warnings)
    if return_value and errors:
        summary += pluralise("\nSummary: There was %s ERROR message shown, returning a non-zero exit code.",
                             "\nSummary: There were %s ERROR messages shown, returning a non-zero exit code.", errors)
    if summary:
        print(summary)

    if interrupted:
        print("Execution was interrupted, returning a non-zero exit code.")
        if return_value == 0:
            return_value = 1

    return return_value

@@ -361,13 +361,13 @@ class NCursesUI:
            shutdown = shutdown + 1
            pass

def main(server, eventHandler, params):
def main(server, eventHandler):
    if not os.isatty(sys.stdout.fileno()):
        print("FATAL: Unable to run 'ncurses' UI without a TTY.")
        return
    ui = NCursesUI()
    try:
        curses.wrapper(ui.main, server, eventHandler, params)
        curses.wrapper(ui.main, server, eventHandler)
    except:
        import traceback
        traceback.print_exc()

@@ -41,7 +41,7 @@ import sys
import time
import xmlrpclib

featureSet = [bb.cooker.CookerFeatures.HOB_EXTRA_CACHES, bb.cooker.CookerFeatures.SEND_DEPENDS_TREE, bb.cooker.CookerFeatures.BASEDATASTORE_TRACKING, bb.cooker.CookerFeatures.SEND_SANITYEVENTS]
featureSet = [bb.cooker.CookerFeatures.HOB_EXTRA_CACHES, bb.cooker.CookerFeatures.SEND_DEPENDS_TREE, bb.cooker.CookerFeatures.BASEDATASTORE_TRACKING]

logger = logging.getLogger("BitBake")
interactive = sys.stdout.isatty()
@@ -58,15 +58,19 @@ def _log_settings_from_server(server):
    if error:
        logger.error("Unable to get the value of BBINCLUDELOGS_LINES variable: %s" % error)
        raise BaseException(error)
    consolelogfile, error = server.runCommand(["getVariable", "BB_CONSOLELOG"])
    if error:
        logger.error("Unable to get the value of BB_CONSOLELOG variable: %s" % error)
        raise BaseException(error)
    return includelogs, loglines, consolelogfile

    return includelogs, loglines

def main(server, eventHandler, params ):

    includelogs, loglines = _log_settings_from_server(server)

    # verify and warn
    build_history_enabled = True
    inheritlist, error = server.runCommand(["getVariable", "INHERIT"])
    if not "buildhistory" in inheritlist.split(" "):
        logger.warn("buildhistory is not enabled. Please enable INHERIT += \"buildhistory\" to see image details.")
        build_history_enabled = False

    helper = uihelper.BBUIHelper()

    console = logging.StreamHandler(sys.stdout)
@@ -76,16 +80,6 @@ def main(server, eventHandler, params ):
    console.setFormatter(format)
    logger.addHandler(console)

    includelogs, loglines, consolelogfile = _log_settings_from_server(server)

    # verify and warn
    build_history_enabled = True
    inheritlist, error = server.runCommand(["getVariable", "INHERIT"])

    if not "buildhistory" in inheritlist.split(" "):
        logger.warn("buildhistory is not enabled. Please enable INHERIT += \"buildhistory\" to see image details.")
        build_history_enabled = False

    if not params.observe_only:
        logger.error("ToasterUI can only work in observer mode")
        return
@@ -97,26 +91,13 @@ def main(server, eventHandler, params ):
    errors = 0
    warnings = 0
    taskfailures = []
    first = True

    buildinfohelper = BuildInfoHelper(server, build_history_enabled)

    if buildinfohelper.brbe is not None and consolelogfile:
        # if we are under managed mode we have no other UI and we need to write our own file
        bb.utils.mkdirhier(os.path.dirname(consolelogfile))
        conlogformat = bb.msg.BBLogFormatter(format_str)
        consolelog = logging.FileHandler(consolelogfile)
        bb.msg.addDefaultlogFilter(consolelog)
        consolelog.setFormatter(conlogformat)
        logger.addHandler(consolelog)


    while True:
        try:
            event = eventHandler.waitEvent(0.25)
            if first:
                first = False
                logger.info("ToasterUI waiting for events")

            if event is None:
                if main.shutdown > 0:
@@ -130,12 +111,8 @@ def main(server, eventHandler, params ):

            if isinstance(event, (bb.build.TaskStarted, bb.build.TaskSucceeded, bb.build.TaskFailedSilent)):
                buildinfohelper.update_and_store_task(event)
                logger.warn("Logfile for task %s" % event.logfile)
                continue

            if isinstance(event, bb.build.TaskBase):
                logger.info(event._message)

            if isinstance(event, bb.event.LogExecTTY):
                logger.warn(event.msg)
                continue
@@ -181,12 +158,7 @@ def main(server, eventHandler, params ):
            if isinstance(event, bb.event.CacheLoadCompleted):
                continue
            if isinstance(event, bb.event.MultipleProviders):
                logger.info("multiple providers are available for %s%s (%s)", event._is_runtime and "runtime " or "",
                            event._item,
                            ", ".join(event._candidates))
                logger.info("consider defining a PREFERRED_PROVIDER entry to match %s", event._item)
                continue

            if isinstance(event, bb.event.NoProvider):
                return_value = 1
                errors = errors + 1
@@ -244,34 +216,21 @@ def main(server, eventHandler, params ):
            if isinstance(event, (bb.command.CommandCompleted,
                                  bb.command.CommandFailed,
                                  bb.command.CommandExit)):
                errorcode = 0
                if (isinstance(event, bb.command.CommandFailed)):
                    event.levelno = format.ERROR
                    event.msg = "Command Failed " + event.error
                    event.msg = event.error
                    event.pathname = ""
                    event.lineno = 0
                    buildinfohelper.store_log_event(event)
                    errors += 1
                    errorcode = 1
                    logger.error("Command execution failed: %s", event.error)

                buildinfohelper.update_build_information(event, errors, warnings, taskfailures)
                buildinfohelper.close(errorcode)
                # mark the log output; controllers may kill the toasterUI after seeing this log
                logger.info("ToasterUI build done")

                # we start a new build info
                if buildinfohelper.brbe is not None:

                    logger.debug(1, "ToasterUI under BuildEnvironment management - exiting after the build")
                    server.terminateServer()
                else:
                    logger.debug(1, "ToasterUI prepared for new build")
                    errors = 0
                    warnings = 0
                    taskfailures = []
                    buildinfohelper = BuildInfoHelper(server, build_history_enabled)

                errors = 0
                warnings = 0
                taskfailures = []
                buildinfohelper = BuildInfoHelper(server, build_history_enabled)
                continue

            if isinstance(event, bb.event.MetadataEvent):
@@ -287,12 +246,8 @@ def main(server, eventHandler, params ):
                    buildinfohelper.store_missed_state_tasks(event)
                elif event.type == "ImageFileSize":
                    buildinfohelper.update_target_image_file(event)
                elif event.type == "ArtifactFileSize":
                    buildinfohelper.update_artifact_image_file(event)
                elif event.type == "LicenseManifestPath":
                    buildinfohelper.store_license_manifest_path(event)
                else:
                    logger.error("Unprocessed MetadataEvent %s " % str(event))
                continue

            if isinstance(event, bb.cooker.CookerExit):
@@ -325,25 +280,9 @@ def main(server, eventHandler, params ):
            main.shutdown = 1
            pass
        except Exception as e:
            # print errors to log
            logger.error(e)
            import traceback
            from pprint import pformat
            exception_data = traceback.format_exc()
            logger.error("%s\n%s" % (e, exception_data))

            exc_type, exc_value, tb = sys.exc_info()
            if tb is not None:
                curr = tb
                while curr is not None:
                    logger.warn("Error data dump %s\n%s\n" % (traceback.format_tb(curr,1), pformat(curr.tb_frame.f_locals)))
                    curr = curr.tb_next

            # save them to database, if possible; if it fails, we already logged to console.
            try:
                buildinfohelper.store_log_exception("%s\n%s" % (str(e), exception_data))
            except Exception as ce:
                logger.error("CRITICAL - Failed to save toaster exception to the database: %s" % str(ce))

            traceback.print_exc()
            pass

    if interrupted:

@@ -44,27 +44,10 @@ class BBUIEventQueue:
        server.register_function( self.send_event, "event.sendpickle" )
        server.socket.settimeout(1)

        self.EventHandler = None
        count_tries = 0
        self.EventHandle = self.BBServer.registerEventHandler(self.host, self.port)

        # the event handler registration may fail here due to cooker being in invalid state
        # this is a transient situation, and we should retry a couple of times before
        # giving up

        while self.EventHandler == None and count_tries < 5:
            self.EventHandle = self.BBServer.registerEventHandler(self.host, self.port)

            if (self.EventHandle != None):
                break

            bb.warn("Could not register UI event handler %s:%d, retry" % (self.host, self.port))
            count_tries += 1
            import time
            time.sleep(1)


        if self.EventHandle == None:
            raise Exception("Could not register UI event handler")
        if (self.EventHandle == None):
            bb.fatal("Could not register UI event handler")

        self.server = server

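# Illustrative sketch of the retry pattern used above; register() stands in
# for BBServer.registerEventHandler, which can transiently return None while
# the cooker is in an invalid state.
import time

handle = None
for attempt in range(5):
    handle = register()
    if handle is not None:
        break
    time.sleep(1)    # give the cooker a moment before retrying
if handle is None:
    raise Exception("Could not register UI event handler")
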
@@ -106,12 +89,7 @@ class BBUIEventQueue:

        self.server.timeout = 1
        while not self.server.quit:
            try:
                self.server.handle_request()
            except Exception as e:
                import traceback
                logger.error("BBUIEventQueue.startCallbackHandler: Exception while trying to handle request: %s\n%s" % (e, traceback.format_exc(e)))

            self.server.handle_request()
        self.server.server_close()

    def system_quit( self ):

@@ -31,7 +31,6 @@ import subprocess
import glob
import traceback
import errno
import signal
from commands import getstatusoutput
from contextlib import contextmanager

@@ -54,9 +53,6 @@ def set_context(ctx):
# Context used in better_exec, eval
_context = clean_context()

class VersionStringException(Exception):
    """Exception raised when an invalid version specification is found"""

def explode_version(s):
    r = []
    alpha_regexp = re.compile('^([a-zA-Z]+)(.*)$')
@@ -132,28 +128,6 @@ def vercmp_string(a, b):
    tb = split_version(b)
    return vercmp(ta, tb)

def vercmp_string_op(a, b, op):
    """
    Compare two versions and check if the specified comparison operator matches the result of the comparison.
    This function is fairly liberal about what operators it will accept since there are a variety of styles
    depending on the context.
    """
    res = vercmp_string(a, b)
    if op in ('=', '=='):
        return res == 0
    elif op == '<=':
        return res <= 0
    elif op == '>=':
        return res >= 0
    elif op in ('>', '>>'):
        return res > 0
    elif op in ('<', '<<'):
        return res < 0
    elif op == '!=':
        return res != 0
    else:
        raise VersionStringException('Unsupported comparison operator "%s"' % op)

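# Illustrative usage sketch for vercmp_string_op() defined above:
assert vercmp_string_op("1.2.3", "1.2.10", "<")    # numeric segments: 3 < 10
assert vercmp_string_op("2.0", "2.0", "==")
# vercmp_string_op("1.0", "2.0", "~")  would raise VersionStringException
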
def explode_deps(s):
    """
    Take an RDEPENDS style string of format:
@@ -214,7 +188,6 @@ def explode_dep_versions2(s):
                i = i[1:]
            else:
                # This is an unsupported case!
                raise VersionStringException('Invalid version specification in "(%s" - invalid or missing operator' % i)
                lastcmp = (i or "")
                i = ""
            i.strip()
@@ -291,7 +264,7 @@ def _print_trace(body, line):
def better_compile(text, file, realfile, mode = "exec"):
    """
    A better compile method. This method
    will print the offending lines.
    will print the offending lines.
    """
    try:
        return compile(text, file, mode)
@@ -381,11 +354,14 @@ def better_exec(code, context, text = None, realfile = "<code>"):
    code = better_compile(code, realfile, realfile)
    try:
        exec(code, get_context(), context)
    except (bb.BBHandledException, bb.parse.SkipRecipe, bb.build.FuncFailed, bb.data_smart.ExpansionError):
        # Error already shown so passthrough, no need for traceback
    except bb.BBHandledException:
        # Error already shown so passthrough
        raise
    except Exception as e:
        (t, value, tb) = sys.exc_info()

        if t in [bb.parse.SkipPackage, bb.build.FuncFailed]:
            raise
        try:
            _print_exception(t, value, tb, realfile, text, context)
        except Exception as e:
@@ -413,30 +389,10 @@ def fileslocked(files):
    for lock in locks:
        bb.utils.unlockfile(lock)

@contextmanager
def timeout(seconds):
    def timeout_handler(signum, frame):
        pass

    original_handler = signal.signal(signal.SIGALRM, timeout_handler)

    try:
        signal.alarm(seconds)
        yield
    finally:
        signal.alarm(0)
        signal.signal(signal.SIGALRM, original_handler)

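# Illustrative usage sketch for the timeout() context manager above: SIGALRM
# interrupts a blocking call after the given number of seconds (main thread
# only; the lock path below is hypothetical).
# with timeout(5):
#     lf = lockfile("/tmp/example.lock", block=True)
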
def lockfile(name, shared=False, retry=True, block=False):
def lockfile(name, shared=False, retry=True):
    """
    Use the specified file as a lock file, return when the lock has
    been acquired. Returns a variable to pass to unlockfile().
    Parameters:
        retry: True to re-try locking if it fails, False otherwise
        block: True to block until the lock succeeds, False otherwise
    The retry and block parameters are kind of equivalent unless you
    consider the possibility of sending a signal to the process to break
    out - at which point you want block=True rather than retry=True.
    Use the file fn as a lock file, return when the lock has been acquired.
    Returns a variable to pass to unlockfile().
    """
    dirname = os.path.dirname(name)
    mkdirhier(dirname)
@@ -449,7 +405,7 @@ def lockfile(name, shared=False, retry=True, block=False):
    op = fcntl.LOCK_EX
    if shared:
        op = fcntl.LOCK_SH
    if not retry and not block:
    if not retry:
        op = op | fcntl.LOCK_NB

    while True:

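# Illustrative sketch of how retry/block map onto fcntl flags in lockfile()
# above: only when neither is requested does the lock become non-blocking.
import fcntl

def lock_flags(shared, retry, block):
    op = fcntl.LOCK_SH if shared else fcntl.LOCK_EX
    if not retry and not block:
        op = op | fcntl.LOCK_NB    # fail immediately rather than wait
    return op
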
@@ -569,7 +525,7 @@ def filter_environment(good_vars):
        os.unsetenv(key)
        del os.environ[key]

    if removed_vars:
    if len(removed_vars):
        logger.debug(1, "Removed the following variables from the environment: %s", ", ".join(removed_vars.keys()))

    return removed_vars
@@ -577,7 +533,7 @@ def filter_environment(good_vars):
def approved_variables():
    """
    Determine and return the list of whitelisted variables which are approved
    to remain in the environment.
    to remain in the envrionment.
    """
    if 'BB_PRESERVE_ENV' in os.environ:
        return os.environ.keys()
@@ -910,19 +866,6 @@ def contains(variable, checkvalues, truevalue, falsevalue, d):
        return truevalue
    return falsevalue

def contains_any(variable, checkvalues, truevalue, falsevalue, d):
    val = d.getVar(variable, True)
    if not val:
        return falsevalue
    val = set(val.split())
    if isinstance(checkvalues, basestring):
        checkvalues = set(checkvalues.split())
    else:
        checkvalues = set(checkvalues)
    if checkvalues & val:
        return truevalue
    return falsevalue

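# Illustrative usage sketch for contains_any() above, with a hypothetical
# datastore d: returns "graphical" if DISTRO_FEATURES shares at least one
# word with the check set, else "headless".
# contains_any("DISTRO_FEATURES", "x11 wayland", "graphical", "headless", d)
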
def cpu_count():
|
||||
return multiprocessing.cpu_count()
|
||||
|
||||
@@ -930,16 +873,21 @@ def nonblockingfd(fd):
|
||||
fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)
|
||||
|
||||
def process_profilelog(fn):
|
||||
# Redirect stdout to capture profile information
|
||||
pout = open(fn + '.processed', 'w')
|
||||
so = sys.stdout.fileno()
|
||||
orig_so = os.dup(sys.stdout.fileno())
|
||||
os.dup2(pout.fileno(), so)
|
||||
|
||||
import pstats
|
||||
p = pstats.Stats(fn, stream=pout)
|
||||
p = pstats.Stats(fn)
|
||||
p.sort_stats('time')
|
||||
p.print_stats()
|
||||
p.print_callers()
|
||||
p.sort_stats('cumulative')
|
||||
p.print_stats()
|
||||
|
||||
os.dup2(orig_so, so)
|
||||
pout.flush()
|
||||
pout.close()
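
The pair of Stats() lines above shows the change this diff makes: report output moves from a dup2()-redirected stdout to pstats' own stream argument. A standalone sketch of the stream-based approach (file names are illustrative):

    import cProfile, pstats

    cProfile.run('sum(range(1000))', 'example.prof')   # write raw profile data
    with open('example.prof.processed', 'w') as pout:
        p = pstats.Stats('example.prof', stream=pout)  # report goes to pout, not stdout
        p.sort_stats('time').print_stats()
        p.sort_stats('cumulative').print_stats()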
@@ -947,198 +895,5 @@ def process_profilelog(fn):
# Was present to work around multiprocessing pool bugs in python < 2.7.3
#
def multiprocessingpool(*args, **kwargs):

    import multiprocessing.pool
    #import multiprocessing.util
    #multiprocessing.util.log_to_stderr(10)
    # Deal with a multiprocessing bug where signals to the processes would be delayed until the work
    # completes. Putting in a timeout means the signals (like SIGINT/SIGTERM) get processed.
    def wrapper(func):
        def wrap(self, timeout=None):
            return func(self, timeout=timeout if timeout is not None else 1e100)
        return wrap
    multiprocessing.pool.IMapIterator.next = wrapper(multiprocessing.pool.IMapIterator.next)

    return multiprocessing.Pool(*args, **kwargs)
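
The wrapper above matters because an IMapIterator wait without a timeout blocks signal delivery in older Pythons; substituting an effectively infinite timeout (1e100 seconds) keeps SIGINT/SIGTERM responsive. Usage is unchanged from a plain Pool (sketch):

    pool = multiprocessingpool(4)
    try:
        for result in pool.imap(abs, [-1, -2, -3]):
            print(result)   # Ctrl-C is now delivered promptly mid-iteration
    finally:
        pool.close()
        pool.join()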

def exec_flat_python_func(func, *args, **kwargs):
    """Execute a flat python function (defined with def funcname(args):...)"""
    # Prepare a small piece of python code which calls the requested function
    # To do this we need to prepare two things - a set of variables we can use to pass
    # the values of arguments into the calling function, and the list of arguments for
    # the function being called
    context = {}
    funcargs = []
    # Handle unnamed arguments
    aidx = 1
    for arg in args:
        argname = 'arg_%s' % aidx
        context[argname] = arg
        funcargs.append(argname)
        aidx += 1
    # Handle keyword arguments
    context.update(kwargs)
    funcargs.extend(['%s=%s' % (arg, arg) for arg in kwargs.iterkeys()])
    code = 'retval = %s(%s)' % (func, ', '.join(funcargs))
    comp = bb.utils.better_compile(code, '<string>', '<string>')
    bb.utils.better_exec(comp, context, code, '<string>')
    return context['retval']
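
In other words, a call such as the following builds the snippet retval = max(arg_1, arg_2), compiles it, and executes it with the arguments bound in the context dictionary (illustrative use of this API):

    result = bb.utils.exec_flat_python_func('max', 3, 7)
    assert result == 7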

def edit_metadata_file(meta_file, variables, func):
    """Edit a recipe or config file and modify one or more specified
    variable values set in the file using a specified callback function.
    The file is only written to if the value(s) actually change.
    """
    var_res = {}
    for var in variables:
        var_res[var] = re.compile(r'^%s[ \t]*[?+]*=' % var)

    updated = False
    varset_start = ''
    varlines = []
    newlines = []
    in_var = None
    full_value = ''

    def handle_var_end():
        (newvalue, indent, minbreak) = func(in_var, full_value)
        if newvalue != full_value:
            if isinstance(newvalue, list):
                indentspc = ' ' * indent
                if minbreak:
                    # First item on first line
                    if len(newvalue) == 1:
                        newlines.append('%s "%s"\n' % (varset_start, newvalue[0]))
                    else:
                        newlines.append('%s "%s\\\n' % (varset_start, newvalue[0]))
                        for item in newvalue[1:]:
                            newlines.append('%s%s \\\n' % (indentspc, item))
                        newlines.append('%s"\n' % indentspc)
                else:
                    # No item on first line
                    newlines.append('%s " \\\n' % varset_start)
                    for item in newvalue:
                        newlines.append('%s%s \\\n' % (indentspc, item))
                    newlines.append('%s"\n' % indentspc)
            else:
                newlines.append('%s "%s"\n' % (varset_start, newvalue))
            return True
        else:
            # Put the old lines back where they were
            newlines.extend(varlines)
            return False

    with open(meta_file, 'r') as f:
        for line in f:
            if in_var:
                value = line.rstrip()
                varlines.append(line)
                full_value += value[:-1]
                if value.endswith('"') or value.endswith("'"):
                    full_value = full_value[:-1]
                    if handle_var_end():
                        updated = True
                    in_var = None
            else:
                matched = False
                for (varname, var_re) in var_res.iteritems():
                    if var_re.match(line):
                        splitvalue = line.split('"', 1)
                        varset_start = splitvalue[0].rstrip()
                        value = splitvalue[1].rstrip()
                        if value.endswith('\\'):
                            value = value[:-1]
                        full_value = value
                        varlines = [line]
                        in_var = varname
                        if value.endswith('"') or value.endswith("'"):
                            full_value = full_value[:-1]
                            if handle_var_end():
                                updated = True
                            in_var = None
                        matched = True
                        break
                if not matched:
                    newlines.append(line)
    if updated:
        with open(meta_file, 'w') as f:
            f.writelines(newlines)
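
The callback receives (variable name, current value) and returns a (newvalue, indent, minbreak) tuple; returning a list makes the function re-wrap the value with backslash continuations. A sketch of a callback that appends an item (the variable name, item and file path are hypothetical):

    def append_item(varname, origvalue):
        # Return (new value, indent, minbreak); a list value is re-wrapped
        # across lines by edit_metadata_file().
        items = origvalue.split()
        if 'newitem' not in items:
            items.append('newitem')
        return (items, 4, False)

    edit_metadata_file('conf/local.conf', ['EXTRA_ITEMS'], append_item)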

def edit_bblayers_conf(bblayers_conf, add, remove):
    """Edit bblayers.conf, adding and/or removing layers"""

    import fnmatch

    def remove_trailing_sep(pth):
        if pth and pth[-1] == os.sep:
            pth = pth[:-1]
        return pth

    def layerlist_param(value):
        if not value:
            return []
        elif isinstance(value, list):
            return [remove_trailing_sep(x) for x in value]
        else:
            return [remove_trailing_sep(value)]

    notadded = []
    notremoved = []

    addlayers = layerlist_param(add)
    removelayers = layerlist_param(remove)

    # Need to use a list here because we can't set non-local variables from a callback in python 2.x
    bblayercalls = []

    def handle_bblayers(varname, origvalue):
        bblayercalls.append(varname)
        updated = False
        bblayers = [remove_trailing_sep(x) for x in origvalue.split()]
        if removelayers:
            for removelayer in removelayers:
                matched = False
                for layer in bblayers:
                    if fnmatch.fnmatch(layer, removelayer):
                        updated = True
                        matched = True
                        bblayers.remove(layer)
                        break
                if not matched:
                    notremoved.append(removelayer)
        if addlayers:
            for addlayer in addlayers:
                if addlayer not in bblayers:
                    updated = True
                    bblayers.append(addlayer)
                else:
                    notadded.append(addlayer)

        if updated:
            return (bblayers, 2, False)
        else:
            return (origvalue, 2, False)

    edit_metadata_file(bblayers_conf, ['BBLAYERS'], handle_bblayers)

    if not bblayercalls:
        raise Exception('Unable to find BBLAYERS in %s' % bblayers_conf)

    return (notadded, notremoved)
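
Typical use, assuming the semantics above (paths are illustrative); the return value reports layers that could not be added or removed:

    notadded, notremoved = edit_bblayers_conf(
        'conf/bblayers.conf',
        add='/home/user/meta-custom',
        remove='*/meta-obsolete')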

def get_file_layer(filename, d):
    """Determine the collection (as defined by a layer's layer.conf file) containing the specified file"""
    collections = (d.getVar('BBFILE_COLLECTIONS', True) or '').split()
    collection_res = {}
    for collection in collections:
        collection_res[collection] = d.getVar('BBFILE_PATTERN_%s' % collection, True) or ''

    # Use longest path so we handle nested layers
    matchlen = 0
    match = None
    for collection, regex in collection_res.iteritems():
        if len(regex) > matchlen and re.match(regex, filename):
            matchlen = len(regex)
            match = collection
    return match
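
Illustration of the longest-match rule (hypothetical values):

    # With BBFILE_COLLECTIONS = "core custom" and
    #   BBFILE_PATTERN_core   = "^/build/poky/meta/"
    #   BBFILE_PATTERN_custom = "^/build/poky/meta/custom-layer/"
    # a file under meta/custom-layer/ matches both patterns; the longer
    # (more specific, nested) pattern wins, so the result is 'custom'.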

@@ -1,43 +0,0 @@
Behold, mortal, the origins of Beautiful Soup...
================================================

Leonard Richardson is the primary programmer.

Aaron DeVore is awesome.

Mark Pilgrim provided the encoding detection code that forms the base
of UnicodeDammit.

Thomas Kluyver and Ezio Melotti finished the work of getting Beautiful
Soup 4 working under Python 3.

Simon Willison wrote soupselect, which was used to make Beautiful Soup
support CSS selectors.

Sam Ruby helped with a lot of edge cases.

Jonathan Ellis was awarded the prestigious Beau Potage D'Or for his
work in solving the nestable tags conundrum.

An incomplete list of people who have contributed patches to Beautiful
Soup:

Istvan Albert, Andrew Lin, Anthony Baxter, Andrew Boyko, Tony Chang,
Zephyr Fang, Fuzzy, Roman Gaufman, Yoni Gilad, Richie Hindle, Peteris
Krumins, Kent Johnson, Ben Last, Robert Leftwich, Staffan Malmgren,
Ksenia Marasanova, JP Moins, Adam Monsen, John Nagle, "Jon", Ed
Oskiewicz, Greg Phillips, Giles Radford, Arthur Rudolph, Marko
Samastur, Jouni Seppänen, Alexander Schmolck, Andy Theyers, Glyn
Webster, Paul Wright, Danny Yoo

An incomplete list of people who made suggestions or found bugs or
found ways to break Beautiful Soup:

Hanno Böck, Matteo Bertini, Chris Curvey, Simon Cusack, Bruce Eckel,
Matt Ernst, Michael Foord, Tom Harris, Bill de hOra, Donald Howes,
Matt Patterson, Scott Roberts, Steve Strassmann, Mike Williams,
warchild at redho dot com, Sami Kuisma, Carlos Rocha, Bob Hutchison,
Joren Mc, Michal Migurski, John Kleven, Tim Heaney, Tripp Lilley, Ed
Summers, Dennis Sutch, Chris Smith, Aaron Sweep^W Swartz, Stuart
Turner, Greg Edwards, Kevin J Kalupson, Nikos Kouremenos, Artur de
Sousa Rocha, Yichun Wei, Per Vognsen
@@ -1,26 +0,0 @@
Beautiful Soup is made available under the MIT license:

Copyright (c) 2004-2012 Leonard Richardson

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE, DAMMIT.

Beautiful Soup incorporates code from the html5lib library, which is
also made available under the MIT license.
File diff suppressed because it is too large
@@ -1,406 +0,0 @@
"""Beautiful Soup
Elixir and Tonic
"The Screen-Scraper's Friend"
http://www.crummy.com/software/BeautifulSoup/

Beautiful Soup uses a pluggable XML or HTML parser to parse a
(possibly invalid) document into a tree representation. Beautiful Soup
provides methods and Pythonic idioms that make it easy to
navigate, search, and modify the parse tree.

Beautiful Soup works with Python 2.6 and up. It works better if lxml
and/or html5lib is installed.

For more than you ever wanted to know about Beautiful Soup, see the
documentation:
http://www.crummy.com/software/BeautifulSoup/bs4/doc/
"""

__author__ = "Leonard Richardson (leonardr@segfault.org)"
__version__ = "4.3.2"
__copyright__ = "Copyright (c) 2004-2013 Leonard Richardson"
__license__ = "MIT"

__all__ = ['BeautifulSoup']

import os
import re
import warnings

from .builder import builder_registry, ParserRejectedMarkup
from .dammit import UnicodeDammit
from .element import (
    CData,
    Comment,
    DEFAULT_OUTPUT_ENCODING,
    Declaration,
    Doctype,
    NavigableString,
    PageElement,
    ProcessingInstruction,
    ResultSet,
    SoupStrainer,
    Tag,
    )

# The very first thing we do is give a useful error if someone is
# running this code under Python 3 without converting it.
syntax_error = u'You are trying to run the Python 2 version of Beautiful Soup under Python 3. This will not work. You need to convert the code, either by installing it (`python setup.py install`) or by running 2to3 (`2to3 -w bs4`).'

class BeautifulSoup(Tag):
    """
    This class defines the basic interface called by the tree builders.

    These methods will be called by the parser:
      reset()
      feed(markup)

    The tree builder may call these methods from its feed() implementation:
      handle_starttag(name, attrs) # See note about return value
      handle_endtag(name)
      handle_data(data) # Appends to the current data node
      endData(containerClass=NavigableString) # Ends the current data node

    No matter how complicated the underlying parser is, you should be
    able to build a tree using 'start tag' events, 'end tag' events,
    'data' events, and "done with data" events.

    If you encounter an empty-element tag (aka a self-closing tag,
    like HTML's <br> tag), call handle_starttag and then
    handle_endtag.
    """
    ROOT_TAG_NAME = u'[document]'

    # If the end-user gives no indication which tree builder they
    # want, look for one with these features.
    DEFAULT_BUILDER_FEATURES = ['html', 'fast']

    ASCII_SPACES = '\x20\x0a\x09\x0c\x0d'

    def __init__(self, markup="", features=None, builder=None,
                 parse_only=None, from_encoding=None, **kwargs):
        """The Soup object is initialized as the 'root tag', and the
        provided markup (which can be a string or a file-like object)
        is fed into the underlying parser."""

        if 'convertEntities' in kwargs:
            warnings.warn(
                "BS4 does not respect the convertEntities argument to the "
                "BeautifulSoup constructor. Entities are always converted "
                "to Unicode characters.")

        if 'markupMassage' in kwargs:
            del kwargs['markupMassage']
            warnings.warn(
                "BS4 does not respect the markupMassage argument to the "
                "BeautifulSoup constructor. The tree builder is responsible "
                "for any necessary markup massage.")

        if 'smartQuotesTo' in kwargs:
            del kwargs['smartQuotesTo']
            warnings.warn(
                "BS4 does not respect the smartQuotesTo argument to the "
                "BeautifulSoup constructor. Smart quotes are always converted "
                "to Unicode characters.")

        if 'selfClosingTags' in kwargs:
            del kwargs['selfClosingTags']
            warnings.warn(
                "BS4 does not respect the selfClosingTags argument to the "
                "BeautifulSoup constructor. The tree builder is responsible "
                "for understanding self-closing tags.")

        if 'isHTML' in kwargs:
            del kwargs['isHTML']
            warnings.warn(
                "BS4 does not respect the isHTML argument to the "
                "BeautifulSoup constructor. You can pass in features='html' "
                "or features='xml' to get a builder capable of handling "
                "one or the other.")

        def deprecated_argument(old_name, new_name):
            if old_name in kwargs:
                warnings.warn(
                    'The "%s" argument to the BeautifulSoup constructor '
                    'has been renamed to "%s."' % (old_name, new_name))
                value = kwargs[old_name]
                del kwargs[old_name]
                return value
            return None

        parse_only = parse_only or deprecated_argument(
            "parseOnlyThese", "parse_only")

        from_encoding = from_encoding or deprecated_argument(
            "fromEncoding", "from_encoding")

        if len(kwargs) > 0:
            arg = kwargs.keys().pop()
            raise TypeError(
                "__init__() got an unexpected keyword argument '%s'" % arg)

        if builder is None:
            if isinstance(features, basestring):
                features = [features]
            if features is None or len(features) == 0:
                features = self.DEFAULT_BUILDER_FEATURES
            builder_class = builder_registry.lookup(*features)
            if builder_class is None:
                raise FeatureNotFound(
                    "Couldn't find a tree builder with the features you "
                    "requested: %s. Do you need to install a parser library?"
                    % ",".join(features))
            builder = builder_class()
        self.builder = builder
        self.is_xml = builder.is_xml
        self.builder.soup = self

        self.parse_only = parse_only

        if hasattr(markup, 'read'):        # It's a file-type object.
            markup = markup.read()
        elif len(markup) <= 256:
            # Print out warnings for a couple beginner problems
            # involving passing non-markup to Beautiful Soup.
            # Beautiful Soup will still parse the input as markup,
            # just in case that's what the user really wants.
            if (isinstance(markup, unicode)
                and not os.path.supports_unicode_filenames):
                possible_filename = markup.encode("utf8")
            else:
                possible_filename = markup
            is_file = False
            try:
                is_file = os.path.exists(possible_filename)
            except Exception, e:
                # This is almost certainly a problem involving
                # characters not valid in filenames on this
                # system. Just let it go.
                pass
            if is_file:
                warnings.warn(
                    '"%s" looks like a filename, not markup. You should probably open this file and pass the filehandle into Beautiful Soup.' % markup)
            if markup[:5] == "http:" or markup[:6] == "https:":
                # TODO: This is ugly but I couldn't get it to work in
                # Python 3 otherwise.
                if ((isinstance(markup, bytes) and not b' ' in markup)
                    or (isinstance(markup, unicode) and not u' ' in markup)):
                    warnings.warn(
                        '"%s" looks like a URL. Beautiful Soup is not an HTTP client. You should probably use an HTTP client to get the document behind the URL, and feed that document to Beautiful Soup.' % markup)

        for (self.markup, self.original_encoding, self.declared_html_encoding,
             self.contains_replacement_characters) in (
                 self.builder.prepare_markup(markup, from_encoding)):
            self.reset()
            try:
                self._feed()
                break
            except ParserRejectedMarkup:
                pass

        # Clear out the markup and remove the builder's circular
        # reference to this object.
        self.markup = None
        self.builder.soup = None

    def _feed(self):
        # Convert the document to Unicode.
        self.builder.reset()

        self.builder.feed(self.markup)
        # Close out any unfinished strings and close all the open tags.
        self.endData()
        while self.currentTag.name != self.ROOT_TAG_NAME:
            self.popTag()

    def reset(self):
        Tag.__init__(self, self, self.builder, self.ROOT_TAG_NAME)
        self.hidden = 1
        self.builder.reset()
        self.current_data = []
        self.currentTag = None
        self.tagStack = []
        self.preserve_whitespace_tag_stack = []
        self.pushTag(self)

    def new_tag(self, name, namespace=None, nsprefix=None, **attrs):
        """Create a new tag associated with this soup."""
        return Tag(None, self.builder, name, namespace, nsprefix, attrs)

    def new_string(self, s, subclass=NavigableString):
        """Create a new NavigableString associated with this soup."""
        navigable = subclass(s)
        navigable.setup()
        return navigable

    def insert_before(self, successor):
        raise NotImplementedError("BeautifulSoup objects don't support insert_before().")

    def insert_after(self, successor):
        raise NotImplementedError("BeautifulSoup objects don't support insert_after().")

    def popTag(self):
        tag = self.tagStack.pop()
        if self.preserve_whitespace_tag_stack and tag == self.preserve_whitespace_tag_stack[-1]:
            self.preserve_whitespace_tag_stack.pop()
        #print "Pop", tag.name
        if self.tagStack:
            self.currentTag = self.tagStack[-1]
        return self.currentTag

    def pushTag(self, tag):
        #print "Push", tag.name
        if self.currentTag:
            self.currentTag.contents.append(tag)
        self.tagStack.append(tag)
        self.currentTag = self.tagStack[-1]
        if tag.name in self.builder.preserve_whitespace_tags:
            self.preserve_whitespace_tag_stack.append(tag)

    def endData(self, containerClass=NavigableString):
        if self.current_data:
            current_data = u''.join(self.current_data)
            # If whitespace is not preserved, and this string contains
            # nothing but ASCII spaces, replace it with a single space
            # or newline.
            if not self.preserve_whitespace_tag_stack:
                strippable = True
                for i in current_data:
                    if i not in self.ASCII_SPACES:
                        strippable = False
                        break
                if strippable:
                    if '\n' in current_data:
                        current_data = '\n'
                    else:
                        current_data = ' '

            # Reset the data collector.
            self.current_data = []

            # Should we add this string to the tree at all?
            if self.parse_only and len(self.tagStack) <= 1 and \
                   (not self.parse_only.text or \
                    not self.parse_only.search(current_data)):
                return

            o = containerClass(current_data)
            self.object_was_parsed(o)

    def object_was_parsed(self, o, parent=None, most_recent_element=None):
        """Add an object to the parse tree."""
        parent = parent or self.currentTag
        most_recent_element = most_recent_element or self._most_recent_element
        o.setup(parent, most_recent_element)

        if most_recent_element is not None:
            most_recent_element.next_element = o
        self._most_recent_element = o
        parent.contents.append(o)

    def _popToTag(self, name, nsprefix=None, inclusivePop=True):
        """Pops the tag stack up to and including the most recent
        instance of the given tag. If inclusivePop is false, pops the tag
        stack up to but *not* including the most recent instance of
        the given tag."""
        #print "Popping to %s" % name
        if name == self.ROOT_TAG_NAME:
            # The BeautifulSoup object itself can never be popped.
            return

        most_recently_popped = None

        stack_size = len(self.tagStack)
        for i in range(stack_size - 1, 0, -1):
            t = self.tagStack[i]
            if (name == t.name and nsprefix == t.prefix):
                if inclusivePop:
                    most_recently_popped = self.popTag()
                break
            most_recently_popped = self.popTag()

        return most_recently_popped

    def handle_starttag(self, name, namespace, nsprefix, attrs):
        """Push a start tag on to the stack.

        If this method returns None, the tag was rejected by the
        SoupStrainer. You should proceed as if the tag had not occurred
        in the document. For instance, if this was a self-closing tag,
        don't call handle_endtag.
        """

        # print "Start tag %s: %s" % (name, attrs)
        self.endData()

        if (self.parse_only and len(self.tagStack) <= 1
            and (self.parse_only.text
                 or not self.parse_only.search_tag(name, attrs))):
            return None

        tag = Tag(self, self.builder, name, namespace, nsprefix, attrs,
                  self.currentTag, self._most_recent_element)
        if tag is None:
            return tag
        if self._most_recent_element:
            self._most_recent_element.next_element = tag
        self._most_recent_element = tag
        self.pushTag(tag)
        return tag

    def handle_endtag(self, name, nsprefix=None):
        #print "End tag: " + name
        self.endData()
        self._popToTag(name, nsprefix)

    def handle_data(self, data):
        self.current_data.append(data)

    def decode(self, pretty_print=False,
               eventual_encoding=DEFAULT_OUTPUT_ENCODING,
               formatter="minimal"):
        """Returns a string or Unicode representation of this document.
        To get Unicode, pass None for encoding."""

        if self.is_xml:
            # Print the XML declaration
            encoding_part = ''
            if eventual_encoding != None:
                encoding_part = ' encoding="%s"' % eventual_encoding
            prefix = u'<?xml version="1.0"%s?>\n' % encoding_part
        else:
            prefix = u''
        if not pretty_print:
            indent_level = None
        else:
            indent_level = 0
        return prefix + super(BeautifulSoup, self).decode(
            indent_level, eventual_encoding, formatter)

# Alias to make it easier to type import: 'from bs4 import _soup'
_s = BeautifulSoup
_soup = BeautifulSoup
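
For context on the module being removed by this diff, minimal use of the class looks like this (illustrative markup; output shown in comments):

    from bs4 import BeautifulSoup

    soup = BeautifulSoup('<html><body><p class="a b">hi</p></body></html>')
    print(soup.p.get_text())   # hi
    print(soup.p['class'])     # ['a', 'b'] -- a cdata-list attribute, split by the builder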

class BeautifulStoneSoup(BeautifulSoup):
    """Deprecated interface to an XML parser."""

    def __init__(self, *args, **kwargs):
        kwargs['features'] = 'xml'
        warnings.warn(
            'The BeautifulStoneSoup class is deprecated. Instead of using '
            'it, pass features="xml" into the BeautifulSoup constructor.')
        super(BeautifulStoneSoup, self).__init__(*args, **kwargs)


class StopParsing(Exception):
    pass

class FeatureNotFound(ValueError):
    pass


#By default, act as an HTML pretty-printer.
if __name__ == '__main__':
    import sys
    soup = BeautifulSoup(sys.stdin)
    print soup.prettify()
@@ -1,321 +0,0 @@
from collections import defaultdict
import itertools
import sys
from bs4.element import (
    CharsetMetaAttributeValue,
    ContentMetaAttributeValue,
    whitespace_re
    )

__all__ = [
    'HTMLTreeBuilder',
    'SAXTreeBuilder',
    'TreeBuilder',
    'TreeBuilderRegistry',
    ]

# Some useful features for a TreeBuilder to have.
FAST = 'fast'
PERMISSIVE = 'permissive'
STRICT = 'strict'
XML = 'xml'
HTML = 'html'
HTML_5 = 'html5'


class TreeBuilderRegistry(object):

    def __init__(self):
        self.builders_for_feature = defaultdict(list)
        self.builders = []

    def register(self, treebuilder_class):
        """Register a treebuilder based on its advertised features."""
        for feature in treebuilder_class.features:
            self.builders_for_feature[feature].insert(0, treebuilder_class)
        self.builders.insert(0, treebuilder_class)

    def lookup(self, *features):
        if len(self.builders) == 0:
            # There are no builders at all.
            return None

        if len(features) == 0:
            # They didn't ask for any features. Give them the most
            # recently registered builder.
            return self.builders[0]

        # Go down the list of features in order, and eliminate any builders
        # that don't match every feature.
        features = list(features)
        features.reverse()
        candidates = None
        candidate_set = None
        while len(features) > 0:
            feature = features.pop()
            we_have_the_feature = self.builders_for_feature.get(feature, [])
            if len(we_have_the_feature) > 0:
                if candidates is None:
                    candidates = we_have_the_feature
                    candidate_set = set(candidates)
                else:
                    # Eliminate any candidates that don't have this feature.
                    candidate_set = candidate_set.intersection(
                        set(we_have_the_feature))

        # The only valid candidates are the ones in candidate_set.
        # Go through the original list of candidates and pick the first one
        # that's in candidate_set.
        if candidate_set is None:
            return None
        for candidate in candidates:
            if candidate in candidate_set:
                return candidate
        return None

# The BeautifulSoup class will take feature lists from developers and use them
# to look up builders in this registry.
builder_registry = TreeBuilderRegistry()

class TreeBuilder(object):
    """Turn a document into a Beautiful Soup object tree."""

    features = []

    is_xml = False
    preserve_whitespace_tags = set()
    empty_element_tags = None # A tag will be considered an empty-element
                              # tag when and only when it has no contents.

    # A value for these tag/attribute combinations is a space- or
    # comma-separated list of CDATA, rather than a single CDATA.
    cdata_list_attributes = {}


    def __init__(self):
        self.soup = None

    def reset(self):
        pass

    def can_be_empty_element(self, tag_name):
        """Might a tag with this name be an empty-element tag?

        The final markup may or may not actually present this tag as
        self-closing.

        For instance: an HTMLBuilder does not consider a <p> tag to be
        an empty-element tag (it's not in
        HTMLBuilder.empty_element_tags). This means an empty <p> tag
        will be presented as "<p></p>", not "<p />".

        The default implementation has no opinion about which tags are
        empty-element tags, so a tag will be presented as an
        empty-element tag if and only if it has no contents.
        "<foo></foo>" will become "<foo />", and "<foo>bar</foo>" will
        be left alone.
        """
        if self.empty_element_tags is None:
            return True
        return tag_name in self.empty_element_tags

    def feed(self, markup):
        raise NotImplementedError()

    def prepare_markup(self, markup, user_specified_encoding=None,
                       document_declared_encoding=None):
        return markup, None, None, False

    def test_fragment_to_document(self, fragment):
        """Wrap an HTML fragment to make it look like a document.

        Different parsers do this differently. For instance, lxml
        introduces an empty <head> tag, and html5lib
        doesn't. Abstracting this away lets us write simple tests
        which run HTML fragments through the parser and compare the
        results against other HTML fragments.

        This method should not be used outside of tests.
        """
        return fragment

    def set_up_substitutions(self, tag):
        return False

    def _replace_cdata_list_attribute_values(self, tag_name, attrs):
        """Replaces class="foo bar" with class=["foo", "bar"]

        Modifies its input in place.
        """
        if not attrs:
            return attrs
        if self.cdata_list_attributes:
            universal = self.cdata_list_attributes.get('*', [])
            tag_specific = self.cdata_list_attributes.get(
                tag_name.lower(), None)
            for attr in attrs.keys():
                if attr in universal or (tag_specific and attr in tag_specific):
                    # We have a "class"-type attribute whose string
                    # value is a whitespace-separated list of
                    # values. Split it into a list.
                    value = attrs[attr]
                    if isinstance(value, basestring):
                        values = whitespace_re.split(value)
                    else:
                        # html5lib sometimes calls setAttributes twice
                        # for the same tag when rearranging the parse
                        # tree. On the second call the attribute value
                        # here is already a list. If this happens,
                        # leave the value alone rather than trying to
                        # split it again.
                        values = value
                    attrs[attr] = values
        return attrs
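
Illustration of the transformation (hypothetical input):

    # {'class': 'foo bar'}  ->  {'class': ['foo', 'bar']}
    # {'id': 'main'}        ->  {'id': 'main'}   (not a cdata-list attribute)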

class SAXTreeBuilder(TreeBuilder):
    """A Beautiful Soup treebuilder that listens for SAX events."""

    def feed(self, markup):
        raise NotImplementedError()

    def close(self):
        pass

    def startElement(self, name, attrs):
        attrs = dict((key[1], value) for key, value in list(attrs.items()))
        #print "Start %s, %r" % (name, attrs)
        self.soup.handle_starttag(name, attrs)

    def endElement(self, name):
        #print "End %s" % name
        self.soup.handle_endtag(name)

    def startElementNS(self, nsTuple, nodeName, attrs):
        # Throw away (ns, nodeName) for now.
        self.startElement(nodeName, attrs)

    def endElementNS(self, nsTuple, nodeName):
        # Throw away (ns, nodeName) for now.
        self.endElement(nodeName)
        #handler.endElementNS((ns, node.nodeName), node.nodeName)

    def startPrefixMapping(self, prefix, nodeValue):
        # Ignore the prefix for now.
        pass

    def endPrefixMapping(self, prefix):
        # Ignore the prefix for now.
        # handler.endPrefixMapping(prefix)
        pass

    def characters(self, content):
        self.soup.handle_data(content)

    def startDocument(self):
        pass

    def endDocument(self):
        pass


class HTMLTreeBuilder(TreeBuilder):
    """This TreeBuilder knows facts about HTML.

    Such as which tags are empty-element tags.
    """

    preserve_whitespace_tags = set(['pre', 'textarea'])
    empty_element_tags = set(['br' , 'hr', 'input', 'img', 'meta',
                              'spacer', 'link', 'frame', 'base'])

    # The HTML standard defines these attributes as containing a
    # space-separated list of values, not a single value. That is,
    # class="foo bar" means that the 'class' attribute has two values,
    # 'foo' and 'bar', not the single value 'foo bar'. When we
    # encounter one of these attributes, we will parse its value into
    # a list of values if possible. Upon output, the list will be
    # converted back into a string.
    cdata_list_attributes = {
        "*" : ['class', 'accesskey', 'dropzone'],
        "a" : ['rel', 'rev'],
        "link" : ['rel', 'rev'],
        "td" : ["headers"],
        "th" : ["headers"],
        "td" : ["headers"],
        "form" : ["accept-charset"],
        "object" : ["archive"],

        # These are HTML5 specific, as are *.accesskey and *.dropzone above.
        "area" : ["rel"],
        "icon" : ["sizes"],
        "iframe" : ["sandbox"],
        "output" : ["for"],
        }

    def set_up_substitutions(self, tag):
        # We are only interested in <meta> tags
        if tag.name != 'meta':
            return False

        http_equiv = tag.get('http-equiv')
        content = tag.get('content')
        charset = tag.get('charset')

        # We are interested in <meta> tags that say what encoding the
        # document was originally in. This means HTML 5-style <meta>
        # tags that provide the "charset" attribute. It also means
        # HTML 4-style <meta> tags that provide the "content"
        # attribute and have "http-equiv" set to "content-type".
        #
        # In both cases we will replace the value of the appropriate
        # attribute with a standin object that can take on any
        # encoding.
        meta_encoding = None
        if charset is not None:
            # HTML 5 style:
            # <meta charset="utf8">
            meta_encoding = charset
            tag['charset'] = CharsetMetaAttributeValue(charset)

        elif (content is not None and http_equiv is not None
              and http_equiv.lower() == 'content-type'):
            # HTML 4 style:
            # <meta http-equiv="content-type" content="text/html; charset=utf8">
            tag['content'] = ContentMetaAttributeValue(content)

        return (meta_encoding is not None)

def register_treebuilders_from(module):
    """Copy TreeBuilders from the given module into this module."""
    # I'm fairly sure this is not the best way to do this.
    this_module = sys.modules['bs4.builder']
    for name in module.__all__:
        obj = getattr(module, name)

        if issubclass(obj, TreeBuilder):
            setattr(this_module, name, obj)
            this_module.__all__.append(name)
            # Register the builder while we're at it.
            this_module.builder_registry.register(obj)

class ParserRejectedMarkup(Exception):
    pass

# Builders are registered in reverse order of priority, so that custom
# builder registrations will take precedence. In general, we want lxml
# to take precedence over html5lib, because it's faster. And we only
# want to use HTMLParser as a last resort.
from . import _htmlparser
register_treebuilders_from(_htmlparser)
try:
    from . import _html5lib
    register_treebuilders_from(_html5lib)
except ImportError:
    # They don't have html5lib installed.
    pass
try:
    from . import _lxml
    register_treebuilders_from(_lxml)
except ImportError:
    # They don't have lxml installed.
    pass
@@ -1,292 +0,0 @@
__all__ = [
    'HTML5TreeBuilder',
    ]

import warnings
from bs4.builder import (
    PERMISSIVE,
    HTML,
    HTML_5,
    HTMLTreeBuilder,
    )
from bs4.element import NamespacedAttribute
import html5lib
try:
    # html5lib >= 0.99999999/1.0b9
    from html5lib.treebuilders import base as treebuildersbase
except ImportError:
    # html5lib <= 0.9999999/1.0b8
    from html5lib.treebuilders import _base as treebuildersbase
from html5lib.constants import namespaces

from bs4.element import (
    Comment,
    Doctype,
    NavigableString,
    Tag,
    )

class HTML5TreeBuilder(HTMLTreeBuilder):
    """Use html5lib to build a tree."""

    features = ['html5lib', PERMISSIVE, HTML_5, HTML]

    def prepare_markup(self, markup, user_specified_encoding):
        # Store the user-specified encoding for use later on.
        self.user_specified_encoding = user_specified_encoding
        yield (markup, None, None, False)

    # These methods are defined by Beautiful Soup.
    def feed(self, markup):
        if self.soup.parse_only is not None:
            warnings.warn("You provided a value for parse_only, but the html5lib tree builder doesn't support parse_only. The entire document will be parsed.")
        parser = html5lib.HTMLParser(tree=self.create_treebuilder)
        doc = parser.parse(markup, encoding=self.user_specified_encoding)

        # Set the character encoding detected by the tokenizer.
        if isinstance(markup, unicode):
            # We need to special-case this because html5lib sets
            # charEncoding to UTF-8 if it gets Unicode input.
            doc.original_encoding = None
        else:
            doc.original_encoding = parser.tokenizer.stream.charEncoding[0]

    def create_treebuilder(self, namespaceHTMLElements):
        self.underlying_builder = TreeBuilderForHtml5lib(
            self.soup, namespaceHTMLElements)
        return self.underlying_builder

    def test_fragment_to_document(self, fragment):
        """See `TreeBuilder`."""
        return u'<html><head></head><body>%s</body></html>' % fragment


class TreeBuilderForHtml5lib(treebuildersbase.TreeBuilder):

    def __init__(self, soup, namespaceHTMLElements):
        self.soup = soup
        super(TreeBuilderForHtml5lib, self).__init__(namespaceHTMLElements)

    def documentClass(self):
        self.soup.reset()
        return Element(self.soup, self.soup, None)

    def insertDoctype(self, token):
        name = token["name"]
        publicId = token["publicId"]
        systemId = token["systemId"]

        doctype = Doctype.for_name_and_ids(name, publicId, systemId)
        self.soup.object_was_parsed(doctype)

    def elementClass(self, name, namespace):
        tag = self.soup.new_tag(name, namespace)
        return Element(tag, self.soup, namespace)

    def commentClass(self, data):
        return TextNode(Comment(data), self.soup)

    def fragmentClass(self):
        self.soup = BeautifulSoup("")
        self.soup.name = "[document_fragment]"
        return Element(self.soup, self.soup, None)

    def appendChild(self, node):
        # XXX This code is not covered by the BS4 tests.
        self.soup.append(node.element)

    def getDocument(self):
        return self.soup

    def getFragment(self):
        return treebuildersbase.TreeBuilder.getFragment(self).element

class AttrList(object):
    def __init__(self, element):
        self.element = element
        self.attrs = dict(self.element.attrs)
    def __iter__(self):
        return list(self.attrs.items()).__iter__()
    def __setitem__(self, name, value):
        "set attr", name, value
        self.element[name] = value
    def items(self):
        return list(self.attrs.items())
    def keys(self):
        return list(self.attrs.keys())
    def __len__(self):
        return len(self.attrs)
    def __getitem__(self, name):
        return self.attrs[name]
    def __contains__(self, name):
        return name in list(self.attrs.keys())


class Element(treebuildersbase.Node):
    def __init__(self, element, soup, namespace):
        treebuildersbase.Node.__init__(self, element.name)
        self.element = element
        self.soup = soup
        self.namespace = namespace

    def appendChild(self, node):
        string_child = child = None
        if isinstance(node, basestring):
            # Some other piece of code decided to pass in a string
            # instead of creating a TextElement object to contain the
            # string.
            string_child = child = node
        elif isinstance(node, Tag):
            # Some other piece of code decided to pass in a Tag
            # instead of creating an Element object to contain the
            # Tag.
            child = node
        elif node.element.__class__ == NavigableString:
            string_child = child = node.element
        else:
            child = node.element

        if not isinstance(child, basestring) and child.parent is not None:
            node.element.extract()

        if (string_child and self.element.contents
            and self.element.contents[-1].__class__ == NavigableString):
            # We are appending a string onto another string.
            # TODO This has O(n^2) performance, for input like
            # "a</a>a</a>a</a>..."
            old_element = self.element.contents[-1]
            new_element = self.soup.new_string(old_element + string_child)
            old_element.replace_with(new_element)
            self.soup._most_recent_element = new_element
        else:
            if isinstance(node, basestring):
                # Create a brand new NavigableString from this string.
                child = self.soup.new_string(node)

            # Tell Beautiful Soup to act as if it parsed this element
            # immediately after the parent's last descendant. (Or
            # immediately after the parent, if it has no children.)
            if self.element.contents:
                most_recent_element = self.element._last_descendant(False)
            else:
                most_recent_element = self.element

            self.soup.object_was_parsed(
                child, parent=self.element,
                most_recent_element=most_recent_element)

    def getAttributes(self):
        return AttrList(self.element)

    def setAttributes(self, attributes):
        if attributes is not None and len(attributes) > 0:

            converted_attributes = []
            for name, value in list(attributes.items()):
                if isinstance(name, tuple):
                    new_name = NamespacedAttribute(*name)
                    del attributes[name]
                    attributes[new_name] = value

            self.soup.builder._replace_cdata_list_attribute_values(
                self.name, attributes)
            for name, value in attributes.items():
                self.element[name] = value

            # The attributes may contain variables that need substitution.
            # Call set_up_substitutions manually.
            #
            # The Tag constructor called this method when the Tag was created,
            # but we just set/changed the attributes, so call it again.
            self.soup.builder.set_up_substitutions(self.element)
    attributes = property(getAttributes, setAttributes)

    def insertText(self, data, insertBefore=None):
        if insertBefore:
            text = TextNode(self.soup.new_string(data), self.soup)
            self.insertBefore(data, insertBefore)
        else:
            self.appendChild(data)

    def insertBefore(self, node, refNode):
        index = self.element.index(refNode.element)
        if (node.element.__class__ == NavigableString and self.element.contents
            and self.element.contents[index-1].__class__ == NavigableString):
            # (See comments in appendChild)
            old_node = self.element.contents[index-1]
            new_str = self.soup.new_string(old_node + node.element)
            old_node.replace_with(new_str)
        else:
            self.element.insert(index, node.element)
            node.parent = self

    def removeChild(self, node):
        node.element.extract()

    def reparentChildren(self, new_parent):
        """Move all of this tag's children into another tag."""
        element = self.element
        new_parent_element = new_parent.element
        # Determine what this tag's next_element will be once all the children
        # are removed.
        final_next_element = element.next_sibling

        new_parents_last_descendant = new_parent_element._last_descendant(False, False)
        if len(new_parent_element.contents) > 0:
            # The new parent already contains children. We will be
            # appending this tag's children to the end.
            new_parents_last_child = new_parent_element.contents[-1]
            new_parents_last_descendant_next_element = new_parents_last_descendant.next_element
        else:
            # The new parent contains no children.
            new_parents_last_child = None
            new_parents_last_descendant_next_element = new_parent_element.next_element

        to_append = element.contents
        append_after = new_parent.element.contents
        if len(to_append) > 0:
            # Set the first child's previous_element and previous_sibling
            # to elements within the new parent
            first_child = to_append[0]
            first_child.previous_element = new_parents_last_descendant
            first_child.previous_sibling = new_parents_last_child

            # Fix the last child's next_element and next_sibling
            last_child = to_append[-1]
            last_child.next_element = new_parents_last_descendant_next_element
            last_child.next_sibling = None

        for child in to_append:
            child.parent = new_parent_element
            new_parent_element.contents.append(child)

        # Now that this element has no children, change its .next_element.
        element.contents = []
        element.next_element = final_next_element

    def cloneNode(self):
        tag = self.soup.new_tag(self.element.name, self.namespace)
        node = Element(tag, self.soup, self.namespace)
        for key,value in self.attributes:
            node.attributes[key] = value
        return node

    def hasContent(self):
        return self.element.contents

    def getNameTuple(self):
        if self.namespace == None:
            return namespaces["html"], self.name
        else:
            return self.namespace, self.name

    nameTuple = property(getNameTuple)

class TextNode(Element):
    def __init__(self, element, soup):
        treebuildersbase.Node.__init__(self, None)
        self.element = element
        self.soup = soup

    def cloneNode(self):
        raise NotImplementedError
@@ -1,258 +0,0 @@
|
||||
"""Use the HTMLParser library to parse HTML files that aren't too bad."""
|
||||
|
||||
__all__ = [
|
||||
'HTMLParserTreeBuilder',
|
||||
]
|
||||
|
||||
from HTMLParser import (
|
||||
HTMLParser,
|
||||
HTMLParseError,
|
||||
)
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
# Starting in Python 3.2, the HTMLParser constructor takes a 'strict'
|
||||
# argument, which we'd like to set to False. Unfortunately,
|
||||
# http://bugs.python.org/issue13273 makes strict=True a better bet
|
||||
# before Python 3.2.3.
|
||||
#
|
||||
# At the end of this file, we monkeypatch HTMLParser so that
|
||||
# strict=True works well on Python 3.2.2.
|
||||
major, minor, release = sys.version_info[:3]
|
||||
CONSTRUCTOR_TAKES_STRICT = (
|
||||
major > 3
|
||||
or (major == 3 and minor > 2)
|
||||
or (major == 3 and minor == 2 and release >= 3))
|
||||
|
||||
from bs4.element import (
|
||||
CData,
|
||||
Comment,
|
||||
Declaration,
|
||||
Doctype,
|
||||
ProcessingInstruction,
|
||||
)
|
||||
from bs4.dammit import EntitySubstitution, UnicodeDammit
|
||||
|
||||
from bs4.builder import (
|
||||
HTML,
|
||||
HTMLTreeBuilder,
|
||||
STRICT,
|
||||
)
|
||||
|
||||
|
||||
HTMLPARSER = 'html.parser'
|
||||
|
||||
class BeautifulSoupHTMLParser(HTMLParser):
|
||||
def handle_starttag(self, name, attrs):
|
||||
# XXX namespace
|
||||
attr_dict = {}
|
||||
for key, value in attrs:
|
||||
# Change None attribute values to the empty string
|
||||
# for consistency with the other tree builders.
|
||||
if value is None:
|
||||
value = ''
|
||||
attr_dict[key] = value
|
||||
attrvalue = '""'
|
||||
self.soup.handle_starttag(name, None, None, attr_dict)
|
||||
|
||||
def handle_endtag(self, name):
|
||||
self.soup.handle_endtag(name)
|
||||
|
||||
def handle_data(self, data):
|
||||
self.soup.handle_data(data)
|
||||
|
||||
def handle_charref(self, name):
|
||||
# XXX workaround for a bug in HTMLParser. Remove this once
|
||||
# it's fixed.
|
||||
if name.startswith('x'):
|
||||
real_name = int(name.lstrip('x'), 16)
|
||||
elif name.startswith('X'):
|
||||
real_name = int(name.lstrip('X'), 16)
|
||||
else:
|
||||
real_name = int(name)
|
||||
|
||||
try:
|
||||
data = unichr(real_name)
|
||||
except (ValueError, OverflowError), e:
|
||||
data = u"\N{REPLACEMENT CHARACTER}"
|
||||
|
||||
self.handle_data(data)
|
||||
|
||||
def handle_entityref(self, name):
|
||||
character = EntitySubstitution.HTML_ENTITY_TO_CHARACTER.get(name)
|
||||
if character is not None:
|
||||
data = character
|
||||
else:
|
||||
data = "&%s;" % name
|
||||
self.handle_data(data)
|
||||
|
||||
def handle_comment(self, data):
|
||||
self.soup.endData()
|
||||
self.soup.handle_data(data)
|
||||
self.soup.endData(Comment)
|
||||
|
||||
def handle_decl(self, data):
|
||||
self.soup.endData()
|
||||
if data.startswith("DOCTYPE "):
|
||||
data = data[len("DOCTYPE "):]
|
||||
elif data == 'DOCTYPE':
|
||||
# i.e. "<!DOCTYPE>"
|
||||
data = ''
|
||||
self.soup.handle_data(data)
|
||||
self.soup.endData(Doctype)
|
||||
|
||||
def unknown_decl(self, data):
|
||||
if data.upper().startswith('CDATA['):
|
||||
cls = CData
|
||||
data = data[len('CDATA['):]
|
||||
else:
|
||||
cls = Declaration
|
||||
self.soup.endData()
|
||||
self.soup.handle_data(data)
|
||||
self.soup.endData(cls)
|
||||
|
||||
def handle_pi(self, data):
|
||||
self.soup.endData()
|
||||
if data.endswith("?") and data.lower().startswith("xml"):
|
||||
# "An XHTML processing instruction using the trailing '?'
|
||||
# will cause the '?' to be included in data." - HTMLParser
|
||||
# docs.
|
||||
#
|
||||
# Strip the question mark so we don't end up with two
|
||||
# question marks.
|
||||
data = data[:-1]
|
||||
self.soup.handle_data(data)
|
||||
self.soup.endData(ProcessingInstruction)
|
||||
|
||||
|
||||
class HTMLParserTreeBuilder(HTMLTreeBuilder):
|
||||
|
||||
is_xml = False
|
||||
features = [HTML, STRICT, HTMLPARSER]
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
if CONSTRUCTOR_TAKES_STRICT:
|
||||
kwargs['strict'] = False
|
||||
self.parser_args = (args, kwargs)
|
||||
|
||||
def prepare_markup(self, markup, user_specified_encoding=None,
|
||||
document_declared_encoding=None):
|
||||
"""
|
||||
:return: A 4-tuple (markup, original encoding, encoding
|
||||
declared within markup, whether any characters had to be
|
||||
replaced with REPLACEMENT CHARACTER).
|
||||
"""
|
||||
if isinstance(markup, unicode):
|
||||
yield (markup, None, None, False)
|
||||
return
|
||||
|
||||
try_encodings = [user_specified_encoding, document_declared_encoding]
|
||||
dammit = UnicodeDammit(markup, try_encodings, is_html=True)
|
||||
yield (dammit.markup, dammit.original_encoding,
|
||||
dammit.declared_html_encoding,
|
||||
dammit.contains_replacement_characters)
|
||||
|
||||
def feed(self, markup):
|
||||
args, kwargs = self.parser_args
|
||||
parser = BeautifulSoupHTMLParser(*args, **kwargs)
|
||||
parser.soup = self.soup
|
||||
try:
|
||||
parser.feed(markup)
|
||||
except HTMLParseError, e:
|
||||
warnings.warn(RuntimeWarning(
|
||||
"Python's built-in HTMLParser cannot parse the given document. This is not a bug in Beautiful Soup. The best solution is to install an external parser (lxml or html5lib), and use Beautiful Soup with that parser. See http://www.crummy.com/software/BeautifulSoup/bs4/doc/#installing-a-parser for help."))
|
||||
raise e
|
||||
|
||||
# Patch 3.2 versions of HTMLParser earlier than 3.2.3 to use some
|
||||
# 3.2.3 code. This ensures they don't treat markup like <p></p> as a
|
||||
# string.
|
||||
#
|
||||
# XXX This code can be removed once most Python 3 users are on 3.2.3.
|
||||
if major == 3 and minor == 2 and not CONSTRUCTOR_TAKES_STRICT:
|
||||
import re
|
||||
attrfind_tolerant = re.compile(
|
||||
r'\s*((?<=[\'"\s])[^\s/>][^\s/=>]*)(\s*=+\s*'
|
||||
r'(\'[^\']*\'|"[^"]*"|(?![\'"])[^>\s]*))?')
|
||||
HTMLParserTreeBuilder.attrfind_tolerant = attrfind_tolerant
|
||||
|
||||
locatestarttagend = re.compile(r"""
|
||||
<[a-zA-Z][-.a-zA-Z0-9:_]* # tag name
|
||||
(?:\s+ # whitespace before attribute name
|
||||
(?:[a-zA-Z_][-.:a-zA-Z0-9_]* # attribute name
|
||||
(?:\s*=\s* # value indicator
|
||||
(?:'[^']*' # LITA-enclosed value
|
||||
|\"[^\"]*\" # LIT-enclosed value
|
||||
|[^'\">\s]+ # bare value
|
||||
)
|
||||
)?
|
||||
)
|
||||
)*
|
||||
\s* # trailing whitespace
|
||||
""", re.VERBOSE)
|
||||
BeautifulSoupHTMLParser.locatestarttagend = locatestarttagend
|
||||
|
||||
from html.parser import tagfind, attrfind
|
||||
|
||||
def parse_starttag(self, i):
|
||||
self.__starttag_text = None
|
||||
endpos = self.check_for_whole_start_tag(i)
|
||||
if endpos < 0:
|
||||
return endpos
|
||||
rawdata = self.rawdata
|
||||
self.__starttag_text = rawdata[i:endpos]
|
||||
|
||||
# Now parse the data between i+1 and j into a tag and attrs
|
||||
attrs = []
|
||||
match = tagfind.match(rawdata, i+1)
|
||||
assert match, 'unexpected call to parse_starttag()'
|
||||
k = match.end()
|
||||
self.lasttag = tag = rawdata[i+1:k].lower()
|
||||
while k < endpos:
|
||||
if self.strict:
|
||||
m = attrfind.match(rawdata, k)
|
||||
else:
|
||||
m = attrfind_tolerant.match(rawdata, k)
|
||||
if not m:
|
||||
break
|
||||
attrname, rest, attrvalue = m.group(1, 2, 3)
|
||||
if not rest:
|
||||
attrvalue = None
|
||||
elif attrvalue[:1] == '\'' == attrvalue[-1:] or \
|
||||
attrvalue[:1] == '"' == attrvalue[-1:]:
|
||||
attrvalue = attrvalue[1:-1]
|
||||
if attrvalue:
|
||||
attrvalue = self.unescape(attrvalue)
|
||||
attrs.append((attrname.lower(), attrvalue))
|
||||
k = m.end()
|
||||
|
||||
end = rawdata[k:endpos].strip()
|
||||
if end not in (">", "/>"):
|
||||
lineno, offset = self.getpos()
|
||||
if "\n" in self.__starttag_text:
|
||||
lineno = lineno + self.__starttag_text.count("\n")
|
||||
offset = len(self.__starttag_text) \
|
||||
- self.__starttag_text.rfind("\n")
|
||||
else:
|
||||
offset = offset + len(self.__starttag_text)
|
||||
if self.strict:
|
||||
self.error("junk characters in start tag: %r"
|
||||
% (rawdata[k:endpos][:20],))
|
||||
self.handle_data(rawdata[i:endpos])
|
||||
return endpos
|
||||
if end.endswith('/>'):
|
||||
# XHTML-style empty tag: <span attr="value" />
|
||||
self.handle_startendtag(tag, attrs)
|
||||
else:
|
||||
self.handle_starttag(tag, attrs)
|
||||
if tag in self.CDATA_CONTENT_ELEMENTS:
|
||||
self.set_cdata_mode(tag)
|
||||
return endpos
|
||||
|
||||
def set_cdata_mode(self, elem):
|
||||
self.cdata_elem = elem.lower()
|
||||
self.interesting = re.compile(r'</\s*%s\s*>' % self.cdata_elem, re.I)
|
||||
|
||||
BeautifulSoupHTMLParser.parse_starttag = parse_starttag
|
||||
BeautifulSoupHTMLParser.set_cdata_mode = set_cdata_mode
|
||||
|
||||
CONSTRUCTOR_TAKES_STRICT = True
|
||||
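
# A minimal sketch of what the tolerant regex above accepts (hypothetical
# markup; runnable under any Python with re). The unquoted attribute value
# is exactly the kind of input strict pre-3.2.3 HTMLParser rejects:
#
#   import re
#   attrfind_tolerant = re.compile(
#       r'\s*((?<=[\'"\s])[^\s/>][^\s/=>]*)(\s*=+\s*'
#       r'(\'[^\']*\'|"[^"]*"|(?![\'"])[^>\s]*))?')
#   m = attrfind_tolerant.match(' class=unquoted>')
#   print m.group(1), m.group(3)   # -> class unquoted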
@@ -1,233 +0,0 @@
__all__ = [
    'LXMLTreeBuilderForXML',
    'LXMLTreeBuilder',
    ]

from io import BytesIO
from StringIO import StringIO
import collections
from lxml import etree
from bs4.element import Comment, Doctype, NamespacedAttribute
from bs4.builder import (
    FAST,
    HTML,
    HTMLTreeBuilder,
    PERMISSIVE,
    ParserRejectedMarkup,
    TreeBuilder,
    XML)
from bs4.dammit import EncodingDetector

LXML = 'lxml'

class LXMLTreeBuilderForXML(TreeBuilder):
    DEFAULT_PARSER_CLASS = etree.XMLParser

    is_xml = True

    # Well, it's permissive by XML parser standards.
    features = [LXML, XML, FAST, PERMISSIVE]

    CHUNK_SIZE = 512

    # This namespace mapping is specified in the XML Namespace
    # standard.
    DEFAULT_NSMAPS = {'http://www.w3.org/XML/1998/namespace' : "xml"}

    def default_parser(self, encoding):
        # This can either return a parser object or a class, which
        # will be instantiated with default arguments.
        if self._default_parser is not None:
            return self._default_parser
        return etree.XMLParser(
            target=self, strip_cdata=False, recover=True, encoding=encoding)

    def parser_for(self, encoding):
        # Use the default parser.
        parser = self.default_parser(encoding)

        if isinstance(parser, collections.Callable):
            # Instantiate the parser with default arguments
            parser = parser(target=self, strip_cdata=False, encoding=encoding)
        return parser

    def __init__(self, parser=None, empty_element_tags=None):
        # TODO: Issue a warning if parser is present but not a
        # callable, since that means there's no way to create new
        # parsers for different encodings.
        self._default_parser = parser
        if empty_element_tags is not None:
            self.empty_element_tags = set(empty_element_tags)
        self.soup = None
        self.nsmaps = [self.DEFAULT_NSMAPS]

    def _getNsTag(self, tag):
        # Split the namespace URL out of a fully-qualified lxml tag
        # name. Copied from lxml's src/lxml/sax.py.
        if tag[0] == '{':
            return tuple(tag[1:].split('}', 1))
        else:
            return (None, tag)

    def prepare_markup(self, markup, user_specified_encoding=None,
                       document_declared_encoding=None):
        """
        :yield: A series of 4-tuples.
         (markup, encoding, declared encoding,
          has undergone character replacement)

        Each 4-tuple represents a strategy for parsing the document.
        """
        if isinstance(markup, unicode):
            # We were given Unicode. Maybe lxml can parse Unicode on
            # this system?
            yield markup, None, document_declared_encoding, False

        if isinstance(markup, unicode):
            # No, apparently not. Convert the Unicode to UTF-8 and
            # tell lxml to parse it as UTF-8.
            yield (markup.encode("utf8"), "utf8",
                   document_declared_encoding, False)

        # Instead of using UnicodeDammit to convert the bytestring to
        # Unicode using different encodings, use EncodingDetector to
        # iterate over the encodings, and tell lxml to try to parse
        # the document as each one in turn.
        is_html = not self.is_xml
        try_encodings = [user_specified_encoding, document_declared_encoding]
        detector = EncodingDetector(markup, try_encodings, is_html)
        for encoding in detector.encodings:
            yield (detector.markup, encoding, document_declared_encoding, False)

    def feed(self, markup):
        if isinstance(markup, bytes):
            markup = BytesIO(markup)
        elif isinstance(markup, unicode):
            markup = StringIO(markup)

        # Call feed() at least once, even if the markup is empty,
        # or the parser won't be initialized.
        data = markup.read(self.CHUNK_SIZE)
        try:
            self.parser = self.parser_for(self.soup.original_encoding)
            self.parser.feed(data)
            while len(data) != 0:
                # Now call feed() on the rest of the data, chunk by chunk.
                data = markup.read(self.CHUNK_SIZE)
                if len(data) != 0:
                    self.parser.feed(data)
            self.parser.close()
        except (UnicodeDecodeError, LookupError, etree.ParserError), e:
            raise ParserRejectedMarkup(str(e))

    def close(self):
        self.nsmaps = [self.DEFAULT_NSMAPS]

    def start(self, name, attrs, nsmap={}):
        # Make sure attrs is a mutable dict--lxml may send an immutable dictproxy.
        attrs = dict(attrs)
        nsprefix = None
        # Invert each namespace map as it comes in.
        if len(self.nsmaps) > 1:
            # There are no new namespaces for this tag, but
            # non-default namespaces are in play, so we need a
            # separate tag stack to know when they end.
            self.nsmaps.append(None)
        elif len(nsmap) > 0:
            # A new namespace mapping has come into play.
            inverted_nsmap = dict((value, key) for key, value in nsmap.items())
            self.nsmaps.append(inverted_nsmap)
            # Also treat the namespace mapping as a set of attributes on the
            # tag, so we can recreate it later.
            attrs = attrs.copy()
            for prefix, namespace in nsmap.items():
                attribute = NamespacedAttribute(
                    "xmlns", prefix, "http://www.w3.org/2000/xmlns/")
                attrs[attribute] = namespace

        # Namespaces are in play. Find any attributes that came in
        # from lxml with namespaces attached to their names, and
        # turn them into NamespacedAttribute objects.
        new_attrs = {}
        for attr, value in attrs.items():
            namespace, attr = self._getNsTag(attr)
            if namespace is None:
                new_attrs[attr] = value
            else:
                nsprefix = self._prefix_for_namespace(namespace)
                attr = NamespacedAttribute(nsprefix, attr, namespace)
                new_attrs[attr] = value
        attrs = new_attrs

        namespace, name = self._getNsTag(name)
        nsprefix = self._prefix_for_namespace(namespace)
        self.soup.handle_starttag(name, namespace, nsprefix, attrs)

    def _prefix_for_namespace(self, namespace):
        """Find the currently active prefix for the given namespace."""
        if namespace is None:
            return None
        for inverted_nsmap in reversed(self.nsmaps):
            if inverted_nsmap is not None and namespace in inverted_nsmap:
                return inverted_nsmap[namespace]
        return None

    def end(self, name):
        self.soup.endData()
        completed_tag = self.soup.tagStack[-1]
        namespace, name = self._getNsTag(name)
        nsprefix = None
        if namespace is not None:
            for inverted_nsmap in reversed(self.nsmaps):
                if inverted_nsmap is not None and namespace in inverted_nsmap:
                    nsprefix = inverted_nsmap[namespace]
                    break
        self.soup.handle_endtag(name, nsprefix)
        if len(self.nsmaps) > 1:
            # This tag, or one of its parents, introduced a namespace
            # mapping, so pop it off the stack.
            self.nsmaps.pop()

    def pi(self, target, data):
        pass

    def data(self, content):
        self.soup.handle_data(content)

    def doctype(self, name, pubid, system):
        self.soup.endData()
        doctype = Doctype.for_name_and_ids(name, pubid, system)
        self.soup.object_was_parsed(doctype)

    def comment(self, content):
        "Handle comments as Comment objects."
        self.soup.endData()
        self.soup.handle_data(content)
        self.soup.endData(Comment)

    def test_fragment_to_document(self, fragment):
        """See `TreeBuilder`."""
        return u'<?xml version="1.0" encoding="utf-8"?>\n%s' % fragment


class LXMLTreeBuilder(HTMLTreeBuilder, LXMLTreeBuilderForXML):

    features = [LXML, HTML, FAST, PERMISSIVE]
    is_xml = False

    def default_parser(self, encoding):
        return etree.HTMLParser

    def feed(self, markup):
        encoding = self.soup.original_encoding
        try:
            self.parser = self.parser_for(encoding)
            self.parser.feed(markup)
            self.parser.close()
        except (UnicodeDecodeError, LookupError, etree.ParserError), e:
            raise ParserRejectedMarkup(str(e))


    def test_fragment_to_document(self, fragment):
        """See `TreeBuilder`."""
        return u'<html><body>%s</body></html>' % fragment
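
# A short usage sketch for the two builders above, assuming bs4 and lxml
# are installed. BeautifulSoup picks a builder through the feature strings
# declared above, so 'lxml' maps to LXMLTreeBuilder and 'xml' to
# LXMLTreeBuilderForXML:
#
#   from bs4 import BeautifulSoup
#   BeautifulSoup('<p>unclosed', 'lxml')   # HTML mode: recovers the <p>
#   BeautifulSoup('<root/>', 'xml')        # XML mode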
@@ -1,829 +0,0 @@
# -*- coding: utf-8 -*-
"""Beautiful Soup bonus library: Unicode, Dammit

This library converts a bytestream to Unicode through any means
necessary. It is heavily based on code from Mark Pilgrim's Universal
Feed Parser. It works best on XML and HTML, but it does not rewrite the
XML or HTML to reflect a new encoding; that's the tree builder's job.
"""

import codecs
from htmlentitydefs import codepoint2name
import re
import logging
import string

# Import a library to autodetect character encodings.
chardet_type = None
try:
    # First try the fast C implementation.
    # PyPI package: cchardet
    import cchardet
    def chardet_dammit(s):
        return cchardet.detect(s)['encoding']
except ImportError:
    try:
        # Fall back to the pure Python implementation
        # Debian package: python-chardet
        # PyPI package: chardet
        import chardet
        def chardet_dammit(s):
            return chardet.detect(s)['encoding']
        #import chardet.constants
        #chardet.constants._debug = 1
    except ImportError:
        # No chardet available.
        def chardet_dammit(s):
            return None

# Available from http://cjkpython.i18n.org/.
try:
    import iconv_codec
except ImportError:
    pass

xml_encoding_re = re.compile(
    '^<\?.*encoding=[\'"](.*?)[\'"].*\?>'.encode(), re.I)
html_meta_re = re.compile(
    '<\s*meta[^>]+charset\s*=\s*["\']?([^>]*?)[ /;\'">]'.encode(), re.I)
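# A minimal sketch of the fallback chain above: whichever import succeeded,
# chardet_dammit() keeps the same signature, and simply reports no guess
# when neither detection library is available:
#
#   guess = chardet_dammit(b'\x91Hello\x92')
#   # -> e.g. 'windows-1252' with cchardet/chardet installed, else None
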
class EntitySubstitution(object):

    """Substitute XML or HTML entities for the corresponding characters."""

    def _populate_class_variables():
        lookup = {}
        reverse_lookup = {}
        characters_for_re = []
        for codepoint, name in list(codepoint2name.items()):
            character = unichr(codepoint)
            if codepoint != 34:
                # There's no point in turning the quotation mark into
                # &quot;, unless it happens within an attribute value, which
                # is handled elsewhere.
                characters_for_re.append(character)
                lookup[character] = name
            # But we do want to turn &quot; into the quotation mark.
            reverse_lookup[name] = character
        re_definition = "[%s]" % "".join(characters_for_re)
        return lookup, reverse_lookup, re.compile(re_definition)
    (CHARACTER_TO_HTML_ENTITY, HTML_ENTITY_TO_CHARACTER,
     CHARACTER_TO_HTML_ENTITY_RE) = _populate_class_variables()

    CHARACTER_TO_XML_ENTITY = {
        "'": "apos",
        '"': "quot",
        "&": "amp",
        "<": "lt",
        ">": "gt",
        }

    BARE_AMPERSAND_OR_BRACKET = re.compile("([<>]|"
                                           "&(?!#\d+;|#x[0-9a-fA-F]+;|\w+;)"
                                           ")")

    AMPERSAND_OR_BRACKET = re.compile("([<>&])")

    @classmethod
    def _substitute_html_entity(cls, matchobj):
        entity = cls.CHARACTER_TO_HTML_ENTITY.get(matchobj.group(0))
        return "&%s;" % entity

    @classmethod
    def _substitute_xml_entity(cls, matchobj):
        """Used with a regular expression to substitute the
        appropriate XML entity for an XML special character."""
        entity = cls.CHARACTER_TO_XML_ENTITY[matchobj.group(0)]
        return "&%s;" % entity

    @classmethod
    def quoted_attribute_value(self, value):
        """Make a value into a quoted XML attribute, possibly escaping it.

        Most strings will be quoted using double quotes.

         Bob's Bar -> "Bob's Bar"

        If a string contains double quotes, it will be quoted using
        single quotes.

         Welcome to "my bar" -> 'Welcome to "my bar"'

        If a string contains both single and double quotes, the
        double quotes will be escaped, and the string will be quoted
        using double quotes.

         Welcome to "Bob's Bar" -> "Welcome to &quot;Bob's bar&quot;"
        """
        quote_with = '"'
        if '"' in value:
            if "'" in value:
                # The string contains both single and double
                # quotes. Turn the double quotes into
                # entities. We quote the double quotes rather than
                # the single quotes because the entity name is
                # "&quot;" whether this is HTML or XML. If we
                # quoted the single quotes, we'd have to decide
                # between &apos; and &squot;.
                replace_with = "&quot;"
                value = value.replace('"', replace_with)
            else:
                # There are double quotes but no single quotes.
                # We can use single quotes to quote the attribute.
                quote_with = "'"
        return quote_with + value + quote_with

    @classmethod
    def substitute_xml(cls, value, make_quoted_attribute=False):
        """Substitute XML entities for special XML characters.

        :param value: A string to be substituted. The less-than sign
          will become &lt;, the greater-than sign will become &gt;,
          and any ampersands will become &amp;. If you want ampersands
          that appear to be part of an entity definition to be left
          alone, use substitute_xml_containing_entities() instead.

        :param make_quoted_attribute: If True, then the string will be
         quoted, as befits an attribute value.
        """
        # Escape angle brackets and ampersands.
        value = cls.AMPERSAND_OR_BRACKET.sub(
            cls._substitute_xml_entity, value)

        if make_quoted_attribute:
            value = cls.quoted_attribute_value(value)
        return value

    @classmethod
    def substitute_xml_containing_entities(
        cls, value, make_quoted_attribute=False):
        """Substitute XML entities for special XML characters.

        :param value: A string to be substituted. The less-than sign will
          become &lt;, the greater-than sign will become &gt;, and any
          ampersands that are not part of an entity definition will
          become &amp;.

        :param make_quoted_attribute: If True, then the string will be
         quoted, as befits an attribute value.
        """
        # Escape angle brackets, and ampersands that aren't part of
        # entities.
        value = cls.BARE_AMPERSAND_OR_BRACKET.sub(
            cls._substitute_xml_entity, value)

        if make_quoted_attribute:
            value = cls.quoted_attribute_value(value)
        return value

    @classmethod
    def substitute_html(cls, s):
        """Replace certain Unicode characters with named HTML entities.

        This differs from data.encode(encoding, 'xmlcharrefreplace')
        in that the goal is to make the result more readable (to those
        with ASCII displays) rather than to recover from
        errors. There's absolutely nothing wrong with a UTF-8 string
        containing a LATIN SMALL LETTER E WITH ACUTE, but replacing that
        character with "&eacute;" will make it more readable to some
        people.
        """
        return cls.CHARACTER_TO_HTML_ENTITY_RE.sub(
            cls._substitute_html_entity, s)

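# A minimal sketch of the substitution helpers above (assumes this module
# is importable as bs4.dammit; the expected values follow the docstrings):
#
#   from bs4.dammit import EntitySubstitution
#   EntitySubstitution.substitute_xml('AT&T', True)     # -> '"AT&amp;T"'
#   EntitySubstitution.quoted_attribute_value('a "b"')  # -> '\'a "b"\''
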
class EncodingDetector:
    """Suggests a number of possible encodings for a bytestring.

    Order of precedence:

    1. Encodings you specifically tell EncodingDetector to try first
    (the override_encodings argument to the constructor).

    2. An encoding declared within the bytestring itself, either in an
    XML declaration (if the bytestring is to be interpreted as an XML
    document), or in a <meta> tag (if the bytestring is to be
    interpreted as an HTML document.)

    3. An encoding detected through textual analysis by chardet,
    cchardet, or a similar external library.

    4. UTF-8.

    5. Windows-1252.
    """
    def __init__(self, markup, override_encodings=None, is_html=False):
        self.override_encodings = override_encodings or []
        self.chardet_encoding = None
        self.is_html = is_html
        self.declared_encoding = None

        # First order of business: strip a byte-order mark.
        self.markup, self.sniffed_encoding = self.strip_byte_order_mark(markup)

    def _usable(self, encoding, tried):
        if encoding is not None:
            encoding = encoding.lower()
            if encoding not in tried:
                tried.add(encoding)
                return True
        return False

    @property
    def encodings(self):
        """Yield a number of encodings that might work for this markup."""
        tried = set()
        for e in self.override_encodings:
            if self._usable(e, tried):
                yield e

        # Did the document originally start with a byte-order mark
        # that indicated its encoding?
        if self._usable(self.sniffed_encoding, tried):
            yield self.sniffed_encoding

        # Look within the document for an XML or HTML encoding
        # declaration.
        if self.declared_encoding is None:
            self.declared_encoding = self.find_declared_encoding(
                self.markup, self.is_html)
        if self._usable(self.declared_encoding, tried):
            yield self.declared_encoding

        # Use third-party character set detection to guess at the
        # encoding.
        if self.chardet_encoding is None:
            self.chardet_encoding = chardet_dammit(self.markup)
        if self._usable(self.chardet_encoding, tried):
            yield self.chardet_encoding

        # As a last-ditch effort, try utf-8 and windows-1252.
        for e in ('utf-8', 'windows-1252'):
            if self._usable(e, tried):
                yield e

    @classmethod
    def strip_byte_order_mark(cls, data):
        """If a byte-order mark is present, strip it and return the encoding it implies."""
        encoding = None
        if (len(data) >= 4) and (data[:2] == b'\xfe\xff') \
               and (data[2:4] != '\x00\x00'):
            encoding = 'utf-16be'
            data = data[2:]
        elif (len(data) >= 4) and (data[:2] == b'\xff\xfe') \
                 and (data[2:4] != '\x00\x00'):
            encoding = 'utf-16le'
            data = data[2:]
        elif data[:3] == b'\xef\xbb\xbf':
            encoding = 'utf-8'
            data = data[3:]
        elif data[:4] == b'\x00\x00\xfe\xff':
            encoding = 'utf-32be'
            data = data[4:]
        elif data[:4] == b'\xff\xfe\x00\x00':
            encoding = 'utf-32le'
            data = data[4:]
        return data, encoding

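    # A minimal sketch of the BOM handling above (hypothetical input with a
    # UTF-8 byte-order mark; the BOM is stripped and the implied encoding
    # returned):
    #
    #   EncodingDetector.strip_byte_order_mark(b'\xef\xbb\xbfhi')
    #   # -> ('hi', 'utf-8')
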
    @classmethod
    def find_declared_encoding(cls, markup, is_html=False, search_entire_document=False):
        """Given a document, tries to find its declared encoding.

        An XML encoding is declared at the beginning of the document.

        An HTML encoding is declared in a <meta> tag, hopefully near the
        beginning of the document.
        """
        if search_entire_document:
            xml_endpos = html_endpos = len(markup)
        else:
            xml_endpos = 1024
            html_endpos = max(2048, int(len(markup) * 0.05))

        declared_encoding = None
        declared_encoding_match = xml_encoding_re.search(markup, endpos=xml_endpos)
        if not declared_encoding_match and is_html:
            declared_encoding_match = html_meta_re.search(markup, endpos=html_endpos)
        if declared_encoding_match is not None:
            declared_encoding = declared_encoding_match.groups()[0].decode(
                'ascii')
        if declared_encoding:
            return declared_encoding.lower()
        return None

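# A usage sketch for the detector above; the exact sequence depends on
# which detection library (if any) is installed:
#
#   detector = EncodingDetector(
#       b'<?xml version="1.0" encoding="latin-1"?><root/>',
#       override_encodings=['utf-16'])
#   list(detector.encodings)
#   # -> ['utf-16', 'latin-1', <chardet guess, if any>, 'utf-8',
#   #     'windows-1252']
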
class UnicodeDammit:
    """A class for detecting the encoding of a *ML document and
    converting it to a Unicode string. If the source encoding is
    windows-1252, can replace MS smart quotes with their HTML or XML
    equivalents."""

    # This dictionary maps commonly seen values for "charset" in HTML
    # meta tags to the corresponding Python codec names. It only covers
    # values that aren't in Python's aliases and can't be determined
    # by the heuristics in find_codec.
    CHARSET_ALIASES = {"macintosh": "mac-roman",
                       "x-sjis": "shift-jis"}

    ENCODINGS_WITH_SMART_QUOTES = [
        "windows-1252",
        "iso-8859-1",
        "iso-8859-2",
        ]

    def __init__(self, markup, override_encodings=[],
                 smart_quotes_to=None, is_html=False):
        self.smart_quotes_to = smart_quotes_to
        self.tried_encodings = []
        self.contains_replacement_characters = False
        self.is_html = is_html

        self.detector = EncodingDetector(markup, override_encodings, is_html)

        # Short-circuit if the data is in Unicode to begin with.
        if isinstance(markup, unicode) or markup == '':
            self.markup = markup
            self.unicode_markup = unicode(markup)
            self.original_encoding = None
            return

        # The encoding detector may have stripped a byte-order mark.
        # Use the stripped markup from this point on.
        self.markup = self.detector.markup

        u = None
        for encoding in self.detector.encodings:
            markup = self.detector.markup
            u = self._convert_from(encoding)
            if u is not None:
                break

        if not u:
            # None of the encodings worked. As an absolute last resort,
            # try them again with character replacement.

            for encoding in self.detector.encodings:
                if encoding != "ascii":
                    u = self._convert_from(encoding, "replace")
                if u is not None:
                    logging.warning(
                        "Some characters could not be decoded, and were "
                        "replaced with REPLACEMENT CHARACTER.")
                    self.contains_replacement_characters = True
                    break

        # If none of that worked, we could at this point force it to
        # ASCII, but that would destroy so much data that I think
        # giving up is better.
        self.unicode_markup = u
        if not u:
            self.original_encoding = None

    def _sub_ms_char(self, match):
        """Changes a MS smart quote character to an XML or HTML
        entity, or an ASCII character."""
        orig = match.group(1)
        if self.smart_quotes_to == 'ascii':
            sub = self.MS_CHARS_TO_ASCII.get(orig).encode()
        else:
            sub = self.MS_CHARS.get(orig)
            if type(sub) == tuple:
                if self.smart_quotes_to == 'xml':
                    sub = '&#x'.encode() + sub[1].encode() + ';'.encode()
                else:
                    sub = '&'.encode() + sub[0].encode() + ';'.encode()
            else:
                sub = sub.encode()
        return sub

    def _convert_from(self, proposed, errors="strict"):
        proposed = self.find_codec(proposed)
        if not proposed or (proposed, errors) in self.tried_encodings:
            return None
        self.tried_encodings.append((proposed, errors))
        markup = self.markup
        # Convert smart quotes to HTML if coming from an encoding
        # that might have them.
        if (self.smart_quotes_to is not None
            and proposed in self.ENCODINGS_WITH_SMART_QUOTES):
            smart_quotes_re = b"([\x80-\x9f])"
            smart_quotes_compiled = re.compile(smart_quotes_re)
            markup = smart_quotes_compiled.sub(self._sub_ms_char, markup)

        try:
            #print "Trying to convert document to %s (errors=%s)" % (
            #    proposed, errors)
            u = self._to_unicode(markup, proposed, errors)
            self.markup = u
            self.original_encoding = proposed
        except Exception as e:
            #print "That didn't work!"
            #print e
            return None
        #print "Correct encoding: %s" % proposed
        return self.markup

    def _to_unicode(self, data, encoding, errors="strict"):
        '''Given a string and its encoding, decodes the string into Unicode.
        %encoding is a string recognized by encodings.aliases'''
        return unicode(data, encoding, errors)

    @property
    def declared_html_encoding(self):
        if not self.is_html:
            return None
        return self.detector.declared_encoding

    def find_codec(self, charset):
        value = (self._codec(self.CHARSET_ALIASES.get(charset, charset))
                 or (charset and self._codec(charset.replace("-", "")))
                 or (charset and self._codec(charset.replace("-", "_")))
                 or (charset and charset.lower())
                 or charset
                 )
        if value:
            return value.lower()
        return None

    def _codec(self, charset):
        if not charset:
            return charset
        codec = None
        try:
            codecs.lookup(charset)
            codec = charset
        except (LookupError, ValueError):
            pass
        return codec


    # A partial mapping of ISO-Latin-1 to HTML entities/XML numeric entities.
    MS_CHARS = {b'\x80': ('euro', '20AC'),
                b'\x81': ' ',
                b'\x82': ('sbquo', '201A'),
                b'\x83': ('fnof', '192'),
                b'\x84': ('bdquo', '201E'),
                b'\x85': ('hellip', '2026'),
                b'\x86': ('dagger', '2020'),
                b'\x87': ('Dagger', '2021'),
                b'\x88': ('circ', '2C6'),
                b'\x89': ('permil', '2030'),
                b'\x8A': ('Scaron', '160'),
                b'\x8B': ('lsaquo', '2039'),
                b'\x8C': ('OElig', '152'),
                b'\x8D': '?',
                b'\x8E': ('#x17D', '17D'),
                b'\x8F': '?',
                b'\x90': '?',
                b'\x91': ('lsquo', '2018'),
                b'\x92': ('rsquo', '2019'),
                b'\x93': ('ldquo', '201C'),
                b'\x94': ('rdquo', '201D'),
                b'\x95': ('bull', '2022'),
                b'\x96': ('ndash', '2013'),
                b'\x97': ('mdash', '2014'),
                b'\x98': ('tilde', '2DC'),
                b'\x99': ('trade', '2122'),
                b'\x9a': ('scaron', '161'),
                b'\x9b': ('rsaquo', '203A'),
                b'\x9c': ('oelig', '153'),
                b'\x9d': '?',
                b'\x9e': ('#x17E', '17E'),
                b'\x9f': ('Yuml', ''),}

    # A parochial partial mapping of ISO-Latin-1 to ASCII. Contains
    # horrors like stripping diacritical marks to turn á into a, but also
    # contains non-horrors like turning “ into ".
    MS_CHARS_TO_ASCII = {
        b'\x80' : 'EUR',
        b'\x81' : ' ',
        b'\x82' : ',',
        b'\x83' : 'f',
        b'\x84' : ',,',
        b'\x85' : '...',
        b'\x86' : '+',
        b'\x87' : '++',
        b'\x88' : '^',
        b'\x89' : '%',
        b'\x8a' : 'S',
        b'\x8b' : '<',
        b'\x8c' : 'OE',
        b'\x8d' : '?',
        b'\x8e' : 'Z',
        b'\x8f' : '?',
        b'\x90' : '?',
        b'\x91' : "'",
        b'\x92' : "'",
        b'\x93' : '"',
        b'\x94' : '"',
        b'\x95' : '*',
        b'\x96' : '-',
        b'\x97' : '--',
        b'\x98' : '~',
        b'\x99' : '(TM)',
        b'\x9a' : 's',
        b'\x9b' : '>',
        b'\x9c' : 'oe',
        b'\x9d' : '?',
        b'\x9e' : 'z',
        b'\x9f' : 'Y',
        b'\xa0' : ' ',
        b'\xa1' : '!',
        b'\xa2' : 'c',
        b'\xa3' : 'GBP',
        b'\xa4' : '$', #This approximation is especially parochial--this is the
                       #generic currency symbol.
        b'\xa5' : 'YEN',
        b'\xa6' : '|',
        b'\xa7' : 'S',
        b'\xa8' : '..',
        b'\xa9' : '',
        b'\xaa' : '(th)',
        b'\xab' : '<<',
        b'\xac' : '!',
        b'\xad' : ' ',
        b'\xae' : '(R)',
        b'\xaf' : '-',
        b'\xb0' : 'o',
        b'\xb1' : '+-',
        b'\xb2' : '2',
        b'\xb3' : '3',
        b'\xb4' : ("'", 'acute'),
        b'\xb5' : 'u',
        b'\xb6' : 'P',
        b'\xb7' : '*',
        b'\xb8' : ',',
        b'\xb9' : '1',
        b'\xba' : '(th)',
        b'\xbb' : '>>',
        b'\xbc' : '1/4',
        b'\xbd' : '1/2',
        b'\xbe' : '3/4',
        b'\xbf' : '?',
        b'\xc0' : 'A',
        b'\xc1' : 'A',
        b'\xc2' : 'A',
        b'\xc3' : 'A',
        b'\xc4' : 'A',
        b'\xc5' : 'A',
        b'\xc6' : 'AE',
        b'\xc7' : 'C',
        b'\xc8' : 'E',
        b'\xc9' : 'E',
        b'\xca' : 'E',
        b'\xcb' : 'E',
        b'\xcc' : 'I',
        b'\xcd' : 'I',
        b'\xce' : 'I',
        b'\xcf' : 'I',
        b'\xd0' : 'D',
        b'\xd1' : 'N',
        b'\xd2' : 'O',
        b'\xd3' : 'O',
        b'\xd4' : 'O',
        b'\xd5' : 'O',
        b'\xd6' : 'O',
        b'\xd7' : '*',
        b'\xd8' : 'O',
        b'\xd9' : 'U',
        b'\xda' : 'U',
        b'\xdb' : 'U',
        b'\xdc' : 'U',
        b'\xdd' : 'Y',
        b'\xde' : 'b',
        b'\xdf' : 'B',
        b'\xe0' : 'a',
        b'\xe1' : 'a',
        b'\xe2' : 'a',
        b'\xe3' : 'a',
        b'\xe4' : 'a',
        b'\xe5' : 'a',
        b'\xe6' : 'ae',
        b'\xe7' : 'c',
        b'\xe8' : 'e',
        b'\xe9' : 'e',
        b'\xea' : 'e',
        b'\xeb' : 'e',
        b'\xec' : 'i',
        b'\xed' : 'i',
        b'\xee' : 'i',
        b'\xef' : 'i',
        b'\xf0' : 'o',
        b'\xf1' : 'n',
        b'\xf2' : 'o',
        b'\xf3' : 'o',
        b'\xf4' : 'o',
        b'\xf5' : 'o',
        b'\xf6' : 'o',
        b'\xf7' : '/',
        b'\xf8' : 'o',
        b'\xf9' : 'u',
        b'\xfa' : 'u',
        b'\xfb' : 'u',
        b'\xfc' : 'u',
        b'\xfd' : 'y',
        b'\xfe' : 'b',
        b'\xff' : 'y',
        }

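    # A minimal sketch of the smart-quote substitution driven by the tables
    # above (\x93 and \x94 are Windows-1252 curly quotes; strict utf-8
    # decoding fails first, so windows-1252 wins and the quotes become
    # entities):
    #
    #   UnicodeDammit(b'<p>\x93Hi\x94</p>', smart_quotes_to='html').unicode_markup
    #   # -> u'<p>&ldquo;Hi&rdquo;</p>'
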
    # A map used when removing rogue Windows-1252/ISO-8859-1
    # characters in otherwise UTF-8 documents.
    #
    # Note that \x81, \x8d, \x8f, \x90, and \x9d are undefined in
    # Windows-1252.
    WINDOWS_1252_TO_UTF8 = {
        0x80 : b'\xe2\x82\xac', # €
        0x82 : b'\xe2\x80\x9a', # ‚
        0x83 : b'\xc6\x92',     # ƒ
        0x84 : b'\xe2\x80\x9e', # „
        0x85 : b'\xe2\x80\xa6', # …
        0x86 : b'\xe2\x80\xa0', # †
        0x87 : b'\xe2\x80\xa1', # ‡
        0x88 : b'\xcb\x86',     # ˆ
        0x89 : b'\xe2\x80\xb0', # ‰
        0x8a : b'\xc5\xa0',     # Š
        0x8b : b'\xe2\x80\xb9', # ‹
        0x8c : b'\xc5\x92',     # Œ
        0x8e : b'\xc5\xbd',     # Ž
        0x91 : b'\xe2\x80\x98', # ‘
        0x92 : b'\xe2\x80\x99', # ’
        0x93 : b'\xe2\x80\x9c', # “
        0x94 : b'\xe2\x80\x9d', # ”
        0x95 : b'\xe2\x80\xa2', # •
        0x96 : b'\xe2\x80\x93', # –
        0x97 : b'\xe2\x80\x94', # —
        0x98 : b'\xcb\x9c',     # ˜
        0x99 : b'\xe2\x84\xa2', # ™
        0x9a : b'\xc5\xa1',     # š
        0x9b : b'\xe2\x80\xba', # ›
        0x9c : b'\xc5\x93',     # œ
        0x9e : b'\xc5\xbe',     # ž
        0x9f : b'\xc5\xb8',     # Ÿ
        0xa0 : b'\xc2\xa0',     # non-breaking space
        0xa1 : b'\xc2\xa1',     # ¡
        0xa2 : b'\xc2\xa2',     # ¢
        0xa3 : b'\xc2\xa3',     # £
        0xa4 : b'\xc2\xa4',     # ¤
        0xa5 : b'\xc2\xa5',     # ¥
        0xa6 : b'\xc2\xa6',     # ¦
        0xa7 : b'\xc2\xa7',     # §
        0xa8 : b'\xc2\xa8',     # ¨
        0xa9 : b'\xc2\xa9',     # ©
        0xaa : b'\xc2\xaa',     # ª
        0xab : b'\xc2\xab',     # «
        0xac : b'\xc2\xac',     # ¬
        0xad : b'\xc2\xad',     # soft hyphen
        0xae : b'\xc2\xae',     # ®
        0xaf : b'\xc2\xaf',     # ¯
        0xb0 : b'\xc2\xb0',     # °
        0xb1 : b'\xc2\xb1',     # ±
        0xb2 : b'\xc2\xb2',     # ²
        0xb3 : b'\xc2\xb3',     # ³
        0xb4 : b'\xc2\xb4',     # ´
        0xb5 : b'\xc2\xb5',     # µ
        0xb6 : b'\xc2\xb6',     # ¶
        0xb7 : b'\xc2\xb7',     # ·
        0xb8 : b'\xc2\xb8',     # ¸
        0xb9 : b'\xc2\xb9',     # ¹
        0xba : b'\xc2\xba',     # º
        0xbb : b'\xc2\xbb',     # »
        0xbc : b'\xc2\xbc',     # ¼
        0xbd : b'\xc2\xbd',     # ½
        0xbe : b'\xc2\xbe',     # ¾
        0xbf : b'\xc2\xbf',     # ¿
        0xc0 : b'\xc3\x80',     # À
        0xc1 : b'\xc3\x81',     # Á
        0xc2 : b'\xc3\x82',     # Â
        0xc3 : b'\xc3\x83',     # Ã
        0xc4 : b'\xc3\x84',     # Ä
        0xc5 : b'\xc3\x85',     # Å
        0xc6 : b'\xc3\x86',     # Æ
        0xc7 : b'\xc3\x87',     # Ç
        0xc8 : b'\xc3\x88',     # È
        0xc9 : b'\xc3\x89',     # É
        0xca : b'\xc3\x8a',     # Ê
        0xcb : b'\xc3\x8b',     # Ë
        0xcc : b'\xc3\x8c',     # Ì
        0xcd : b'\xc3\x8d',     # Í
        0xce : b'\xc3\x8e',     # Î
        0xcf : b'\xc3\x8f',     # Ï
        0xd0 : b'\xc3\x90',     # Ð
        0xd1 : b'\xc3\x91',     # Ñ
        0xd2 : b'\xc3\x92',     # Ò
        0xd3 : b'\xc3\x93',     # Ó
        0xd4 : b'\xc3\x94',     # Ô
        0xd5 : b'\xc3\x95',     # Õ
        0xd6 : b'\xc3\x96',     # Ö
        0xd7 : b'\xc3\x97',     # ×
        0xd8 : b'\xc3\x98',     # Ø
        0xd9 : b'\xc3\x99',     # Ù
        0xda : b'\xc3\x9a',     # Ú
        0xdb : b'\xc3\x9b',     # Û
        0xdc : b'\xc3\x9c',     # Ü
        0xdd : b'\xc3\x9d',     # Ý
        0xde : b'\xc3\x9e',     # Þ
        0xdf : b'\xc3\x9f',     # ß
        0xe0 : b'\xc3\xa0',     # à
        0xe1 : b'\xc3\xa1',     # á
        0xe2 : b'\xc3\xa2',     # â
        0xe3 : b'\xc3\xa3',     # ã
        0xe4 : b'\xc3\xa4',     # ä
        0xe5 : b'\xc3\xa5',     # å
        0xe6 : b'\xc3\xa6',     # æ
        0xe7 : b'\xc3\xa7',     # ç
        0xe8 : b'\xc3\xa8',     # è
        0xe9 : b'\xc3\xa9',     # é
        0xea : b'\xc3\xaa',     # ê
        0xeb : b'\xc3\xab',     # ë
        0xec : b'\xc3\xac',     # ì
        0xed : b'\xc3\xad',     # í
        0xee : b'\xc3\xae',     # î
        0xef : b'\xc3\xaf',     # ï
        0xf0 : b'\xc3\xb0',     # ð
        0xf1 : b'\xc3\xb1',     # ñ
        0xf2 : b'\xc3\xb2',     # ò
        0xf3 : b'\xc3\xb3',     # ó
        0xf4 : b'\xc3\xb4',     # ô
        0xf5 : b'\xc3\xb5',     # õ
        0xf6 : b'\xc3\xb6',     # ö
        0xf7 : b'\xc3\xb7',     # ÷
        0xf8 : b'\xc3\xb8',     # ø
        0xf9 : b'\xc3\xb9',     # ù
        0xfa : b'\xc3\xba',     # ú
        0xfb : b'\xc3\xbb',     # û
        0xfc : b'\xc3\xbc',     # ü
        0xfd : b'\xc3\xbd',     # ý
        0xfe : b'\xc3\xbe',     # þ
        }

    MULTIBYTE_MARKERS_AND_SIZES = [
        (0xc2, 0xdf, 2), # 2-byte characters start with a byte C2-DF
        (0xe0, 0xef, 3), # 3-byte characters start with E0-EF
        (0xf0, 0xf4, 4), # 4-byte characters start with F0-F4
        ]

    FIRST_MULTIBYTE_MARKER = MULTIBYTE_MARKERS_AND_SIZES[0][0]
    LAST_MULTIBYTE_MARKER = MULTIBYTE_MARKERS_AND_SIZES[-1][1]

    @classmethod
    def detwingle(cls, in_bytes, main_encoding="utf8",
                  embedded_encoding="windows-1252"):
        """Fix characters from one encoding embedded in some other encoding.

        Currently the only situation supported is Windows-1252 (or its
        subset ISO-8859-1), embedded in UTF-8.

        The input must be a bytestring. If you've already converted
        the document to Unicode, you're too late.

        The output is a bytestring in which `embedded_encoding`
        characters have been converted to their `main_encoding`
        equivalents.
        """
        if embedded_encoding.replace('_', '-').lower() not in (
            'windows-1252', 'windows_1252'):
            raise NotImplementedError(
                "Windows-1252 and ISO-8859-1 are the only currently supported "
                "embedded encodings.")

        if main_encoding.lower() not in ('utf8', 'utf-8'):
            raise NotImplementedError(
                "UTF-8 is the only currently supported main encoding.")

        byte_chunks = []

        chunk_start = 0
        pos = 0
        while pos < len(in_bytes):
            byte = in_bytes[pos]
            if not isinstance(byte, int):
                # Python 2.x
                byte = ord(byte)
            if (byte >= cls.FIRST_MULTIBYTE_MARKER
                and byte <= cls.LAST_MULTIBYTE_MARKER):
                # This is the start of a UTF-8 multibyte character. Skip
                # to the end.
                for start, end, size in cls.MULTIBYTE_MARKERS_AND_SIZES:
                    if byte >= start and byte <= end:
                        pos += size
                        break
            elif byte >= 0x80 and byte in cls.WINDOWS_1252_TO_UTF8:
                # We found a Windows-1252 character!
                # Save the string up to this point as a chunk.
                byte_chunks.append(in_bytes[chunk_start:pos])

                # Now translate the Windows-1252 character into UTF-8
                # and add it as another, one-byte chunk.
                byte_chunks.append(cls.WINDOWS_1252_TO_UTF8[byte])
                pos += 1
                chunk_start = pos
            else:
                # Go on to the next character.
                pos += 1
        if chunk_start == 0:
            # The string is unchanged.
            return in_bytes
        else:
            # Store the final chunk.
            byte_chunks.append(in_bytes[chunk_start:])
        return b''.join(byte_chunks)
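
# A usage sketch for detwingle() above, mirroring the Windows-1252-in-UTF-8
# scenario its docstring describes (u'\N{SNOWMAN}' encodes to a 3-byte UTF-8
# sequence; \x93 is a Windows-1252 curly quote):
#
#   snowman = u'\N{SNOWMAN}'.encode('utf8')
#   doc = snowman + b'\x93' + snowman
#   fixed = UnicodeDammit.detwingle(doc)
#   fixed.decode('utf8')   # -> u'\u2603\u201c\u2603'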
@@ -1,204 +0,0 @@
"""Diagnostic functions, mainly for use when doing tech support."""
import cProfile
from StringIO import StringIO
from HTMLParser import HTMLParser
import bs4
from bs4 import BeautifulSoup, __version__
from bs4.builder import builder_registry

import os
import pstats
import random
import tempfile
import time
import traceback
import sys
import cProfile

def diagnose(data):
    """Diagnostic suite for isolating common problems."""
    print "Diagnostic running on Beautiful Soup %s" % __version__
    print "Python version %s" % sys.version

    basic_parsers = ["html.parser", "html5lib", "lxml"]
    for name in basic_parsers:
        for builder in builder_registry.builders:
            if name in builder.features:
                break
        else:
            basic_parsers.remove(name)
            print (
                "I noticed that %s is not installed. Installing it may help." %
                name)

    if 'lxml' in basic_parsers:
        basic_parsers.append(["lxml", "xml"])
        from lxml import etree
        print "Found lxml version %s" % ".".join(map(str,etree.LXML_VERSION))

    if 'html5lib' in basic_parsers:
        import html5lib
        print "Found html5lib version %s" % html5lib.__version__

    if hasattr(data, 'read'):
        data = data.read()
    elif os.path.exists(data):
        print '"%s" looks like a filename. Reading data from the file.' % data
        data = open(data).read()
    elif data.startswith("http:") or data.startswith("https:"):
        print '"%s" looks like a URL. Beautiful Soup is not an HTTP client.' % data
        print "You need to use some other library to get the document behind the URL, and feed that document to Beautiful Soup."
        return
    print

    for parser in basic_parsers:
        print "Trying to parse your markup with %s" % parser
        success = False
        try:
            soup = BeautifulSoup(data, parser)
            success = True
        except Exception, e:
            print "%s could not parse the markup." % parser
            traceback.print_exc()
        if success:
            print "Here's what %s did with the markup:" % parser
            print soup.prettify()

        print "-" * 80

def lxml_trace(data, html=True, **kwargs):
    """Print out the lxml events that occur during parsing.

    This lets you see how lxml parses a document when no Beautiful
    Soup code is running.
    """
    from lxml import etree
    for event, element in etree.iterparse(StringIO(data), html=html, **kwargs):
        print("%s, %4s, %s" % (event, element.tag, element.text))

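# A minimal sketch (assumes lxml is installed): lxml_trace('<p>hi</p>')
# prints one line per parse event, something like
#
#   end,    p, hi
#   end, body, None
#   end, html, None
#
# since etree.iterparse() reports 'end' events by default and the HTML
# parser wraps the fragment in html/body elements.
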
class AnnouncingParser(HTMLParser):
    """Announces HTMLParser parse events, without doing anything else."""

    def _p(self, s):
        print(s)

    def handle_starttag(self, name, attrs):
        self._p("%s START" % name)

    def handle_endtag(self, name):
        self._p("%s END" % name)

    def handle_data(self, data):
        self._p("%s DATA" % data)

    def handle_charref(self, name):
        self._p("%s CHARREF" % name)

    def handle_entityref(self, name):
        self._p("%s ENTITYREF" % name)

    def handle_comment(self, data):
        self._p("%s COMMENT" % data)

    def handle_decl(self, data):
        self._p("%s DECL" % data)

    def unknown_decl(self, data):
        self._p("%s UNKNOWN-DECL" % data)

    def handle_pi(self, data):
        self._p("%s PI" % data)

def htmlparser_trace(data):
    """Print out the HTMLParser events that occur during parsing.

    This lets you see how HTMLParser parses a document when no
    Beautiful Soup code is running.
    """
    parser = AnnouncingParser()
    parser.feed(data)

_vowels = "aeiou"
_consonants = "bcdfghjklmnpqrstvwxyz"

def rword(length=5):
    "Generate a random word-like string."
    s = ''
    for i in range(length):
        if i % 2 == 0:
            t = _consonants
        else:
            t = _vowels
        s += random.choice(t)
    return s

def rsentence(length=4):
    "Generate a random sentence-like string."
    return " ".join(rword(random.randint(4,9)) for i in range(length))

def rdoc(num_elements=1000):
    """Randomly generate an invalid HTML document."""
    tag_names = ['p', 'div', 'span', 'i', 'b', 'script', 'table']
    elements = []
    for i in range(num_elements):
        choice = random.randint(0,3)
        if choice == 0:
            # New tag.
            tag_name = random.choice(tag_names)
            elements.append("<%s>" % tag_name)
        elif choice == 1:
            elements.append(rsentence(random.randint(1,4)))
        elif choice == 2:
            # Close a tag.
            tag_name = random.choice(tag_names)
            elements.append("</%s>" % tag_name)
    return "<html>" + "\n".join(elements) + "</html>"

def benchmark_parsers(num_elements=100000):
    """Very basic head-to-head performance benchmark."""
    print "Comparative parser benchmark on Beautiful Soup %s" % __version__
    data = rdoc(num_elements)
    print "Generated a large invalid HTML document (%d bytes)." % len(data)

    for parser in ["lxml", ["lxml", "html"], "html5lib", "html.parser"]:
        success = False
        try:
            a = time.time()
            soup = BeautifulSoup(data, parser)
            b = time.time()
            success = True
        except Exception, e:
            print "%s could not parse the markup." % parser
            traceback.print_exc()
        if success:
            print "BS4+%s parsed the markup in %.2fs." % (parser, b-a)

    from lxml import etree
    a = time.time()
    etree.HTML(data)
    b = time.time()
    print "Raw lxml parsed the markup in %.2fs." % (b-a)

    import html5lib
    parser = html5lib.HTMLParser()
    a = time.time()
    parser.parse(data)
    b = time.time()
    print "Raw html5lib parsed the markup in %.2fs." % (b-a)

def profile(num_elements=100000, parser="lxml"):

    filehandle = tempfile.NamedTemporaryFile()
    filename = filehandle.name

    data = rdoc(num_elements)
    vars = dict(bs4=bs4, data=data, parser=parser)
    cProfile.runctx('bs4.BeautifulSoup(data, parser)' , vars, vars, filename)

    stats = pstats.Stats(filename)
    # stats.strip_dirs()
    stats.sort_stats("cumulative")
    stats.print_stats('_html5lib|bs4', 50)

if __name__ == '__main__':
    diagnose(sys.stdin.read())
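
# A usage sketch for the diagnostics above (assumes bs4 is importable; the
# output depends on which parsers are installed in the environment):
#
#   from bs4.diagnose import diagnose
#   diagnose('<p>Hello world')
#   # prints the Beautiful Soup and Python versions, then shows what each
#   # available parser does with the markup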
File diff suppressed because it is too large
@@ -1,592 +0,0 @@
"""Helper classes for tests."""

import copy
import functools
import unittest
from unittest import TestCase
from bs4 import BeautifulSoup
from bs4.element import (
    CharsetMetaAttributeValue,
    Comment,
    ContentMetaAttributeValue,
    Doctype,
    SoupStrainer,
)

from bs4.builder import HTMLParserTreeBuilder
default_builder = HTMLParserTreeBuilder


class SoupTest(unittest.TestCase):

    @property
    def default_builder(self):
        return default_builder()

    def soup(self, markup, **kwargs):
        """Build a Beautiful Soup object from markup."""
        builder = kwargs.pop('builder', self.default_builder)
        return BeautifulSoup(markup, builder=builder, **kwargs)

    def document_for(self, markup):
        """Turn an HTML fragment into a document.

        The details depend on the builder.
        """
        return self.default_builder.test_fragment_to_document(markup)

    def assertSoupEquals(self, to_parse, compare_parsed_to=None):
        builder = self.default_builder
        obj = BeautifulSoup(to_parse, builder=builder)
        if compare_parsed_to is None:
            compare_parsed_to = to_parse

        self.assertEqual(obj.decode(), self.document_for(compare_parsed_to))


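# A minimal sketch of how a concrete test case builds on SoupTest above
# (hypothetical test class; default_builder here is HTMLParserTreeBuilder):
#
#   class MyBuilderTest(SoupTest):
#       def test_paragraph(self):
#           soup = self.soup('<p>foo</p>')
#           self.assertEqual('foo', soup.p.string)
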
class HTMLTreeBuilderSmokeTest(object):
|
||||
|
||||
"""A basic test of a treebuilder's competence.
|
||||
|
||||
Any HTML treebuilder, present or future, should be able to pass
|
||||
these tests. With invalid markup, there's room for interpretation,
|
||||
and different parsers can handle it differently. But with the
|
||||
markup in these tests, there's not much room for interpretation.
|
||||
"""
|
||||
|
||||
def assertDoctypeHandled(self, doctype_fragment):
|
||||
"""Assert that a given doctype string is handled correctly."""
|
||||
doctype_str, soup = self._document_with_doctype(doctype_fragment)
|
||||
|
||||
# Make sure a Doctype object was created.
|
||||
doctype = soup.contents[0]
|
||||
self.assertEqual(doctype.__class__, Doctype)
|
||||
self.assertEqual(doctype, doctype_fragment)
|
||||
self.assertEqual(str(soup)[:len(doctype_str)], doctype_str)
|
||||
|
||||
# Make sure that the doctype was correctly associated with the
|
||||
# parse tree and that the rest of the document parsed.
|
||||
self.assertEqual(soup.p.contents[0], 'foo')
|
||||
|
||||
def _document_with_doctype(self, doctype_fragment):
|
||||
"""Generate and parse a document with the given doctype."""
|
||||
doctype = '<!DOCTYPE %s>' % doctype_fragment
|
||||
markup = doctype + '\n<p>foo</p>'
|
||||
soup = self.soup(markup)
|
||||
return doctype, soup
|
||||
|
||||
def test_normal_doctypes(self):
|
||||
"""Make sure normal, everyday HTML doctypes are handled correctly."""
|
||||
self.assertDoctypeHandled("html")
|
||||
self.assertDoctypeHandled(
|
||||
'html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"')
|
||||
|
||||
def test_empty_doctype(self):
|
||||
soup = self.soup("<!DOCTYPE>")
|
||||
doctype = soup.contents[0]
|
||||
self.assertEqual("", doctype.strip())
|
||||
|
||||
def test_public_doctype_with_url(self):
|
||||
doctype = 'html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"'
|
||||
self.assertDoctypeHandled(doctype)
|
||||
|
||||
def test_system_doctype(self):
|
||||
self.assertDoctypeHandled('foo SYSTEM "http://www.example.com/"')
|
||||
|
||||
def test_namespaced_system_doctype(self):
|
||||
# We can handle a namespaced doctype with a system ID.
|
||||
self.assertDoctypeHandled('xsl:stylesheet SYSTEM "htmlent.dtd"')
|
||||
|
||||
def test_namespaced_public_doctype(self):
|
||||
# Test a namespaced doctype with a public id.
|
||||
self.assertDoctypeHandled('xsl:stylesheet PUBLIC "htmlent.dtd"')
|
||||
|
||||
def test_real_xhtml_document(self):
|
||||
"""A real XHTML document should come out more or less the same as it went in."""
|
||||
markup = b"""<?xml version="1.0" encoding="utf-8"?>
|
||||
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN">
|
||||
<html xmlns="http://www.w3.org/1999/xhtml">
|
||||
<head><title>Hello.</title></head>
|
||||
<body>Goodbye.</body>
|
||||
</html>"""
|
||||
soup = self.soup(markup)
|
||||
self.assertEqual(
|
||||
soup.encode("utf-8").replace(b"\n", b""),
|
||||
markup.replace(b"\n", b""))
|
||||
|
||||
def test_deepcopy(self):
|
||||
"""Make sure you can copy the tree builder.
|
||||
|
||||
This is important because the builder is part of a
|
||||
BeautifulSoup object, and we want to be able to copy that.
|
||||
"""
|
||||
copy.deepcopy(self.default_builder)
|
||||
|
||||
def test_p_tag_is_never_empty_element(self):
|
||||
"""A <p> tag is never designated as an empty-element tag.
|
||||
|
||||
Even if the markup shows it as an empty-element tag, it
|
||||
shouldn't be presented that way.
|
||||
"""
|
||||
soup = self.soup("<p/>")
|
||||
self.assertFalse(soup.p.is_empty_element)
|
||||
self.assertEqual(str(soup.p), "<p></p>")
|
||||
|
||||
def test_unclosed_tags_get_closed(self):
|
||||
"""A tag that's not closed by the end of the document should be closed.
|
||||
|
||||
This applies to all tags except empty-element tags.
|
||||
"""
|
||||
self.assertSoupEquals("<p>", "<p></p>")
|
||||
self.assertSoupEquals("<b>", "<b></b>")
|
||||
|
||||
self.assertSoupEquals("<br>", "<br/>")
|
||||
|
||||
def test_br_is_always_empty_element_tag(self):
|
||||
"""A <br> tag is designated as an empty-element tag.
|
||||
|
||||
Some parsers treat <br></br> as one <br/> tag, some parsers as
|
||||
two tags, but it should always be an empty-element tag.
|
||||
"""
|
||||
soup = self.soup("<br></br>")
|
||||
self.assertTrue(soup.br.is_empty_element)
|
||||
self.assertEqual(str(soup.br), "<br/>")
|
||||
|
||||
def test_nested_formatting_elements(self):
|
||||
self.assertSoupEquals("<em><em></em></em>")
|
||||
|
||||
def test_comment(self):
|
||||
# Comments are represented as Comment objects.
|
||||
markup = "<p>foo<!--foobar-->baz</p>"
|
||||
self.assertSoupEquals(markup)
|
||||
|
||||
soup = self.soup(markup)
|
||||
comment = soup.find(text="foobar")
|
||||
self.assertEqual(comment.__class__, Comment)
|
||||
|
||||
# The comment is properly integrated into the tree.
|
||||
foo = soup.find(text="foo")
|
||||
self.assertEqual(comment, foo.next_element)
|
||||
baz = soup.find(text="baz")
|
||||
self.assertEqual(comment, baz.previous_element)
|
||||
|
||||
def test_preserved_whitespace_in_pre_and_textarea(self):
|
||||
"""Whitespace must be preserved in <pre> and <textarea> tags."""
|
||||
self.assertSoupEquals("<pre> </pre>")
|
||||
self.assertSoupEquals("<textarea> woo </textarea>")
|
||||
|
||||
def test_nested_inline_elements(self):
|
||||
"""Inline elements can be nested indefinitely."""
|
||||
b_tag = "<b>Inside a B tag</b>"
|
||||
self.assertSoupEquals(b_tag)
|
||||
|
||||
nested_b_tag = "<p>A <i>nested <b>tag</b></i></p>"
|
||||
self.assertSoupEquals(nested_b_tag)
|
||||
|
||||
double_nested_b_tag = "<p>A <a>doubly <i>nested <b>tag</b></i></a></p>"
|
||||
self.assertSoupEquals(nested_b_tag)
|
||||
|
||||
def test_nested_block_level_elements(self):
|
||||
"""Block elements can be nested."""
|
||||
soup = self.soup('<blockquote><p><b>Foo</b></p></blockquote>')
|
||||
blockquote = soup.blockquote
|
||||
self.assertEqual(blockquote.p.b.string, 'Foo')
|
||||
self.assertEqual(blockquote.b.string, 'Foo')
|
||||
|
||||
def test_correctly_nested_tables(self):
|
||||
"""One table can go inside another one."""
|
||||
markup = ('<table id="1">'
|
||||
'<tr>'
|
||||
"<td>Here's another table:"
|
||||
'<table id="2">'
|
||||
'<tr><td>foo</td></tr>'
|
||||
'</table></td>')
|
||||
|
||||
self.assertSoupEquals(
|
||||
markup,
|
||||
'<table id="1"><tr><td>Here\'s another table:'
|
||||
'<table id="2"><tr><td>foo</td></tr></table>'
|
||||
'</td></tr></table>')
|
||||
|
||||
self.assertSoupEquals(
|
||||
"<table><thead><tr><td>Foo</td></tr></thead>"
|
||||
"<tbody><tr><td>Bar</td></tr></tbody>"
|
||||
"<tfoot><tr><td>Baz</td></tr></tfoot></table>")
|
||||
|
||||
def test_deeply_nested_multivalued_attribute(self):
|
||||
# html5lib can set the attributes of the same tag many times
|
||||
# as it rearranges the tree. This has caused problems with
|
||||
# multivalued attributes.
|
||||
markup = '<table><div><div class="css"></div></div></table>'
|
||||
soup = self.soup(markup)
|
||||
self.assertEqual(["css"], soup.div.div['class'])
|
||||
|
||||
def test_angle_brackets_in_attribute_values_are_escaped(self):
|
||||
self.assertSoupEquals('<a b="<a>"></a>', '<a b="<a>"></a>')
|
||||
|
||||
def test_entities_in_attributes_converted_to_unicode(self):
|
||||
expect = u'<p id="pi\N{LATIN SMALL LETTER N WITH TILDE}ata"></p>'
|
||||
self.assertSoupEquals('<p id="piñata"></p>', expect)
|
||||
self.assertSoupEquals('<p id="piñata"></p>', expect)
|
||||
self.assertSoupEquals('<p id="piñata"></p>', expect)
|
||||
self.assertSoupEquals('<p id="piñata"></p>', expect)
|
||||
|
||||
def test_entities_in_text_converted_to_unicode(self):
|
||||
expect = u'<p>pi\N{LATIN SMALL LETTER N WITH TILDE}ata</p>'
|
||||
self.assertSoupEquals("<p>piñata</p>", expect)
|
||||
self.assertSoupEquals("<p>piñata</p>", expect)
|
||||
self.assertSoupEquals("<p>piñata</p>", expect)
|
||||
self.assertSoupEquals("<p>piñata</p>", expect)
|
||||
|
||||
def test_quot_entity_converted_to_quotation_mark(self):
|
||||
self.assertSoupEquals("<p>I said "good day!"</p>",
|
||||
'<p>I said "good day!"</p>')
|
||||
|
||||
def test_out_of_range_entity(self):
|
||||
expect = u"\N{REPLACEMENT CHARACTER}"
|
||||
self.assertSoupEquals("�", expect)
|
||||
self.assertSoupEquals("�", expect)
|
||||
self.assertSoupEquals("�", expect)
|
||||
|
||||
def test_multipart_strings(self):
|
||||
"Mostly to prevent a recurrence of a bug in the html5lib treebuilder."
|
||||
soup = self.soup("<html><h2>\nfoo</h2><p></p></html>")
|
||||
self.assertEqual("p", soup.h2.string.next_element.name)
|
||||
self.assertEqual("p", soup.p.name)
|
||||
|
||||
def test_basic_namespaces(self):
|
||||
"""Parsers don't need to *understand* namespaces, but at the
|
||||
very least they should not choke on namespaces or lose
|
||||
data."""
|
||||
|
||||
markup = b'<html xmlns="http://www.w3.org/1999/xhtml" xmlns:mathml="http://www.w3.org/1998/Math/MathML" xmlns:svg="http://www.w3.org/2000/svg"><head></head><body><mathml:msqrt>4</mathml:msqrt><b svg:fill="red"></b></body></html>'
|
||||
soup = self.soup(markup)
|
||||
self.assertEqual(markup, soup.encode())
|
||||
html = soup.html
|
||||
self.assertEqual('http://www.w3.org/1999/xhtml', soup.html['xmlns'])
|
||||
self.assertEqual(
|
||||
'http://www.w3.org/1998/Math/MathML', soup.html['xmlns:mathml'])
|
||||
self.assertEqual(
|
||||
'http://www.w3.org/2000/svg', soup.html['xmlns:svg'])
|
||||
|
||||
def test_multivalued_attribute_value_becomes_list(self):
|
||||
markup = b'<a class="foo bar">'
|
||||
soup = self.soup(markup)
|
||||
self.assertEqual(['foo', 'bar'], soup.a['class'])
|
||||
|
||||
#
|
||||
# Generally speaking, tests below this point are more tests of
|
||||
# Beautiful Soup than tests of the tree builders. But parsers are
|
||||
# weird, so we run these tests separately for every tree builder
|
||||
# to detect any differences between them.
|
||||
#
|
||||
|
||||
def test_can_parse_unicode_document(self):
|
||||
# A seemingly innocuous document... but it's in Unicode! And
|
||||
# it contains characters that can't be represented in the
|
||||
# encoding found in the declaration! The horror!
|
||||
markup = u'<html><head><meta encoding="euc-jp"></head><body>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</body>'
|
||||
soup = self.soup(markup)
|
||||
self.assertEqual(u'Sacr\xe9 bleu!', soup.body.string)
|
||||
|
||||
def test_soupstrainer(self):
|
||||
"""Parsers should be able to work with SoupStrainers."""
|
||||
strainer = SoupStrainer("b")
|
||||
soup = self.soup("A <b>bold</b> <meta/> <i>statement</i>",
|
||||
parse_only=strainer)
|
||||
self.assertEqual(soup.decode(), "<b>bold</b>")
|
||||
|
||||
def test_single_quote_attribute_values_become_double_quotes(self):
|
||||
self.assertSoupEquals("<foo attr='bar'></foo>",
|
||||
'<foo attr="bar"></foo>')
|
||||
|
||||
def test_attribute_values_with_nested_quotes_are_left_alone(self):
|
||||
text = """<foo attr='bar "brawls" happen'>a</foo>"""
|
||||
self.assertSoupEquals(text)
|
||||
|
||||
def test_attribute_values_with_double_nested_quotes_get_quoted(self):
|
||||
text = """<foo attr='bar "brawls" happen'>a</foo>"""
|
||||
soup = self.soup(text)
|
||||
soup.foo['attr'] = 'Brawls happen at "Bob\'s Bar"'
|
||||
self.assertSoupEquals(
|
||||
soup.foo.decode(),
|
||||
"""<foo attr="Brawls happen at "Bob\'s Bar"">a</foo>""")
|
||||
|
||||
def test_ampersand_in_attribute_value_gets_escaped(self):
|
||||
self.assertSoupEquals('<this is="really messed up & stuff"></this>',
|
||||
'<this is="really messed up & stuff"></this>')
|
||||
|
||||
self.assertSoupEquals(
|
||||
'<a href="http://example.org?a=1&b=2;3">foo</a>',
|
||||
'<a href="http://example.org?a=1&b=2;3">foo</a>')
|
||||
|
||||
def test_escaped_ampersand_in_attribute_value_is_left_alone(self):
|
||||
self.assertSoupEquals('<a href="http://example.org?a=1&b=2;3"></a>')

    def test_entities_in_strings_converted_during_parsing(self):
        # Both XML and HTML entities are converted to Unicode characters
        # during parsing.
        text = "<p>&lt;&lt;sacr&eacute; bleu!&gt;&gt;</p>"
        expected = u"<p><<sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>></p>"
        self.assertSoupEquals(text, expected)

    def test_smart_quotes_converted_on_the_way_in(self):
        # Microsoft smart quotes are converted to Unicode characters during
        # parsing.
        quote = b"<p>\x91Foo\x92</p>"
        soup = self.soup(quote)
        self.assertEqual(
            soup.p.string,
            u"\N{LEFT SINGLE QUOTATION MARK}Foo\N{RIGHT SINGLE QUOTATION MARK}")

    def test_non_breaking_spaces_converted_on_the_way_in(self):
        soup = self.soup("<a>&nbsp;&nbsp;</a>")
        self.assertEqual(soup.a.string, u"\N{NO-BREAK SPACE}" * 2)

    def test_entities_converted_on_the_way_out(self):
        text = "<p>&lt;&lt;sacr&eacute; bleu!&gt;&gt;</p>"
        expected = u"<p>&lt;&lt;sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</p>".encode("utf-8")
        soup = self.soup(text)
        self.assertEqual(soup.p.encode("utf-8"), expected)

    def test_real_iso_latin_document(self):
        # Smoke test of interrelated functionality, using an
        # easy-to-understand document.

        # Here it is in Unicode. Note that it claims to be in ISO-Latin-1.
        unicode_html = u'<html><head><meta content="text/html; charset=ISO-Latin-1" http-equiv="Content-type"/></head><body><p>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</p></body></html>'

        # That's because we're going to encode it into ISO-Latin-1, and use
        # that to test.
        iso_latin_html = unicode_html.encode("iso-8859-1")

        # Parse the ISO-Latin-1 HTML.
        soup = self.soup(iso_latin_html)
        # Encode it to UTF-8.
        result = soup.encode("utf-8")

        # What do we expect the result to look like? Well, it would
        # look like unicode_html, except that the META tag would say
        # UTF-8 instead of ISO-Latin-1.
        expected = unicode_html.replace("ISO-Latin-1", "utf-8")

        # And, of course, it would be in UTF-8, not Unicode.
        expected = expected.encode("utf-8")

        # Ta-da!
        self.assertEqual(result, expected)

    def test_real_shift_jis_document(self):
        # Smoke test to make sure the parser can handle a document in
        # Shift-JIS encoding, without choking.
        shift_jis_html = (
            b'<html><head></head><body><pre>'
            b'\x82\xb1\x82\xea\x82\xcdShift-JIS\x82\xc5\x83R\x81[\x83f'
            b'\x83B\x83\x93\x83O\x82\xb3\x82\xea\x82\xbd\x93\xfa\x96{\x8c'
            b'\xea\x82\xcc\x83t\x83@\x83C\x83\x8b\x82\xc5\x82\xb7\x81B'
            b'</pre></body></html>')
        unicode_html = shift_jis_html.decode("shift-jis")
        soup = self.soup(unicode_html)

        # Make sure the parse tree is correctly encoded to various
        # encodings.
        self.assertEqual(soup.encode("utf-8"), unicode_html.encode("utf-8"))
        self.assertEqual(soup.encode("euc_jp"), unicode_html.encode("euc_jp"))

    def test_real_hebrew_document(self):
        # A real-world test to make sure we can convert ISO-8859-8 (a
        # Hebrew encoding) to UTF-8.
        hebrew_document = b'<html><head><title>Hebrew (ISO 8859-8) in Visual Directionality</title></head><body><h1>Hebrew (ISO 8859-8) in Visual Directionality</h1>\xed\xe5\xec\xf9</body></html>'
        soup = self.soup(
            hebrew_document, from_encoding="iso8859-8")
        self.assertEqual(soup.original_encoding, 'iso8859-8')
        self.assertEqual(
            soup.encode('utf-8'),
            hebrew_document.decode("iso8859-8").encode("utf-8"))

    def test_meta_tag_reflects_current_encoding(self):
        # Here's the <meta> tag saying that a document is
        # encoded in Shift-JIS.
        meta_tag = ('<meta content="text/html; charset=x-sjis" '
                    'http-equiv="Content-type"/>')

        # Here's a document incorporating that meta tag.
        shift_jis_html = (
            '<html><head>\n%s\n'
            '<meta http-equiv="Content-language" content="ja"/>'
            '</head><body>Shift-JIS markup goes here.') % meta_tag
        soup = self.soup(shift_jis_html)

        # Parse the document, and the charset is seemingly unaffected.
        parsed_meta = soup.find('meta', {'http-equiv': 'Content-type'})
        content = parsed_meta['content']
        self.assertEqual('text/html; charset=x-sjis', content)

        # But that value is actually a ContentMetaAttributeValue object.
        self.assertTrue(isinstance(content, ContentMetaAttributeValue))

        # And it will take on a value that reflects its current
        # encoding.
        self.assertEqual('text/html; charset=utf8', content.encode("utf8"))

        # For the rest of the story, see TestSubstitutions in
        # test_tree.py.

    def test_html5_style_meta_tag_reflects_current_encoding(self):
        # Here's the <meta> tag saying that a document is
        # encoded in Shift-JIS.
        meta_tag = ('<meta id="encoding" charset="x-sjis" />')

        # Here's a document incorporating that meta tag.
        shift_jis_html = (
            '<html><head>\n%s\n'
            '<meta http-equiv="Content-language" content="ja"/>'
            '</head><body>Shift-JIS markup goes here.') % meta_tag
        soup = self.soup(shift_jis_html)

        # Parse the document, and the charset is seemingly unaffected.
        parsed_meta = soup.find('meta', id="encoding")
        charset = parsed_meta['charset']
        self.assertEqual('x-sjis', charset)

        # But that value is actually a CharsetMetaAttributeValue object.
        self.assertTrue(isinstance(charset, CharsetMetaAttributeValue))

        # And it will take on a value that reflects its current
        # encoding.
        self.assertEqual('utf8', charset.encode("utf8"))

    def test_tag_with_no_attributes_can_have_attributes_added(self):
        data = self.soup("<a>text</a>")
        data.a['foo'] = 'bar'
        self.assertEqual('<a foo="bar">text</a>', data.a.decode())
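
The two meta-tag tests above pin down the user-visible contract: a parsed charset declaration is a special attribute value that rewrites itself whenever the tree is encoded. A standalone sketch of that behavior, written as ordinary bs4 usage rather than part of the deleted test file (the markup and variable names here are ours):

    from bs4 import BeautifulSoup

    html = ('<html><head><meta charset="x-sjis"/></head>'
            '<body>Shift-JIS markup goes here.</body></html>')
    soup = BeautifulSoup(html, 'html.parser')
    # Re-encoding the tree rewrites the declaration to match the bytes
    # actually produced: the output contains <meta charset="euc-jp"/>.
    print(soup.encode('euc-jp'))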

class XMLTreeBuilderSmokeTest(object):

    def test_docstring_generated(self):
        soup = self.soup("<root/>")
        self.assertEqual(
            soup.encode(), b'<?xml version="1.0" encoding="utf-8"?>\n<root/>')

    def test_real_xhtml_document(self):
        """A real XHTML document should come out *exactly* the same as it went in."""
        markup = b"""<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN">
<html xmlns="http://www.w3.org/1999/xhtml">
<head><title>Hello.</title></head>
<body>Goodbye.</body>
</html>"""
        soup = self.soup(markup)
        self.assertEqual(
            soup.encode("utf-8"), markup)

    def test_formatter_processes_script_tag_for_xml_documents(self):
        doc = """
  <script type="text/javascript">
  </script>
"""
        soup = BeautifulSoup(doc, "xml")
        # lxml would have stripped this while parsing, but we can add
        # it later.
        soup.script.string = 'console.log("< < hey > > ");'
        encoded = soup.encode()
        self.assertTrue(b"&lt; &lt; hey &gt; &gt;" in encoded)

    def test_can_parse_unicode_document(self):
        markup = u'<?xml version="1.0" encoding="euc-jp"><root>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</root>'
        soup = self.soup(markup)
        self.assertEqual(u'Sacr\xe9 bleu!', soup.root.string)

    def test_popping_namespaced_tag(self):
        markup = '<rss xmlns:dc="foo"><dc:creator>b</dc:creator><dc:date>2012-07-02T20:33:42Z</dc:date><dc:rights>c</dc:rights><image>d</image></rss>'
        soup = self.soup(markup)
        self.assertEqual(
            unicode(soup.rss), markup)

    def test_docstring_includes_correct_encoding(self):
        soup = self.soup("<root/>")
        self.assertEqual(
            soup.encode("latin1"),
            b'<?xml version="1.0" encoding="latin1"?>\n<root/>')

    def test_large_xml_document(self):
        """A large XML document should come out the same as it went in."""
        markup = (b'<?xml version="1.0" encoding="utf-8"?>\n<root>'
                  + b'0' * (2**12)
                  + b'</root>')
        soup = self.soup(markup)
        self.assertEqual(soup.encode("utf-8"), markup)

    def test_tags_are_empty_element_if_and_only_if_they_are_empty(self):
        self.assertSoupEquals("<p>", "<p/>")
        self.assertSoupEquals("<p>foo</p>")

    def test_namespaces_are_preserved(self):
        markup = '<root xmlns:a="http://example.com/" xmlns:b="http://example.net/"><a:foo>This tag is in the a namespace</a:foo><b:foo>This tag is in the b namespace</b:foo></root>'
        soup = self.soup(markup)
        root = soup.root
        self.assertEqual("http://example.com/", root['xmlns:a'])
        self.assertEqual("http://example.net/", root['xmlns:b'])

    def test_closing_namespaced_tag(self):
        markup = '<p xmlns:dc="http://purl.org/dc/elements/1.1/"><dc:date>20010504</dc:date></p>'
        soup = self.soup(markup)
        self.assertEqual(unicode(soup.p), markup)

    def test_namespaced_attributes(self):
        markup = '<foo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><bar xsi:schemaLocation="http://www.example.com"/></foo>'
        soup = self.soup(markup)
        self.assertEqual(unicode(soup.foo), markup)

    def test_namespaced_attributes_xml_namespace(self):
        markup = '<foo xml:lang="fr">bar</foo>'
        soup = self.soup(markup)
        self.assertEqual(unicode(soup.foo), markup)

class HTML5TreeBuilderSmokeTest(HTMLTreeBuilderSmokeTest):
    """Smoke test for a tree builder that supports HTML5."""

    def test_real_xhtml_document(self):
        # Since XHTML is not HTML5, HTML5 parsers are not tested to handle
        # XHTML documents in any particular way.
        pass

    def test_html_tags_have_namespace(self):
        markup = "<a>"
        soup = self.soup(markup)
        self.assertEqual("http://www.w3.org/1999/xhtml", soup.a.namespace)

    def test_svg_tags_have_namespace(self):
        markup = '<svg><circle/></svg>'
        soup = self.soup(markup)
        namespace = "http://www.w3.org/2000/svg"
        self.assertEqual(namespace, soup.svg.namespace)
        self.assertEqual(namespace, soup.circle.namespace)

    def test_mathml_tags_have_namespace(self):
        markup = '<math><msqrt>5</msqrt></math>'
        soup = self.soup(markup)
        namespace = 'http://www.w3.org/1998/Math/MathML'
        self.assertEqual(namespace, soup.math.namespace)
        self.assertEqual(namespace, soup.msqrt.namespace)

    def test_xml_declaration_becomes_comment(self):
        markup = '<?xml version="1.0" encoding="utf-8"?><html></html>'
        soup = self.soup(markup)
        self.assertTrue(isinstance(soup.contents[0], Comment))
        self.assertEqual(soup.contents[0], '?xml version="1.0" encoding="utf-8"?')
        self.assertEqual("html", soup.contents[0].next_element.name)

def skipIf(condition, reason):
    def nothing(test, *args, **kwargs):
        return None

    def decorator(test_item):
        if condition:
            return nothing
        else:
            return test_item

    return decorator
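
This bundled skipIf mirrors unittest.skipIf for Python versions that lack it: when the condition holds, the decorated test is replaced by a do-nothing function. A minimal usage sketch (the flag and test below are hypothetical, not from the deleted file):

    HAVE_LXML = False  # hypothetical feature flag

    class ExampleTest(SoupTest):

        @skipIf(not HAVE_LXML, "lxml seems not to be present, skipping.")
        def test_that_needs_lxml(self):
            pass  # replaced by a no-op while HAVE_LXML is False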

@@ -1 +0,0 @@
"The beautifulsoup tests."

@@ -1,141 +0,0 @@
"""Tests of the builder registry."""

import unittest

from bs4 import BeautifulSoup
from bs4.builder import (
    builder_registry as registry,
    HTMLParserTreeBuilder,
    TreeBuilderRegistry,
)

try:
    from bs4.builder import HTML5TreeBuilder
    HTML5LIB_PRESENT = True
except ImportError:
    HTML5LIB_PRESENT = False

try:
    from bs4.builder import (
        LXMLTreeBuilderForXML,
        LXMLTreeBuilder,
        )
    LXML_PRESENT = True
except ImportError:
    LXML_PRESENT = False


class BuiltInRegistryTest(unittest.TestCase):
    """Test the built-in registry with the default builders registered."""

    def test_combination(self):
        if LXML_PRESENT:
            self.assertEqual(registry.lookup('fast', 'html'),
                             LXMLTreeBuilder)

        if LXML_PRESENT:
            self.assertEqual(registry.lookup('permissive', 'xml'),
                             LXMLTreeBuilderForXML)
        self.assertEqual(registry.lookup('strict', 'html'),
                         HTMLParserTreeBuilder)
        if HTML5LIB_PRESENT:
            self.assertEqual(registry.lookup('html5lib', 'html'),
                             HTML5TreeBuilder)

    def test_lookup_by_markup_type(self):
        if LXML_PRESENT:
            self.assertEqual(registry.lookup('html'), LXMLTreeBuilder)
            self.assertEqual(registry.lookup('xml'), LXMLTreeBuilderForXML)
        else:
            self.assertEqual(registry.lookup('xml'), None)
            if HTML5LIB_PRESENT:
                self.assertEqual(registry.lookup('html'), HTML5TreeBuilder)
            else:
                self.assertEqual(registry.lookup('html'), HTMLParserTreeBuilder)

    def test_named_library(self):
        if LXML_PRESENT:
            self.assertEqual(registry.lookup('lxml', 'xml'),
                             LXMLTreeBuilderForXML)
            self.assertEqual(registry.lookup('lxml', 'html'),
                             LXMLTreeBuilder)
        if HTML5LIB_PRESENT:
            self.assertEqual(registry.lookup('html5lib'),
                             HTML5TreeBuilder)

        self.assertEqual(registry.lookup('html.parser'),
                         HTMLParserTreeBuilder)

    def test_beautifulsoup_constructor_does_lookup(self):
        # You can pass in a string.
        BeautifulSoup("", features="html")
        # Or a list of strings.
        BeautifulSoup("", features=["html", "fast"])

        # You'll get an exception if BS can't find an appropriate
        # builder.
        self.assertRaises(ValueError, BeautifulSoup,
                          "", features="no-such-feature")

class RegistryTest(unittest.TestCase):
    """Test the TreeBuilderRegistry class in general."""

    def setUp(self):
        self.registry = TreeBuilderRegistry()

    def builder_for_features(self, *feature_list):
        cls = type('Builder_' + '_'.join(feature_list),
                   (object,), {'features' : feature_list})

        self.registry.register(cls)
        return cls

    def test_register_with_no_features(self):
        builder = self.builder_for_features()

        # Since the builder advertises no features, you can't find it
        # by looking up features.
        self.assertEqual(self.registry.lookup('foo'), None)

        # But you can find it by doing a lookup with no features, if
        # this happens to be the only registered builder.
        self.assertEqual(self.registry.lookup(), builder)

    def test_register_with_features_makes_lookup_succeed(self):
        builder = self.builder_for_features('foo', 'bar')
        self.assertEqual(self.registry.lookup('foo'), builder)
        self.assertEqual(self.registry.lookup('bar'), builder)

    def test_lookup_fails_when_no_builder_implements_feature(self):
        builder = self.builder_for_features('foo', 'bar')
        self.assertEqual(self.registry.lookup('baz'), None)

    def test_lookup_gets_most_recent_registration_when_no_feature_specified(self):
        builder1 = self.builder_for_features('foo')
        builder2 = self.builder_for_features('bar')
        self.assertEqual(self.registry.lookup(), builder2)

    def test_lookup_fails_when_no_tree_builders_registered(self):
        self.assertEqual(self.registry.lookup(), None)

    def test_lookup_gets_most_recent_builder_supporting_all_features(self):
        has_one = self.builder_for_features('foo')
        has_the_other = self.builder_for_features('bar')
        has_both_early = self.builder_for_features('foo', 'bar', 'baz')
        has_both_late = self.builder_for_features('foo', 'bar', 'quux')
        lacks_one = self.builder_for_features('bar')
        has_the_other = self.builder_for_features('foo')

        # There are two builders featuring 'foo' and 'bar', but
        # the one that also features 'quux' was registered later.
        self.assertEqual(self.registry.lookup('foo', 'bar'),
                         has_both_late)

        # There is only one builder featuring 'foo', 'bar', and 'baz'.
        self.assertEqual(self.registry.lookup('foo', 'bar', 'baz'),
                         has_both_early)

    def test_lookup_fails_when_cannot_reconcile_requested_features(self):
        builder1 = self.builder_for_features('foo', 'bar')
        builder2 = self.builder_for_features('foo', 'baz')
        self.assertEqual(self.registry.lookup('bar', 'baz'), None)
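
Read together, these registry tests specify the lookup rule precisely: registration order matters, and a lookup returns the most recently registered builder that advertises every requested feature (or simply the most recent builder when no feature is given). A compact sketch of a structure with those semantics (ours, not bs4's actual TreeBuilderRegistry):

    class TinyRegistry(object):
        def __init__(self):
            self.builders = []

        def register(self, builder):
            self.builders.append(builder)

        def lookup(self, *features):
            # Newest registrations win, so walk the list backwards.
            # With no features, all() is vacuously true and the most
            # recent builder is returned; an empty registry yields None.
            for builder in reversed(self.builders):
                if all(f in builder.features for f in features):
                    return builder
            return None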

@@ -1,36 +0,0 @@
"Test harness for doctests."

# pylint: disable-msg=E0611,W0142

__metaclass__ = type
__all__ = [
    'additional_tests',
    ]

import atexit
import doctest
import os
#from pkg_resources import (
#    resource_filename, resource_exists, resource_listdir, cleanup_resources)
import unittest

DOCTEST_FLAGS = (
    doctest.ELLIPSIS |
    doctest.NORMALIZE_WHITESPACE |
    doctest.REPORT_NDIFF)


# def additional_tests():
#     "Run the doc tests (README.txt and docs/*, if any exist)"
#     doctest_files = [
#         os.path.abspath(resource_filename('bs4', 'README.txt'))]
#     if resource_exists('bs4', 'docs'):
#         for name in resource_listdir('bs4', 'docs'):
#             if name.endswith('.txt'):
#                 doctest_files.append(
#                     os.path.abspath(
#                         resource_filename('bs4', 'docs/%s' % name)))
#     kwargs = dict(module_relative=False, optionflags=DOCTEST_FLAGS)
#     atexit.register(cleanup_resources)
#     return unittest.TestSuite((
#         doctest.DocFileSuite(*doctest_files, **kwargs)))

@@ -1,85 +0,0 @@
"""Tests to ensure that the html5lib tree builder generates good trees."""

import warnings

try:
    from bs4.builder import HTML5TreeBuilder
    HTML5LIB_PRESENT = True
except ImportError, e:
    HTML5LIB_PRESENT = False
from bs4.element import SoupStrainer
from bs4.testing import (
    HTML5TreeBuilderSmokeTest,
    SoupTest,
    skipIf,
)

@skipIf(
    not HTML5LIB_PRESENT,
    "html5lib seems not to be present, not testing its tree builder.")
class HTML5LibBuilderSmokeTest(SoupTest, HTML5TreeBuilderSmokeTest):
    """See ``HTML5TreeBuilderSmokeTest``."""

    @property
    def default_builder(self):
        return HTML5TreeBuilder()

    def test_soupstrainer(self):
        # The html5lib tree builder does not support SoupStrainers.
        strainer = SoupStrainer("b")
        markup = "<p>A <b>bold</b> statement.</p>"
        with warnings.catch_warnings(record=True) as w:
            soup = self.soup(markup, parse_only=strainer)
        self.assertEqual(
            soup.decode(), self.document_for(markup))

        self.assertTrue(
            "the html5lib tree builder doesn't support parse_only" in
            str(w[0].message))

    def test_correctly_nested_tables(self):
        """html5lib inserts <tbody> tags where other parsers don't."""
        markup = ('<table id="1">'
                  '<tr>'
                  "<td>Here's another table:"
                  '<table id="2">'
                  '<tr><td>foo</td></tr>'
                  '</table></td>')

        self.assertSoupEquals(
            markup,
            '<table id="1"><tbody><tr><td>Here\'s another table:'
            '<table id="2"><tbody><tr><td>foo</td></tr></tbody></table>'
            '</td></tr></tbody></table>')

        self.assertSoupEquals(
            "<table><thead><tr><td>Foo</td></tr></thead>"
            "<tbody><tr><td>Bar</td></tr></tbody>"
            "<tfoot><tr><td>Baz</td></tr></tfoot></table>")

    def test_xml_declaration_followed_by_doctype(self):
        markup = '''<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE html>
<html>
  <head>
  </head>
  <body>
   <p>foo</p>
  </body>
</html>'''
        soup = self.soup(markup)
        # Verify that we can reach the <p> tag; this means the tree is connected.
        self.assertEqual(b"<p>foo</p>", soup.p.encode())

    def test_reparented_markup(self):
        markup = '<p><em>foo</p>\n<p>bar<a></a></em></p>'
        soup = self.soup(markup)
        self.assertEqual(u"<body><p><em>foo</em></p><em>\n</em><p><em>bar<a></a></em></p></body>", soup.body.decode())
        self.assertEqual(2, len(soup.find_all('p')))

    def test_reparented_markup_ends_with_whitespace(self):
        markup = '<p><em>foo</p>\n<p>bar<a></a></em></p>\n'
        soup = self.soup(markup)
        self.assertEqual(u"<body><p><em>foo</em></p><em>\n</em><p><em>bar<a></a></em></p>\n</body>", soup.body.decode())
        self.assertEqual(2, len(soup.find_all('p')))

@@ -1,19 +0,0 @@
"""Tests to ensure that the html.parser tree builder generates good
trees."""

from bs4.testing import SoupTest, HTMLTreeBuilderSmokeTest
from bs4.builder import HTMLParserTreeBuilder

class HTMLParserTreeBuilderSmokeTest(SoupTest, HTMLTreeBuilderSmokeTest):

    @property
    def default_builder(self):
        return HTMLParserTreeBuilder()

    def test_namespaced_system_doctype(self):
        # html.parser can't handle namespaced doctypes, so skip this one.
        pass

    def test_namespaced_public_doctype(self):
        # html.parser can't handle namespaced doctypes, so skip this one.
        pass

@@ -1,91 +0,0 @@
"""Tests to ensure that the lxml tree builder generates good trees."""

import re
import warnings

try:
    import lxml.etree
    LXML_PRESENT = True
    LXML_VERSION = lxml.etree.LXML_VERSION
except ImportError, e:
    LXML_PRESENT = False
    LXML_VERSION = (0,)

if LXML_PRESENT:
    from bs4.builder import LXMLTreeBuilder, LXMLTreeBuilderForXML

from bs4 import (
    BeautifulSoup,
    BeautifulStoneSoup,
    )
from bs4.element import Comment, Doctype, SoupStrainer
from bs4.testing import skipIf
from bs4.tests import test_htmlparser
from bs4.testing import (
    HTMLTreeBuilderSmokeTest,
    XMLTreeBuilderSmokeTest,
    SoupTest,
    skipIf,
)

@skipIf(
    not LXML_PRESENT,
    "lxml seems not to be present, not testing its tree builder.")
class LXMLTreeBuilderSmokeTest(SoupTest, HTMLTreeBuilderSmokeTest):
    """See ``HTMLTreeBuilderSmokeTest``."""

    @property
    def default_builder(self):
        return LXMLTreeBuilder()

    def test_out_of_range_entity(self):
        self.assertSoupEquals(
            "<p>foo&#10000000000000;bar</p>", "<p>foobar</p>")
        self.assertSoupEquals(
            "<p>foo&#x10000000000000;bar</p>", "<p>foobar</p>")
        self.assertSoupEquals(
            "<p>foo&#1000000000;bar</p>", "<p>foobar</p>")

    # In lxml < 2.3.5, an empty doctype causes a segfault. Skip this
    # test if an old version of lxml is installed.

    @skipIf(
        not LXML_PRESENT or LXML_VERSION < (2,3,5,0),
        "Skipping doctype test for old version of lxml to avoid segfault.")
    def test_empty_doctype(self):
        soup = self.soup("<!DOCTYPE>")
        doctype = soup.contents[0]
        self.assertEqual("", doctype.strip())

    def test_beautifulstonesoup_is_xml_parser(self):
        # Make sure that the deprecated BSS class uses an xml builder
        # if one is installed.
        with warnings.catch_warnings(record=True) as w:
            soup = BeautifulStoneSoup("<b />")
        self.assertEqual(u"<b/>", unicode(soup.b))
        self.assertTrue("BeautifulStoneSoup class is deprecated" in str(w[0].message))

    def test_real_xhtml_document(self):
        """lxml strips the XML definition from an XHTML doc, which is fine."""
        markup = b"""<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN">
<html xmlns="http://www.w3.org/1999/xhtml">
<head><title>Hello.</title></head>
<body>Goodbye.</body>
</html>"""
        soup = self.soup(markup)
        self.assertEqual(
            soup.encode("utf-8").replace(b"\n", b''),
            markup.replace(b'\n', b'').replace(
                b'<?xml version="1.0" encoding="utf-8"?>', b''))


@skipIf(
    not LXML_PRESENT,
    "lxml seems not to be present, not testing its XML tree builder.")
class LXMLXMLTreeBuilderSmokeTest(SoupTest, XMLTreeBuilderSmokeTest):
    """See ``HTMLTreeBuilderSmokeTest``."""

    @property
    def default_builder(self):
        return LXMLTreeBuilderForXML()

@@ -1,434 +0,0 @@
# -*- coding: utf-8 -*-
"""Tests of Beautiful Soup as a whole."""

import logging
import unittest
import sys
import tempfile

from bs4 import (
    BeautifulSoup,
    BeautifulStoneSoup,
)
from bs4.element import (
    CharsetMetaAttributeValue,
    ContentMetaAttributeValue,
    SoupStrainer,
    NamespacedAttribute,
    )
import bs4.dammit
from bs4.dammit import (
    EntitySubstitution,
    UnicodeDammit,
)
from bs4.testing import (
    SoupTest,
    skipIf,
)
import warnings

try:
    from bs4.builder import LXMLTreeBuilder, LXMLTreeBuilderForXML
    LXML_PRESENT = True
except ImportError, e:
    LXML_PRESENT = False

PYTHON_2_PRE_2_7 = (sys.version_info < (2,7))
PYTHON_3_PRE_3_2 = (sys.version_info[0] == 3 and sys.version_info < (3,2))

class TestConstructor(SoupTest):

    def test_short_unicode_input(self):
        data = u"<h1>éé</h1>"
        soup = self.soup(data)
        self.assertEqual(u"éé", soup.h1.string)

    def test_embedded_null(self):
        data = u"<h1>foo\0bar</h1>"
        soup = self.soup(data)
        self.assertEqual(u"foo\0bar", soup.h1.string)


class TestDeprecatedConstructorArguments(SoupTest):

    def test_parseOnlyThese_renamed_to_parse_only(self):
        with warnings.catch_warnings(record=True) as w:
            soup = self.soup("<a><b></b></a>", parseOnlyThese=SoupStrainer("b"))
        msg = str(w[0].message)
        self.assertTrue("parseOnlyThese" in msg)
        self.assertTrue("parse_only" in msg)
        self.assertEqual(b"<b></b>", soup.encode())

    def test_fromEncoding_renamed_to_from_encoding(self):
        with warnings.catch_warnings(record=True) as w:
            utf8 = b"\xc3\xa9"
            soup = self.soup(utf8, fromEncoding="utf8")
        msg = str(w[0].message)
        self.assertTrue("fromEncoding" in msg)
        self.assertTrue("from_encoding" in msg)
        self.assertEqual("utf8", soup.original_encoding)

    def test_unrecognized_keyword_argument(self):
        self.assertRaises(
            TypeError, self.soup, "<a>", no_such_argument=True)

class TestWarnings(SoupTest):

    def test_disk_file_warning(self):
        filehandle = tempfile.NamedTemporaryFile()
        filename = filehandle.name
        try:
            with warnings.catch_warnings(record=True) as w:
                soup = self.soup(filename)
            msg = str(w[0].message)
            self.assertTrue("looks like a filename" in msg)
        finally:
            filehandle.close()

        # The file no longer exists, so Beautiful Soup will no longer issue the warning.
        with warnings.catch_warnings(record=True) as w:
            soup = self.soup(filename)
        self.assertEqual(0, len(w))

    def test_url_warning(self):
        with warnings.catch_warnings(record=True) as w:
            soup = self.soup("http://www.crummy.com/")
        msg = str(w[0].message)
        self.assertTrue("looks like a URL" in msg)

        with warnings.catch_warnings(record=True) as w:
            soup = self.soup("http://www.crummy.com/ is great")
        self.assertEqual(0, len(w))

class TestSelectiveParsing(SoupTest):

    def test_parse_with_soupstrainer(self):
        markup = "No<b>Yes</b><a>No<b>Yes <c>Yes</c></b>"
        strainer = SoupStrainer("b")
        soup = self.soup(markup, parse_only=strainer)
        self.assertEqual(soup.encode(), b"<b>Yes</b><b>Yes <c>Yes</c></b>")


class TestEntitySubstitution(unittest.TestCase):
    """Standalone tests of the EntitySubstitution class."""
    def setUp(self):
        self.sub = EntitySubstitution

    def test_simple_html_substitution(self):
        # Unicode characters corresponding to named HTML entities
        # are substituted, and no others.
        s = u"foo\u2200\N{SNOWMAN}\u00f5bar"
        self.assertEqual(self.sub.substitute_html(s),
                         u"foo&forall;\N{SNOWMAN}&otilde;bar")

    def test_smart_quote_substitution(self):
        # MS smart quotes are a common source of frustration, so we
        # give them a special test.
        quotes = b"\x91\x92foo\x93\x94"
        dammit = UnicodeDammit(quotes)
        self.assertEqual(self.sub.substitute_html(dammit.markup),
                         "&lsquo;&rsquo;foo&ldquo;&rdquo;")

    def test_xml_converstion_includes_no_quotes_if_make_quoted_attribute_is_false(self):
        s = 'Welcome to "my bar"'
        self.assertEqual(self.sub.substitute_xml(s, False), s)

    def test_xml_attribute_quoting_normally_uses_double_quotes(self):
        self.assertEqual(self.sub.substitute_xml("Welcome", True),
                         '"Welcome"')
        self.assertEqual(self.sub.substitute_xml("Bob's Bar", True),
                         '"Bob\'s Bar"')

    def test_xml_attribute_quoting_uses_single_quotes_when_value_contains_double_quotes(self):
        s = 'Welcome to "my bar"'
        self.assertEqual(self.sub.substitute_xml(s, True),
                         "'Welcome to \"my bar\"'")

    def test_xml_attribute_quoting_escapes_single_quotes_when_value_contains_both_single_and_double_quotes(self):
        s = 'Welcome to "Bob\'s Bar"'
        self.assertEqual(
            self.sub.substitute_xml(s, True),
            '"Welcome to &quot;Bob\'s Bar&quot;"')

    def test_xml_quotes_arent_escaped_when_value_is_not_being_quoted(self):
        quoted = 'Welcome to "Bob\'s Bar"'
        self.assertEqual(self.sub.substitute_xml(quoted), quoted)

    def test_xml_quoting_handles_angle_brackets(self):
        self.assertEqual(
            self.sub.substitute_xml("foo<bar>"),
            "foo&lt;bar&gt;")

    def test_xml_quoting_handles_ampersands(self):
        self.assertEqual(self.sub.substitute_xml("AT&T"), "AT&amp;T")

    def test_xml_quoting_including_ampersands_when_they_are_part_of_an_entity(self):
        self.assertEqual(
            self.sub.substitute_xml("&Aacute;T&T"),
            "&amp;Aacute;T&amp;T")

    def test_xml_quoting_ignoring_ampersands_when_they_are_part_of_an_entity(self):
        self.assertEqual(
            self.sub.substitute_xml_containing_entities("&Aacute;T&T"),
            "&Aacute;T&amp;T")

    def test_quotes_not_html_substituted(self):
        """There's no need to do this except inside attribute values."""
        text = 'Bob\'s "bar"'
        self.assertEqual(self.sub.substitute_html(text), text)


class TestEncodingConversion(SoupTest):
    # Test Beautiful Soup's ability to decode and encode from various
    # encodings.

    def setUp(self):
        super(TestEncodingConversion, self).setUp()
        self.unicode_data = u'<html><head><meta charset="utf-8"/></head><body><foo>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</foo></body></html>'
        self.utf8_data = self.unicode_data.encode("utf-8")
        # Just so you know what it looks like.
        self.assertEqual(
            self.utf8_data,
            b'<html><head><meta charset="utf-8"/></head><body><foo>Sacr\xc3\xa9 bleu!</foo></body></html>')

    def test_ascii_in_unicode_out(self):
        # ASCII input is converted to Unicode. The original_encoding
        # attribute is set to 'utf-8', a superset of ASCII.
        chardet = bs4.dammit.chardet_dammit
        logging.disable(logging.WARNING)
        try:
            def noop(str):
                return None
            # Disable chardet, which will realize that the ASCII is ASCII.
            bs4.dammit.chardet_dammit = noop
            ascii = b"<foo>a</foo>"
            soup_from_ascii = self.soup(ascii)
            unicode_output = soup_from_ascii.decode()
            self.assertTrue(isinstance(unicode_output, unicode))
            self.assertEqual(unicode_output, self.document_for(ascii.decode()))
            self.assertEqual(soup_from_ascii.original_encoding.lower(), "utf-8")
        finally:
            logging.disable(logging.NOTSET)
            bs4.dammit.chardet_dammit = chardet

    def test_unicode_in_unicode_out(self):
        # Unicode input is left alone. The original_encoding attribute
        # is not set.
        soup_from_unicode = self.soup(self.unicode_data)
        self.assertEqual(soup_from_unicode.decode(), self.unicode_data)
        self.assertEqual(soup_from_unicode.foo.string, u'Sacr\xe9 bleu!')
        self.assertEqual(soup_from_unicode.original_encoding, None)

    def test_utf8_in_unicode_out(self):
        # UTF-8 input is converted to Unicode. The original_encoding
        # attribute is set.
        soup_from_utf8 = self.soup(self.utf8_data)
        self.assertEqual(soup_from_utf8.decode(), self.unicode_data)
        self.assertEqual(soup_from_utf8.foo.string, u'Sacr\xe9 bleu!')

    def test_utf8_out(self):
        # The internal data structures can be encoded as UTF-8.
        soup_from_unicode = self.soup(self.unicode_data)
        self.assertEqual(soup_from_unicode.encode('utf-8'), self.utf8_data)

    @skipIf(
        PYTHON_2_PRE_2_7 or PYTHON_3_PRE_3_2,
        "Bad HTMLParser detected; skipping test of non-ASCII characters in attribute name.")
    def test_attribute_name_containing_unicode_characters(self):
        markup = u'<div><a \N{SNOWMAN}="snowman"></a></div>'
        self.assertEqual(self.soup(markup).div.encode("utf8"), markup.encode("utf8"))

class TestUnicodeDammit(unittest.TestCase):
    """Standalone tests of UnicodeDammit."""

    def test_unicode_input(self):
        markup = u"I'm already Unicode! \N{SNOWMAN}"
        dammit = UnicodeDammit(markup)
        self.assertEqual(dammit.unicode_markup, markup)

    def test_smart_quotes_to_unicode(self):
        markup = b"<foo>\x91\x92\x93\x94</foo>"
        dammit = UnicodeDammit(markup)
        self.assertEqual(
            dammit.unicode_markup, u"<foo>\u2018\u2019\u201c\u201d</foo>")

    def test_smart_quotes_to_xml_entities(self):
        markup = b"<foo>\x91\x92\x93\x94</foo>"
        dammit = UnicodeDammit(markup, smart_quotes_to="xml")
        self.assertEqual(
            dammit.unicode_markup, "<foo>&#x2018;&#x2019;&#x201C;&#x201D;</foo>")

    def test_smart_quotes_to_html_entities(self):
        markup = b"<foo>\x91\x92\x93\x94</foo>"
        dammit = UnicodeDammit(markup, smart_quotes_to="html")
        self.assertEqual(
            dammit.unicode_markup, "<foo>&lsquo;&rsquo;&ldquo;&rdquo;</foo>")

    def test_smart_quotes_to_ascii(self):
        markup = b"<foo>\x91\x92\x93\x94</foo>"
        dammit = UnicodeDammit(markup, smart_quotes_to="ascii")
        self.assertEqual(
            dammit.unicode_markup, """<foo>''""</foo>""")

    def test_detect_utf8(self):
        utf8 = b"\xc3\xa9"
        dammit = UnicodeDammit(utf8)
        self.assertEqual(dammit.unicode_markup, u'\xe9')
        self.assertEqual(dammit.original_encoding.lower(), 'utf-8')

    def test_convert_hebrew(self):
        hebrew = b"\xed\xe5\xec\xf9"
        dammit = UnicodeDammit(hebrew, ["iso-8859-8"])
        self.assertEqual(dammit.original_encoding.lower(), 'iso-8859-8')
        self.assertEqual(dammit.unicode_markup, u'\u05dd\u05d5\u05dc\u05e9')

    def test_dont_see_smart_quotes_where_there_are_none(self):
        utf_8 = b"\343\202\261\343\203\274\343\202\277\343\202\244 Watch"
        dammit = UnicodeDammit(utf_8)
        self.assertEqual(dammit.original_encoding.lower(), 'utf-8')
        self.assertEqual(dammit.unicode_markup.encode("utf-8"), utf_8)

    def test_ignore_inappropriate_codecs(self):
        utf8_data = u"Räksmörgås".encode("utf-8")
        dammit = UnicodeDammit(utf8_data, ["iso-8859-8"])
        self.assertEqual(dammit.original_encoding.lower(), 'utf-8')

    def test_ignore_invalid_codecs(self):
        utf8_data = u"Räksmörgås".encode("utf-8")
        for bad_encoding in ['.utf8', '...', 'utF---16.!']:
            dammit = UnicodeDammit(utf8_data, [bad_encoding])
            self.assertEqual(dammit.original_encoding.lower(), 'utf-8')

    def test_detect_html5_style_meta_tag(self):

        for data in (
            b'<html><meta charset="euc-jp" /></html>',
            b"<html><meta charset='euc-jp' /></html>",
            b"<html><meta charset=euc-jp /></html>",
            b"<html><meta charset=euc-jp/></html>"):
            dammit = UnicodeDammit(data, is_html=True)
            self.assertEqual(
                "euc-jp", dammit.original_encoding)

    def test_last_ditch_entity_replacement(self):
        # This is a UTF-8 document that contains bytestrings
        # completely incompatible with UTF-8 (ie. encoded with some other
        # encoding).
        #
        # Since there is no consistent encoding for the document,
        # Unicode, Dammit will eventually encode the document as UTF-8
        # and encode the incompatible characters as REPLACEMENT
        # CHARACTER.
        #
        # If chardet is installed, it will detect that the document
        # can be converted into ISO-8859-1 without errors. This happens
        # to be the wrong encoding, but it is a consistent encoding, so the
        # code we're testing here won't run.
        #
        # So we temporarily disable chardet if it's present.
        doc = b"""\357\273\277<?xml version="1.0" encoding="UTF-8"?>
<html><b>\330\250\330\252\330\261</b>
<i>\310\322\321\220\312\321\355\344</i></html>"""
        chardet = bs4.dammit.chardet_dammit
        logging.disable(logging.WARNING)
        try:
            def noop(str):
                return None
            bs4.dammit.chardet_dammit = noop
            dammit = UnicodeDammit(doc)
            self.assertEqual(True, dammit.contains_replacement_characters)
            self.assertTrue(u"\ufffd" in dammit.unicode_markup)

            soup = BeautifulSoup(doc, "html.parser")
            self.assertTrue(soup.contains_replacement_characters)
        finally:
            logging.disable(logging.NOTSET)
            bs4.dammit.chardet_dammit = chardet

    def test_byte_order_mark_removed(self):
        # A document written in UTF-16LE will have its byte order marker stripped.
        data = b'\xff\xfe<\x00a\x00>\x00\xe1\x00\xe9\x00<\x00/\x00a\x00>\x00'
        dammit = UnicodeDammit(data)
        self.assertEqual(u"<a>áé</a>", dammit.unicode_markup)
        self.assertEqual("utf-16le", dammit.original_encoding)

    def test_detwingle(self):
        # Here's a UTF8 document.
        utf8 = (u"\N{SNOWMAN}" * 3).encode("utf8")

        # Here's a Windows-1252 document.
        windows_1252 = (
            u"\N{LEFT DOUBLE QUOTATION MARK}Hi, I like Windows!"
            u"\N{RIGHT DOUBLE QUOTATION MARK}").encode("windows_1252")

        # Through some unholy alchemy, they've been stuck together.
        doc = utf8 + windows_1252 + utf8

        # The document can't be turned into UTF-8:
        self.assertRaises(UnicodeDecodeError, doc.decode, "utf8")

        # Unicode, Dammit thinks the whole document is Windows-1252,
        # and decodes it into "☃☃☃“Hi, I like Windows!”☃☃☃"

        # But if we run it through fix_embedded_windows_1252, it's fixed:
        fixed = UnicodeDammit.detwingle(doc)
        self.assertEqual(
            u"☃☃☃“Hi, I like Windows!”☃☃☃", fixed.decode("utf8"))

    def test_detwingle_ignores_multibyte_characters(self):
        # Each of these characters has a UTF-8 representation ending
        # in \x93. \x93 is a smart quote if interpreted as
        # Windows-1252. But our code knows to skip over multibyte
        # UTF-8 characters, so they'll survive the process unscathed.
        for tricky_unicode_char in (
            u"\N{LATIN SMALL LIGATURE OE}", # 2-byte char '\xc5\x93'
            u"\N{LATIN SUBSCRIPT SMALL LETTER X}", # 3-byte char '\xe2\x82\x93'
            u"\xf0\x90\x90\x93", # This is a CJK character, not sure which one.
            ):
            input = tricky_unicode_char.encode("utf8")
            self.assertTrue(input.endswith(b'\x93'))
            output = UnicodeDammit.detwingle(input)
            self.assertEqual(output, input)
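
The two detwingle tests above describe the trick without spelling it out: walk the byte string, step over well-formed multibyte UTF-8 sequences, and re-encode any stray Windows-1252 byte as UTF-8. A rough sketch of that idea (ours, not bs4's actual implementation; it does not validate continuation bytes):

    def detwingle_sketch(data):
        # Index bytes as integers on both Python 2 and 3.
        data = bytearray(data)
        out = bytearray()
        i = 0
        while i < len(data):
            byte = data[i]
            if 0xc2 <= byte <= 0xdf:
                width = 2    # lead byte of a 2-byte UTF-8 character
            elif 0xe0 <= byte <= 0xef:
                width = 3    # lead byte of a 3-byte UTF-8 character
            elif 0xf0 <= byte <= 0xf4:
                width = 4    # lead byte of a 4-byte UTF-8 character
            else:
                width = 1
            chunk = bytes(data[i:i + width])
            if width == 1 and 0x80 <= byte <= 0x9f:
                # Illegal as a standalone UTF-8 byte, but meaningful as
                # Windows-1252: re-encode it as UTF-8.
                chunk = chunk.decode("windows-1252").encode("utf8")
            out += chunk
            i += width
        return bytes(out)

On the mixed document from test_detwingle this recovers valid UTF-8, and multibyte characters whose trailing bytes merely look like smart quotes are stepped over intact, which is exactly what test_detwingle_ignores_multibyte_characters asserts.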

class TestNamedspacedAttribute(SoupTest):

    def test_name_may_be_none(self):
        a = NamespacedAttribute("xmlns", None)
        self.assertEqual(a, "xmlns")

    def test_attribute_is_equivalent_to_colon_separated_string(self):
        a = NamespacedAttribute("a", "b")
        self.assertEqual("a:b", a)

    def test_attributes_are_equivalent_if_prefix_and_name_identical(self):
        a = NamespacedAttribute("a", "b", "c")
        b = NamespacedAttribute("a", "b", "c")
        self.assertEqual(a, b)

        # The actual namespace is not considered.
        c = NamespacedAttribute("a", "b", None)
        self.assertEqual(a, c)

        # But name and prefix are important.
        d = NamespacedAttribute("a", "z", "c")
        self.assertNotEqual(a, d)

        e = NamespacedAttribute("z", "b", "c")
        self.assertNotEqual(a, e)


class TestAttributeValueWithCharsetSubstitution(unittest.TestCase):

    def test_charset_meta_attribute_value(self):
        value = CharsetMetaAttributeValue("euc-jp")
        self.assertEqual("euc-jp", value)
        self.assertEqual("euc-jp", value.original_value)
        self.assertEqual("utf8", value.encode("utf8"))


    def test_content_meta_attribute_value(self):
        value = ContentMetaAttributeValue("text/html; charset=euc-jp")
        self.assertEqual("text/html; charset=euc-jp", value)
        self.assertEqual("text/html; charset=euc-jp", value.original_value)
        self.assertEqual("text/html; charset=utf8", value.encode("utf8"))

File diff suppressed because it is too large

@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python
# -*- coding: iso-8859-1 -*-
#
# progressbar  - Text progressbar library for python.

@@ -11,4 +11,4 @@ def init_logger(logfile, loglevel):
    logging.basicConfig(level=numeric_level, filename=logfile, format=FORMAT)

class NotFoundError(Exception):
    pass
    pass

@@ -19,7 +19,6 @@ class PRTable(object):
    def __init__(self, conn, table, nohist):
        self.conn = conn
        self.nohist = nohist
        self.dirty = False
        if nohist:
            self.table = "%s_nohist" % table
        else:
@@ -48,11 +47,6 @@ class PRTable(object):
            self.conn.commit()
            self._execute("BEGIN EXCLUSIVE TRANSACTION")

    def sync_if_dirty(self):
        if self.dirty:
            self.sync()
            self.dirty = False

    def _getValueHist(self, version, pkgarch, checksum):
        data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table,
                           (version, pkgarch, checksum))
@@ -68,8 +62,6 @@ class PRTable(object):
            except sqlite3.IntegrityError as exc:
                logger.error(str(exc))

            self.dirty = True

            data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table,
                               (version, pkgarch, checksum))
            row=data.fetchone()
@@ -97,8 +89,6 @@ class PRTable(object):
                logger.error(str(exc))
                self.conn.rollback()

            self.dirty = True

            data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table,
                               (version, pkgarch, checksum))
            row=data.fetchone()
@@ -128,8 +118,6 @@ class PRTable(object):
            except sqlite3.IntegrityError as exc:
                logger.error(str(exc))

            self.dirty = True

            data = self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table,
                                 (version, pkgarch, checksum))
            row = data.fetchone()
@@ -151,8 +139,6 @@ class PRTable(object):
            except sqlite3.IntegrityError as exc:
                logger.error(str(exc))

            self.dirty = True

            data = self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=? AND value>=?;" % self.table,
                                 (version,pkgarch,checksum,value))
            row=data.fetchone()
@@ -234,8 +220,6 @@ class PRData(object):
            raise e
        self.connection=sqlite3.connect(self.filename, isolation_level="EXCLUSIVE", check_same_thread = False)
        self.connection.row_factory=sqlite3.Row
        self.connection.execute("pragma synchronous = off;")
        self.connection.execute("PRAGMA journal_mode = WAL;")
        self._tables={}

    def __del__(self):
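
For context on the hunks above: the dirty-flag machinery batches SQLite work so that request handling never pays for a commit on every write; mutators only mark the table dirty, and sync_if_dirty() flushes at quiet moments. A generic sketch of the pattern (names ours, not the PR server's exact code):

    class DirtyFlagTable(object):
        def __init__(self, conn):
            self.conn = conn
            self.dirty = False

        def write(self, sql, values):
            self.conn.execute(sql, values)  # no commit here
            self.dirty = True

        def sync_if_dirty(self):
            # Called periodically, or when the request queue goes idle.
            if self.dirty:
                self.conn.commit()
                self.dirty = False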

@@ -38,17 +38,8 @@ singleton = None
class PRServer(SimpleXMLRPCServer):
    def __init__(self, dbfile, logfile, interface, daemon=True):
        ''' constructor '''
        import socket
        try:
            SimpleXMLRPCServer.__init__(self, interface,
                                        logRequests=False, allow_none=True)
        except socket.error:
            ip=socket.gethostbyname(interface[0])
            port=interface[1]
            msg="PR Server unable to bind to %s:%s\n" % (ip, port)
            sys.stderr.write(msg)
            raise PRServiceConfigError

        SimpleXMLRPCServer.__init__(self, interface,
                                    logRequests=False, allow_none=True)
        self.dbfile=dbfile
        self.daemon=daemon
        self.logfile=logfile
@@ -76,27 +67,15 @@ class PRServer(SimpleXMLRPCServer):
        In addition, exception handling is done here.

        """
        iter_count = 1
        # 60 iterations between syncs or sync if dirty every ~30 seconds
        iterations_between_sync = 60

        while not self.quit:
            try:
                (request, client_address) = self.requestqueue.get(True, 30)
            except Queue.Empty:
                self.table.sync_if_dirty()
                continue
        while True:
            (request, client_address) = self.requestqueue.get()
            try:
                self.finish_request(request, client_address)
                self.shutdown_request(request)
                iter_count = (iter_count + 1) % iterations_between_sync
                if iter_count == 0:
                    self.table.sync_if_dirty()
            except:
                self.handle_error(request, client_address)
                self.shutdown_request(request)
        self.table.sync()
        self.table.sync_if_dirty()

    def process_request(self, request, client_address):
        self.requestqueue.put((request, client_address))
@@ -141,7 +120,7 @@ class PRServer(SimpleXMLRPCServer):
        self.handlerthread.start()
        while not self.quit:
            self.handle_request()
        self.handlerthread.join()

        self.table.sync()
        logger.info("PRServer: stopping...")
        self.server_close()

File diff suppressed because it is too large

@@ -1,8 +0,0 @@
from django.contrib import admin
from django.contrib.admin.filters import RelatedFieldListFilter
from .models import BuildEnvironment

class BuildEnvironmentAdmin(admin.ModelAdmin):
    pass

admin.site.register(BuildEnvironment, BuildEnvironmentAdmin)

Some files were not shown because too many files have changed in this diff