Compare commits: fido-13.0 ... dizzy-12.0 (397 commits)
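The same comparison can be reproduced from a local poky clone; a rough sketch (tag names taken from the header above, everything else standard git):

  $ git clone git://git.yoctoproject.org/poky
  $ cd poky
  $ git rev-list --count fido-13.0...dizzy-12.0     # commit count between the two tags
  $ git log --oneline fido-13.0...dizzy-12.0        # the full commit list
  $ git diff fido-13.0 dizzy-12.0 -- .gitignore README   # per-file changes like those below
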
.gitignore (vendored): 2 changes

@@ -8,7 +8,7 @@ scripts/oe-git-proxy-socks
sources/
meta-*/
!meta-skeleton
!meta-selftest
!meta-hob
hob-image-*.bb
*.swp
*.orig

README: 29 changes

@@ -30,29 +30,20 @@ For information about OpenEmbedded, see the OpenEmbedded website:
Where to Send Patches
=====================

As Poky is an integration repository (built using a tool called combo-layer),
patches against the various components should be sent to their respective
upstreams:
As Poky is an integration repository, patches against the various components
should be sent to their respective upstreams.

bitbake:
    Git repository: http://git.openembedded.org/bitbake/
    Mailing list: bitbake-devel@lists.openembedded.org
    bitbake-devel@lists.openembedded.org

documentation:
    Git repository: http://git.yoctoproject.org/cgit/cgit.cgi/yocto-docs/
    Mailing list: yocto@yoctoproject.org
meta-yocto:
    poky@yoctoproject.org

meta-yocto(-bsp):
    Git repository: http://git.yoctoproject.org/cgit/cgit.cgi/meta-yocto(-bsp)
    Mailing list: poky@yoctoproject.org

Everything else should be sent to the OpenEmbedded Core mailing list. If in
doubt, check the oe-core git repository for the content you intend to modify.
Most everything else should be sent to the OpenEmbedded Core mailing list. If
in doubt, check the oe-core git repository for the content you intend to modify.
Before sending, be sure the patches apply cleanly to the current oe-core git
repository.
    openembedded-core@lists.openembedded.org

    Git repository: http://git.openembedded.org/openembedded-core/
    Mailing list: openembedded-core@lists.openembedded.org

Note: The scripts directory should be treated with extra care as it is a mix of
oe-core and poky-specific files.
Note: The scripts directory should be treated with extra care as it is a mix
of oe-core and poky-specific files.

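As a concrete illustration of the routing described above, a patch touching only
bitbake would go to the bitbake-devel list; with git-send-email that looks
roughly like this (the list address comes from the text above, the subject
prefix and revision range are illustrative):

  $ git send-email --to=bitbake-devel@lists.openembedded.org \
        --subject-prefix="bitbake][PATCH" -1
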
@@ -251,14 +251,14 @@ if used via a usb card reader):

5. If using core-image-minimal rootfs, install the modules
   # tar x -C /media/root -f modules-beaglebone.tgz

6. If using core-image-minimal rootfs, install the kernel zImage into /boot
6. If using core-image-minimal rootfs, install the kernel uImage into /boot
   directory of rootfs
   # cp zImage-beaglebone.bin /media/root/boot/zImage
   # cp uImage-beaglebone.bin /media/root/boot/uImage

7. If using core-image-minimal rootfs, also install device tree (DTB) files
   into /boot directory of rootfs
   # cp zImage-am335x-bone.dtb /media/root/boot/am335x-bone.dtb
   # cp zImage-am335x-boneblack.dtb /media/root/boot/am335x-boneblack.dtb
   # cp uImage-am335x-bone.dtb /media/root/boot/am335x-bone.dtb
   # cp uImage-am335x-boneblack.dtb /media/root/boot/am335x-boneblack.dtb

8. Unmount the SD partitions, insert the SD card into the Beaglebone, and
   boot the Beaglebone

@@ -352,14 +352,11 @@ Setup instructions
------------------

You will need the following:
* RJ45 -> serial ("rollover") cable connected from your PC to the CONSOLE
  port on the device
* Ethernet connected to the first ethernet port on the board

If using NFS as part of the setup process, you will also need:
* NFS root setup on your workstation
* TFTP server installed on your workstation (if fetching the kernel from
  TFTP, see below).
* TFTP server installed on your workstation
* RJ45 -> serial ("rollover") cable connected from your PC to the CONSOLE
  port on the board
* Ethernet connected to the first ethernet port on the board

--- Preparation ---

@@ -367,7 +364,7 @@ Build an image (e.g. core-image-minimal) using "edgerouter" as the MACHINE.
In the following instruction it is based on core-image-minimal. Another target
may be similar to it.

--- Booting from NFS root / kernel via TFTP ---
--- Booting from NFS root ---

Load the kernel, and boot the system as follows:

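For the NFS/TFTP variant, the workstation side can be prepared roughly as
follows (the export path and TFTP root are illustrative and vary by
distribution):

  # /etc/exports on the workstation: export the unpacked rootfs over NFS
  /export/edgerouter-rootfs *(rw,no_root_squash,sync,no_subtree_check)

  $ sudo exportfs -ra                      # re-read /etc/exports
  $ sudo cp vmlinux /var/lib/tftpboot/     # kernel image served via TFTP
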
@@ -23,22 +23,364 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os
import sys

import sys, logging
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)),
                                'lib'))

import optparse
import warnings
from traceback import format_exception
try:
    import bb
except RuntimeError as exc:
    sys.exit(str(exc))

from bb import event
import bb.msg
from bb import cooker
from bb import ui
from bb import server
from bb import cookerdata
from bb.main import bitbake_main, BitBakeConfigParameters

__version__ = "1.24.0"
logger = logging.getLogger("BitBake")

# Python multiprocessing requires /dev/shm
if not os.access('/dev/shm', os.W_OK | os.X_OK):
    sys.exit("FATAL: /dev/shm does not exist or is not writable")

# Unbuffer stdout to avoid log truncation in the event
# of an unorderly exit as well as to provide timely
# updates to log files for use with tail
try:
    if sys.stdout.name == '<stdout>':
        sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
except:
    pass


def get_ui(config):
    if not config.ui:
        # modify 'ui' attribute because it is also read by cooker
        config.ui = os.environ.get('BITBAKE_UI', 'knotty')

    interface = config.ui

    try:
        # Dynamically load the UI based on the ui name. Although we
        # suggest a fixed set this allows you to have flexibility in which
        # ones are available.
        module = __import__("bb.ui", fromlist = [interface])
        return getattr(module, interface)
    except AttributeError:
        sys.exit("FATAL: Invalid user interface '%s' specified.\n"
                 "Valid interfaces: depexp, goggle, ncurses, hob, knotty [default]." % interface)


# Display bitbake/OE warnings via the BitBake.Warnings logger, ignoring others"""
warnlog = logging.getLogger("BitBake.Warnings")
_warnings_showwarning = warnings.showwarning
def _showwarning(message, category, filename, lineno, file=None, line=None):
    if file is not None:
        if _warnings_showwarning is not None:
            _warnings_showwarning(message, category, filename, lineno, file, line)
    else:
        s = warnings.formatwarning(message, category, filename, lineno)
        warnlog.warn(s)

warnings.showwarning = _showwarning
warnings.filterwarnings("ignore")
warnings.filterwarnings("default", module="(<string>$|(oe|bb)\.)")
warnings.filterwarnings("ignore", category=PendingDeprecationWarning)
warnings.filterwarnings("ignore", category=ImportWarning)
warnings.filterwarnings("ignore", category=DeprecationWarning, module="<string>$")
warnings.filterwarnings("ignore", message="With-statements now directly support multiple context managers")

class BitBakeConfigParameters(cookerdata.ConfigParameters):

    def parseCommandLine(self):
        parser = optparse.OptionParser(
            version = "BitBake Build Tool Core version %s, %%prog version %s" % (bb.__version__, __version__),
            usage = """%prog [options] [recipename/target ...]

Executes the specified task (default is 'build') for a given set of target recipes (.bb files).
It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which
will provide the layer, BBFILES and other configuration information.""")

        parser.add_option("-b", "--buildfile", help = "Execute tasks from a specific .bb recipe directly. WARNING: Does not handle any dependencies from other recipes.",
                   action = "store", dest = "buildfile", default = None)

        parser.add_option("-k", "--continue", help = "Continue as much as possible after an error. While the target that failed and anything depending on it cannot be built, as much as possible will be built before stopping.",
                   action = "store_false", dest = "abort", default = True)

        parser.add_option("-a", "--tryaltconfigs", help = "Continue with builds by trying to use alternative providers where possible.",
                   action = "store_true", dest = "tryaltconfigs", default = False)

        parser.add_option("-f", "--force", help = "Force the specified targets/task to run (invalidating any existing stamp file).",
                   action = "store_true", dest = "force", default = False)

        parser.add_option("-c", "--cmd", help = "Specify the task to execute. The exact options available depend on the metadata. Some examples might be 'compile' or 'populate_sysroot' or 'listtasks' may give a list of the tasks available.",
                   action = "store", dest = "cmd")

        parser.add_option("-C", "--clear-stamp", help = "Invalidate the stamp for the specified task such as 'compile' and then run the default task for the specified target(s).",
                   action = "store", dest = "invalidate_stamp")

        parser.add_option("-r", "--read", help = "Read the specified file before bitbake.conf.",
                   action = "append", dest = "prefile", default = [])

        parser.add_option("-R", "--postread", help = "Read the specified file after bitbake.conf.",
                   action = "append", dest = "postfile", default = [])

        parser.add_option("-v", "--verbose", help = "Output more log message data to the terminal.",
                   action = "store_true", dest = "verbose", default = False)

        parser.add_option("-D", "--debug", help = "Increase the debug level. You can specify this more than once.",
                   action = "count", dest="debug", default = 0)

        parser.add_option("-n", "--dry-run", help = "Don't execute, just go through the motions.",
                   action = "store_true", dest = "dry_run", default = False)

        parser.add_option("-S", "--dump-signatures", help = "Dump out the signature construction information, with no task execution. The SIGNATURE_HANDLER parameter is passed to the handler. Two common values are none and printdiff but the handler may define more/less. none means only dump the signature, printdiff means compare the dumped signature with the cached one.",
                   action = "append", dest = "dump_signatures", default = [], metavar="SIGNATURE_HANDLER")

        parser.add_option("-p", "--parse-only", help = "Quit after parsing the BB recipes.",
                   action = "store_true", dest = "parse_only", default = False)

        parser.add_option("-s", "--show-versions", help = "Show current and preferred versions of all recipes.",
                   action = "store_true", dest = "show_versions", default = False)

        parser.add_option("-e", "--environment", help = "Show the global or per-recipe environment complete with information about where variables were set/changed.",
                   action = "store_true", dest = "show_environment", default = False)

        parser.add_option("-g", "--graphviz", help = "Save dependency tree information for the specified targets in the dot syntax.",
                   action = "store_true", dest = "dot_graph", default = False)

        parser.add_option("-I", "--ignore-deps", help = """Assume these dependencies don't exist and are already provided (equivalent to ASSUME_PROVIDED). Useful to make dependency graphs more appealing""",
                   action = "append", dest = "extra_assume_provided", default = [])

        parser.add_option("-l", "--log-domains", help = """Show debug logging for the specified logging domains""",
                   action = "append", dest = "debug_domains", default = [])

        parser.add_option("-P", "--profile", help = "Profile the command and save reports.",
                   action = "store_true", dest = "profile", default = False)

        parser.add_option("-u", "--ui", help = "The user interface to use (e.g. knotty, hob, depexp).",
                   action = "store", dest = "ui")

        parser.add_option("-t", "--servertype", help = "Choose which server to use, process or xmlrpc.",
                   action = "store", dest = "servertype")

        parser.add_option("", "--token", help = "Specify the connection token to be used when connecting to a remote server.",
                   action = "store", dest = "xmlrpctoken")

        parser.add_option("", "--revisions-changed", help = "Set the exit code depending on whether upstream floating revisions have changed or not.",
                   action = "store_true", dest = "revisions_changed", default = False)

        parser.add_option("", "--server-only", help = "Run bitbake without a UI, only starting a server (cooker) process.",
                   action = "store_true", dest = "server_only", default = False)

        parser.add_option("-B", "--bind", help = "The name/address for the bitbake server to bind to.",
                   action = "store", dest = "bind", default = False)

        parser.add_option("", "--no-setscene", help = "Do not run any setscene tasks. sstate will be ignored and everything needed, built.",
                   action = "store_true", dest = "nosetscene", default = False)

        parser.add_option("", "--remote-server", help = "Connect to the specified server.",
                   action = "store", dest = "remote_server", default = False)

        parser.add_option("-m", "--kill-server", help = "Terminate the remote server.",
                   action = "store_true", dest = "kill_server", default = False)

        parser.add_option("", "--observe-only", help = "Connect to a server as an observing-only client.",
                   action = "store_true", dest = "observe_only", default = False)

        parser.add_option("", "--status-only", help = "Check the status of the remote bitbake server.",
                   action = "store_true", dest = "status_only", default = False)

        options, targets = parser.parse_args(sys.argv)

        # some environmental variables set also configuration options
        if "BBSERVER" in os.environ:
            options.servertype = "xmlrpc"
            options.remote_server = os.environ["BBSERVER"]

        if "BBTOKEN" in os.environ:
            options.xmlrpctoken = os.environ["BBTOKEN"]

        # if BBSERVER says to autodetect, let's do that
        if options.remote_server:
            [host, port] = options.remote_server.split(":", 2)
            port = int(port)
            # use automatic port if port set to -1, means read it from
            # the bitbake.lock file; this is a bit tricky, but we always expect
            # to be in the base of the build directory if we need to have a
            # chance to start the server later, anyway
            if port == -1:
                lock_location = "./bitbake.lock"
                # we try to read the address at all times; if the server is not started,
                # we'll try to start it after the first connect fails, below
                try:
                    lf = open(lock_location, 'r')
                    remotedef = lf.readline()
                    [host, port] = remotedef.split(":")
                    port = int(port)
                    lf.close()
                    options.remote_server = remotedef
                except Exception as e:
                    sys.exit("Failed to read bitbake.lock (%s), invalid port" % str(e))

        return options, targets[1:]


def start_server(servermodule, configParams, configuration, features):
    server = servermodule.BitBakeServer()
    if configParams.bind:
        (host, port) = configParams.bind.split(':')
        server.initServer((host, int(port)))
        configuration.interface = [ server.serverImpl.host, server.serverImpl.port ]
    else:
        server.initServer()
        configuration.interface = []

    try:
        configuration.setServerRegIdleCallback(server.getServerIdleCB())

        cooker = bb.cooker.BBCooker(configuration, features)

        server.addcooker(cooker)
        server.saveConnectionDetails()
    except Exception as e:
        exc_info = sys.exc_info()
        while hasattr(server, "event_queue"):
            try:
                import queue
            except ImportError:
                import Queue as queue
            try:
                event = server.event_queue.get(block=False)
            except (queue.Empty, IOError):
                break
            if isinstance(event, logging.LogRecord):
                logger.handle(event)
        raise exc_info[1], None, exc_info[2]
    server.detach()
    cooker.lock.close()
    return server


def main():

    configParams = BitBakeConfigParameters()
    configuration = cookerdata.CookerConfiguration()
    configuration.setConfigParameters(configParams)

    ui_module = get_ui(configParams)

    # Server type can be xmlrpc or process currently, if nothing is specified,
    # the default server is process
    if configParams.servertype:
        server_type = configParams.servertype
    else:
        server_type = 'process'

    try:
        module = __import__("bb.server", fromlist = [server_type])
        servermodule = getattr(module, server_type)
    except AttributeError:
        sys.exit("FATAL: Invalid server type '%s' specified.\n"
                 "Valid interfaces: xmlrpc, process [default]." % server_type)

    if configParams.server_only:
        if configParams.servertype != "xmlrpc":
            sys.exit("FATAL: If '--server-only' is defined, we must set the servertype as 'xmlrpc'.\n")
        if not configParams.bind:
            sys.exit("FATAL: The '--server-only' option requires a name/address to bind to with the -B option.\n")
        if configParams.remote_server:
            sys.exit("FATAL: The '--server-only' option conflicts with %s.\n" %
                     ("the BBSERVER environment variable" if "BBSERVER" in os.environ else "the '--remote-server' option" ))

    if configParams.bind and configParams.servertype != "xmlrpc":
        sys.exit("FATAL: If '-B' or '--bind' is defined, we must set the servertype as 'xmlrpc'.\n")

    if configParams.remote_server and configParams.servertype != "xmlrpc":
        sys.exit("FATAL: If '--remote-server' is defined, we must set the servertype as 'xmlrpc'.\n")

    if configParams.observe_only and (not configParams.remote_server or configParams.bind):
        sys.exit("FATAL: '--observe-only' can only be used by UI clients connecting to a server.\n")

    if configParams.kill_server and not configParams.remote_server:
        sys.exit("FATAL: '--kill-server' can only be used to terminate a remote server")

    if "BBDEBUG" in os.environ:
        level = int(os.environ["BBDEBUG"])
        if level > configuration.debug:
            configuration.debug = level

    bb.msg.init_msgconfig(configParams.verbose, configuration.debug,
                          configuration.debug_domains)

    # Ensure logging messages get sent to the UI as events
    handler = bb.event.LogHandler()
    if not configParams.status_only:
        # In status only mode there are no logs and no UI
        logger.addHandler(handler)

    # Clear away any spurious environment variables while we stoke up the cooker
    cleanedvars = bb.utils.clean_environment()

    featureset = []
    if not configParams.server_only:
        # Collect the feature set for the UI
        featureset = getattr(ui_module, "featureSet", [])

    if not configParams.remote_server:
        # we start a server with a given configuration
        server = start_server(servermodule, configParams, configuration, featureset)
        bb.event.ui_queue = []
    else:
        # we start a stub server that is actually a XMLRPClient that connects to a real server
        server = servermodule.BitBakeXMLRPCClient(configParams.observe_only, configParams.xmlrpctoken)
        server.saveConnectionDetails(configParams.remote_server)

    if not configParams.server_only:
        try:
            server_connection = server.establishConnection(featureset)
        except Exception as e:
            if configParams.kill_server:
                sys.exit(0)
            bb.fatal("Could not connect to server %s: %s" % (configParams.remote_server, str(e)))

        # Restore the environment in case the UI needs it
        for k in cleanedvars:
            os.environ[k] = cleanedvars[k]

        logger.removeHandler(handler)

        if configParams.status_only:
            server_connection.terminate()
            sys.exit(0)

        if configParams.kill_server:
            server_connection.connection.terminateServer()
            bb.event.ui_queue = []
            sys.exit(0)

        try:
            return ui_module.main(server_connection.connection, server_connection.events, configParams)
        finally:
            bb.event.ui_queue = []
            server_connection.terminate()
    else:
        print("server address: %s, server port: %s" % (server.serverImpl.host, server.serverImpl.port))
        return 0

    return 1

if __name__ == "__main__":
    try:
        ret = bitbake_main(BitBakeConfigParameters(sys.argv),
                           cookerdata.CookerConfiguration())
        ret = main()
    except bb.BBHandledException:
        ret = 1
    except Exception:

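Putting the server options above together, a typical memory-resident setup
would look roughly like this (host and port are illustrative):

  $ bitbake -t xmlrpc --server-only -B localhost:12345   # start only the cooker/server
  $ BBSERVER=localhost:12345 bitbake core-image-minimal  # connect a UI client to it
  $ BBSERVER=localhost:12345 bitbake --status-only       # check the server is alive
  $ BBSERVER=localhost:12345 bitbake -m                  # terminate the remote server
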
@@ -46,12 +46,6 @@ logger = logger_create('bitbake-diffsigs')
def find_compare_task(bbhandler, pn, taskname):
    """ Find the most recent signature files for the specified PN/task and compare them """

    def get_hashval(siginfo):
        if siginfo.endswith('.siginfo'):
            return siginfo.rpartition(':')[2].partition('_')[0]
        else:
            return siginfo.rpartition('.')[2]

    if not hasattr(bb.siggen, 'find_siginfo'):
        logger.error('Metadata does not support finding signature data files')
        sys.exit(1)
@@ -60,7 +54,7 @@ def find_compare_task(bbhandler, pn, taskname):
        taskname = 'do_%s' % taskname

    filedates = bb.siggen.find_siginfo(pn, taskname, None, bbhandler.config_data)
    latestfiles = sorted(filedates.keys(), key=lambda f: filedates[f])[-3:]
    latestfiles = sorted(filedates.keys(), key=lambda f: filedates[f])[-2:]
    if not latestfiles:
        logger.error('No sigdata files found matching %s %s' % (pn, taskname))
        sys.exit(1)
@@ -68,16 +62,6 @@
        logger.error('Only one matching sigdata file found for the specified task (%s %s)' % (pn, taskname))
        sys.exit(1)
    else:
        # It's possible that latestfiles contain 3 elements and the first two have the same hash value.
        # In this case, we delete the second element.
        # The above case is actually the most common one. Because we may have sigdata file and siginfo
        # file having the same hash value. Comparing such two files makes no sense.
        if len(latestfiles) == 3:
            hash0 = get_hashval(latestfiles[0])
            hash1 = get_hashval(latestfiles[1])
            if hash0 == hash1:
                latestfiles.pop(1)

        # Define recursion callback
        def recursecb(key, hash1, hash2):
            hashes = [hash1, hash2]

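find_compare_task() is what backs the tool's per-task mode; an invocation
looks roughly like this (the recipe and task names are illustrative):

  $ bitbake-diffsigs -t busybox do_compile   # compare the two latest signatures
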
@@ -5,7 +5,7 @@
# See the help output for details on available commands.

# Copyright (C) 2011 Mentor Graphics Corporation
# Copyright (C) 2011-2015 Intel Corporation
# Copyright (C) 2012 Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -20,15 +20,13 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import cmd
import logging
import os
import sys
import fnmatch
from collections import defaultdict
import argparse
import re
import httplib, urlparse, json
import subprocess

bindir = os.path.dirname(__file__)
topdir = os.path.dirname(bindir)
@@ -41,24 +39,26 @@ import bb.utils
import bb.tinfoil


def logger_create(name, output=sys.stderr):
    logger = logging.getLogger(name)
    console = logging.StreamHandler(output)
    format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
    if output.isatty():
        format.enable_color()
    console.setFormatter(format)
    logger.addHandler(console)
    logger.setLevel(logging.INFO)
    return logger

logger = logger_create('bitbake-layers', sys.stdout)
logger = logging.getLogger('BitBake')


class Commands():
def main(args):
    cmds = Commands()
    if args:
        # Allow user to specify e.g. show-layers instead of show_layers
        args = [args[0].replace('-', '_')] + args[1:]
        cmds.onecmd(' '.join(args))
    else:
        cmds.do_help('')
    return cmds.returncode


class Commands(cmd.Cmd):
    def __init__(self):
        self.bbhandler = None
        self.returncode = 0
        self.bblayers = []
        cmd.Cmd.__init__(self)

    def init_bbhandler(self, config_only = False):
        if not self.bbhandler:
@@ -66,6 +66,27 @@ class Commands():
            self.bblayers = (self.bbhandler.config_data.getVar('BBLAYERS', True) or "").split()
            self.bbhandler.prepare(config_only)

    def default(self, line):
        """Handle unrecognised commands"""
        sys.stderr.write("Unrecognised command or option\n")
        self.do_help('')

    def do_help(self, topic):
        """display general help or help on a specified command"""
        if topic:
            sys.stdout.write('%s: ' % topic)
            cmd.Cmd.do_help(self, topic.replace('-', '_'))
        else:
            sys.stdout.write("usage: bitbake-layers <command> [arguments]\n\n")
            sys.stdout.write("Available commands:\n")
            procnames = list(set(self.get_names()))
            for procname in procnames:
                if procname[:3] == 'do_':
                    sys.stdout.write(" %s\n" % procname[3:].replace('_', '-'))
                    doc = getattr(self, procname).__doc__
                    if doc:
                        sys.stdout.write(" %s\n" % doc.splitlines()[0])

    def do_show_layers(self, args):
        """show current configured layers"""
        self.init_bbhandler(config_only = True)
@@ -82,293 +103,6 @@ class Commands():
            logger.plain("%s %s %d" % (layername.ljust(20), layerdir.ljust(40), layerpri))


    def do_add_layer(self, args):
        """Add a layer to bblayers.conf

Adds the specified layer to bblayers.conf
"""
        layerdir = os.path.abspath(args.layerdir)
        if not os.path.exists(layerdir):
            sys.stderr.write("Specified layer directory doesn't exist\n")
            return 1

        layer_conf = os.path.join(layerdir, 'conf', 'layer.conf')
        if not os.path.exists(layer_conf):
            sys.stderr.write("Specified layer directory doesn't contain a conf/layer.conf file\n")
            return 1

        bblayers_conf = os.path.join('conf', 'bblayers.conf')
        if not os.path.exists(bblayers_conf):
            sys.stderr.write("Unable to find bblayers.conf\n")
            return 1

        (notadded, _) = bb.utils.edit_bblayers_conf(bblayers_conf, layerdir, None)
        if notadded:
            for item in notadded:
                sys.stderr.write("Specified layer %s is already in BBLAYERS\n" % item)


    def do_remove_layer(self, args):
        """Remove a layer from bblayers.conf

Removes the specified layer from bblayers.conf
"""
        bblayers_conf = os.path.join('conf', 'bblayers.conf')
        if not os.path.exists(bblayers_conf):
            sys.stderr.write("Unable to find bblayers.conf\n")
            return 1

        if args.layerdir.startswith('*'):
            layerdir = args.layerdir
        else:
            layerdir = os.path.abspath(args.layerdir)
        (_, notremoved) = bb.utils.edit_bblayers_conf(bblayers_conf, None, layerdir)
        if notremoved:
            for item in notremoved:
                sys.stderr.write("No layers matching %s found in BBLAYERS\n" % item)
            return 1


    def get_json_data(self, apiurl):
        proxy_settings = os.environ.get("http_proxy", None)
        conn = None
        _parsedurl = urlparse.urlparse(apiurl)
        path = _parsedurl.path
        query = _parsedurl.query
        def parse_url(url):
            parsedurl = urlparse.urlparse(url)
            if parsedurl.netloc[0] == '[':
                host, port = parsedurl.netloc[1:].split(']', 1)
                if ':' in port:
                    port = port.rsplit(':', 1)[1]
                else:
                    port = None
            else:
                if parsedurl.netloc.count(':') == 1:
                    (host, port) = parsedurl.netloc.split(":")
                else:
                    host = parsedurl.netloc
                    port = None
            return (host, 80 if port is None else int(port))

        if proxy_settings is None:
            host, port = parse_url(apiurl)
            conn = httplib.HTTPConnection(host, port)
            conn.request("GET", path + "?" + query)
        else:
            host, port = parse_url(proxy_settings)
            conn = httplib.HTTPConnection(host, port)
            conn.request("GET", apiurl)

        r = conn.getresponse()
        if r.status != 200:
            raise Exception("Failed to read " + path + ": %d %s" % (r.status, r.reason))
        return json.loads(r.read())

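get_json_data() simply issues GET requests against the REST API of a layer
index; the same endpoints can be inspected by hand. The index URL comes from
BBLAYERS_LAYERINDEX_URL; the public OpenEmbedded index is shown here as an
assumed example:

  $ curl http://layers.openembedded.org/layerindex/api/            # top-level API links
  $ curl http://layers.openembedded.org/layerindex/api/branches/   # the 'branches' data used above
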
    def get_layer_deps(self, layername, layeritems, layerbranches, layerdependencies, branchnum, selfname=False):
        def layeritems_info_id(items_name, layeritems):
            litems_id = None
            for li in layeritems:
                if li['name'] == items_name:
                    litems_id = li['id']
                    break
            return litems_id

        def layerbranches_info(items_id, layerbranches):
            lbranch = {}
            for lb in layerbranches:
                if lb['layer'] == items_id and lb['branch'] == branchnum:
                    lbranch['id'] = lb['id']
                    lbranch['vcs_subdir'] = lb['vcs_subdir']
                    break
            return lbranch

        def layerdependencies_info(lb_id, layerdependencies):
            ld_deps = []
            for ld in layerdependencies:
                if ld['layerbranch'] == lb_id and not ld['dependency'] in ld_deps:
                    ld_deps.append(ld['dependency'])
            if not ld_deps:
                logger.error("The dependency of layerDependencies is not found.")
            return ld_deps

        def layeritems_info_name_subdir(items_id, layeritems):
            litems = {}
            for li in layeritems:
                if li['id'] == items_id:
                    litems['vcs_url'] = li['vcs_url']
                    litems['name'] = li['name']
                    break
            return litems

        if selfname:
            selfid = layeritems_info_id(layername, layeritems)
            lbinfo = layerbranches_info(selfid, layerbranches)
            if lbinfo:
                selfsubdir = lbinfo['vcs_subdir']
            else:
                logger.error("%s is not found in the specified branch" % layername)
                return
            selfurl = layeritems_info_name_subdir(selfid, layeritems)['vcs_url']
            if selfurl:
                return selfurl, selfsubdir
            else:
                logger.error("Cannot get layer %s git repo and subdir" % layername)
                return
        ldict = {}
        itemsid = layeritems_info_id(layername, layeritems)
        if not itemsid:
            return layername, None
        lbid = layerbranches_info(itemsid, layerbranches)
        if lbid:
            lbid = layerbranches_info(itemsid, layerbranches)['id']
        else:
            logger.error("%s is not found in the specified branch" % layername)
            return None, None
        for dependency in layerdependencies_info(lbid, layerdependencies):
            lname = layeritems_info_name_subdir(dependency, layeritems)['name']
            lurl = layeritems_info_name_subdir(dependency, layeritems)['vcs_url']
            lsubdir = layerbranches_info(dependency, layerbranches)['vcs_subdir']
            ldict[lname] = lurl, lsubdir
        return None, ldict


    def get_fetch_layer(self, fetchdir, url, subdir, fetch_layer):
        layername = self.get_layer_name(url)
        if os.path.splitext(layername)[1] == '.git':
            layername = os.path.splitext(layername)[0]
        repodir = os.path.join(fetchdir, layername)
        layerdir = os.path.join(repodir, subdir)
        if not os.path.exists(repodir):
            if fetch_layer:
                result = subprocess.call('git clone %s %s' % (url, repodir), shell = True)
                if result:
                    logger.error("Failed to download %s" % url)
                    return None, None
                else:
                    return layername, layerdir
            else:
                logger.plain("Repository %s needs to be fetched" % url)
                return layername, layerdir
        elif os.path.exists(layerdir):
            return layername, layerdir
        else:
            logger.error("%s is not in %s" % (url, subdir))
            return None, None


    def do_layerindex_fetch(self, args):
        """Fetches a layer from a layer index along with its dependent layers, and adds them to conf/bblayers.conf.
"""
        self.init_bbhandler(config_only = True)
        apiurl = self.bbhandler.config_data.getVar('BBLAYERS_LAYERINDEX_URL', True)
        if not apiurl:
            logger.error("Cannot get BBLAYERS_LAYERINDEX_URL")
        else:
            if apiurl[-1] != '/':
                apiurl += '/'
            apiurl += "api/"
        apilinks = self.get_json_data(apiurl)
        branches = self.get_json_data(apilinks['branches'])

        branchnum = 0
        for branch in branches:
            if branch['name'] == args.branch:
                branchnum = branch['id']
                break
        if branchnum == 0:
            validbranches = ', '.join([branch['name'] for branch in branches])
            logger.error('Invalid layer branch name "%s". Valid branches: %s' % (args.branch, validbranches))
            return 1

        ignore_layers = []
        for collection in self.bbhandler.config_data.getVar('BBFILE_COLLECTIONS', True).split():
            lname = self.bbhandler.config_data.getVar('BBLAYERS_LAYERINDEX_NAME_%s' % collection, True)
            if lname:
                ignore_layers.append(lname)

        if args.ignore:
            ignore_layers.extend(args.ignore.split(','))

        layeritems = self.get_json_data(apilinks['layerItems'])
        layerbranches = self.get_json_data(apilinks['layerBranches'])
        layerdependencies = self.get_json_data(apilinks['layerDependencies'])
        invaluenames = []
        repourls = {}
        printlayers = []
        def query_dependencies(layers, layeritems, layerbranches, layerdependencies, branchnum):
            depslayer = []
            for layername in layers:
                invaluename, layerdict = self.get_layer_deps(layername, layeritems, layerbranches, layerdependencies, branchnum)
                if layerdict:
                    repourls[layername] = self.get_layer_deps(layername, layeritems, layerbranches, layerdependencies, branchnum, selfname=True)
                    for layer in layerdict:
                        if not layer in ignore_layers:
                            depslayer.append(layer)
                        printlayers.append((layername, layer, layerdict[layer][0], layerdict[layer][1]))
                        if not layer in ignore_layers and not layer in repourls:
                            repourls[layer] = (layerdict[layer][0], layerdict[layer][1])
                if invaluename and not invaluename in invaluenames:
                    invaluenames.append(invaluename)
            return depslayer

        depslayers = query_dependencies(args.layername, layeritems, layerbranches, layerdependencies, branchnum)
        while depslayers:
            depslayer = query_dependencies(depslayers, layeritems, layerbranches, layerdependencies, branchnum)
            depslayers = depslayer
        if invaluenames:
            for invaluename in invaluenames:
                logger.error('Layer "%s" not found in layer index' % invaluename)
            return 1
        logger.plain("%s %s %s %s" % ("Layer".ljust(19), "Required by".ljust(19), "Git repository".ljust(54), "Subdirectory"))
        logger.plain('=' * 115)
        for layername in args.layername:
            layerurl = repourls[layername]
            logger.plain("%s %s %s %s" % (layername.ljust(20), '-'.ljust(20), layerurl[0].ljust(55), layerurl[1]))
        printedlayers = []
        for layer, dependency, gitrepo, subdirectory in printlayers:
            if dependency in printedlayers:
                continue
            logger.plain("%s %s %s %s" % (dependency.ljust(20), layer.ljust(20), gitrepo.ljust(55), subdirectory))
            printedlayers.append(dependency)

        if repourls:
            fetchdir = self.bbhandler.config_data.getVar('BBLAYERS_FETCH_DIR', True)
            if not fetchdir:
                logger.error("Cannot get BBLAYERS_FETCH_DIR")
                return 1
            if not os.path.exists(fetchdir):
                os.makedirs(fetchdir)
            addlayers = []
            for repourl, subdir in repourls.values():
                name, layerdir = self.get_fetch_layer(fetchdir, repourl, subdir, not args.show_only)
                if not name:
                    # Error already shown
                    return 1
                addlayers.append((subdir, name, layerdir))
            if not args.show_only:
                for subdir, name, layerdir in set(addlayers):
                    if os.path.exists(layerdir):
                        if subdir:
                            logger.plain("Adding layer \"%s\" to conf/bblayers.conf" % subdir)
                        else:
                            logger.plain("Adding layer \"%s\" to conf/bblayers.conf" % name)
                        localargs = argparse.Namespace()
                        localargs.layerdir = layerdir
                        self.do_add_layer(localargs)
                    else:
                        break

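In practice do_layerindex_fetch() is driven from the command line; an assumed
example session (layer and branch names illustrative):

  $ bitbake-layers layerindex-fetch -b master meta-oe          # fetch a layer plus dependencies
  $ bitbake-layers layerindex-show-depends -b master meta-oe   # dependencies only, no fetch
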
    def do_layerindex_show_depends(self, args):
        """Find layer dependencies from layer index.
"""
        args.show_only = True
        args.ignore = []
        self.do_layerindex_fetch(args)


    def version_str(self, pe, pv, pr = None):
        verstr = "%s" % pv
        if pr:
@@ -381,13 +115,32 @@ Removes the specified layer from bblayers.conf
    def do_show_overlayed(self, args):
        """list overlayed recipes (where the same recipe exists in another layer)

usage: show-overlayed [-f] [-s]

Lists the names of overlayed recipes and the available versions in each
layer, with the preferred version first. Note that skipped recipes that
are overlayed will also be listed, with a " (skipped)" suffix.

Options:
  -f  instead of the default formatting, list filenames of higher priority
      recipes with the ones they overlay indented underneath
  -s  only list overlayed recipes where the version is the same
"""
        self.init_bbhandler()

        items_listed = self.list_recipes('Overlayed recipes', None, True, args.same_version, args.filenames, True)
        show_filenames = False
        show_same_ver_only = False
        for arg in args.split():
            if arg == '-f':
                show_filenames = True
            elif arg == '-s':
                show_same_ver_only = True
            else:
                sys.stderr.write("show-overlayed: invalid option %s\n" % arg)
                self.do_help('')
                return

        items_listed = self.list_recipes('Overlayed recipes', None, True, show_same_ver_only, show_filenames, True)

        # Check for overlayed .bbclass files
        classes = defaultdict(list)
@@ -414,7 +167,7 @@ are overlayed will also be listed, with a " (skipped)" suffix.
                    overlayed_class_found = True

                    mainfile = bb.utils.which(bbpath, os.path.join('classes', classfile))
                    if args.filenames:
                    if show_filenames:
                        logger.plain('%s' % mainfile)
                    else:
                        # We effectively have to guess the layer here
@@ -428,7 +181,7 @@ are overlayed will also be listed, with a " (skipped)" suffix.
                    for classdir in classdirs:
                        fullpath = os.path.join(classdir, classfile)
                        if fullpath != mainfile:
                            if args.filenames:
                            if show_filenames:
                                print(' %s' % fullpath)
                            else:
                                print(' %s' % self.get_layer_name(os.path.dirname(classdir)))
@@ -443,15 +196,38 @@ are overlayed will also be listed, with a " (skipped)" suffix.
    def do_show_recipes(self, args):
        """list available recipes, showing the layer they are provided by

Lists the names of recipes and the available versions in each
usage: show-recipes [-f] [-m] [pnspec]

Lists the names of overlayed recipes and the available versions in each
layer, with the preferred version first. Optionally you may specify
pnspec to match a specified recipe name (supports wildcards). Note that
skipped recipes will also be listed, with a " (skipped)" suffix.

Options:
  -f  instead of the default formatting, list filenames of higher priority
      recipes with other available recipes indented underneath
  -m  only list where multiple recipes (in the same layer or different
      layers) exist for the same recipe name
"""
        self.init_bbhandler()

        show_filenames = False
        show_multi_provider_only = False
        pnspec = None
        title = 'Available recipes:'
        self.list_recipes(title, args.pnspec, False, False, args.filenames, args.multiple)
        for arg in args.split():
            if arg == '-f':
                show_filenames = True
            elif arg == '-m':
                show_multi_provider_only = True
            elif not arg.startswith('-'):
                pnspec = arg
                title = 'Available recipes matching %s:' % pnspec
            else:
                sys.stderr.write("show-recipes: invalid option %s\n" % arg)
                self.do_help('')
                return
        self.list_recipes(title, pnspec, False, False, show_filenames, show_multi_provider_only)


    def list_recipes(self, title, pnspec, show_overlayed_only, show_same_ver_only, show_filenames, show_multi_provider_only):
@@ -501,13 +277,7 @@ skipped recipes will also be listed, with a " (skipped)" suffix.

            if len(allproviders[p]) > 1 or not show_multi_provider_only:
                pref = preferred_versions[p]
                realfn = bb.cache.Cache.virtualfn2realfn(pref[1])
                preffile = realfn[0]
                # We only display once per recipe, we should prefer non extended versions of the
                # recipe if present (so e.g. in OpenEmbedded, openssl rather than nativesdk-openssl
                # which would otherwise sort first).
                if realfn[1] and realfn[0] in self.bbhandler.cooker.recipecache.pkg_fn:
                    continue
                preffile = bb.cache.Cache.virtualfn2realfn(pref[1])[0]
                if preffile not in preffiles:
                    preflayer = self.get_file_layer(preffile)
                    multilayer = False
@@ -537,7 +307,9 @@ skipped recipes will also be listed, with a " (skipped)" suffix.


    def do_flatten(self, args):
        """flatten layer configuration into a separate output directory.
        """flattens layer configuration into a separate output directory.

usage: flatten [layer1 layer2 [layer3]...] <outputdir>

Takes the specified layers (or all layers in the current layer
configuration if none are specified) and builds a "flattened" directory
@@ -559,19 +331,26 @@ bbappends in the layers interact, and then attempt to use the new output
layer together with that other layer, you may no longer get the same
build results (as the layer priority order has effectively changed).
"""
        if len(args.layer) == 1:
            logger.error('If you specify layers to flatten you must specify at least two')
            return 1
        arglist = args.split()
        if len(arglist) < 1:
            logger.error('Please specify an output directory')
            self.do_help('flatten')
            return

        outputdir = args.outputdir
        if len(arglist) == 2:
            logger.error('If you specify layers to flatten you must specify at least two')
            self.do_help('flatten')
            return

        outputdir = arglist[-1]
        if os.path.exists(outputdir) and os.listdir(outputdir):
            logger.error('Directory %s exists and is non-empty, please clear it out first' % outputdir)
            return 1
            return

        self.init_bbhandler()
        layers = self.bblayers
        if len(args.layer) > 2:
            layernames = args.layer
        if len(arglist) > 2:
            layernames = arglist[:-1]
            found_layernames = []
            found_layerdirs = []
            for layerdir in layers:
@@ -720,12 +499,14 @@ build results (as the layer priority order has effectively changed).
    def do_show_appends(self, args):
        """list bbappend files and recipe files they apply to

Lists recipes with the bbappends that apply to them as subitems.
usage: show-appends

Recipes are listed with the bbappends that apply to them as subitems.
"""
        self.init_bbhandler()
        if not self.bbhandler.cooker.collection.appendlist:
            logger.plain('No append files found')
            return 0
            return

        logger.plain('=== Appended recipes ===')

@@ -764,6 +545,7 @@ Lists recipes with the bbappends that apply to them as subitems.
            if best_filename in missing:
                logger.warn('%s: missing append for preferred version',
                            best_filename)
                self.returncode |= 1


    def get_appends_for_files(self, filenames):
@@ -782,13 +564,29 @@ Lists recipes with the bbappends that apply to them as subitems.
        return appended, notappended

    def do_show_cross_depends(self, args):
        """Show dependencies between recipes that cross layer boundaries.
        """figure out the dependency between recipes that crosses a layer boundary.

Figure out the dependencies between recipes that cross layer boundaries.
usage: show-cross-depends [-f] [-i layer1[,layer2[,layer3...]]]

NOTE: .bbappend files can impact the dependencies.
Figure out the dependency between recipes that crosses a layer boundary.

Options:
  -f  show full file path
  -i  ignore dependencies on items in the specified layer(s)

NOTE:
The .bbappend file can impact the dependency.
"""
        ignore_layers = (args.ignore or '').split(',')
        import optparse

        parser = optparse.OptionParser(usage="show-cross-depends [-f] [-i layer1[,layer2[,layer3...]]]")
        parser.add_option("-f", "",
                          action="store_true", dest="show_filenames")
        parser.add_option("-i", "",
                          action="store", dest="ignore_layers", default="")

        options, args = parser.parse_args(sys.argv)
        ignore_layers = options.ignore_layers.split(',')

        self.init_bbhandler()

@@ -814,7 +612,7 @@ NOTE: .bbappend files can impact the dependencies.
                                                             self.bbhandler.config_data,
                                                             self.bbhandler.cooker_data,
                                                             self.bbhandler.cooker_data.pkg_pn)
                self.check_cross_depends("DEPENDS", layername, f, best[3], args.filenames, ignore_layers)
                self.check_cross_depends("DEPENDS", layername, f, best[3], options.show_filenames, ignore_layers)

            # The RDPENDS
            all_rdeps = self.bbhandler.cooker_data.rundeps[f].values()
@@ -834,7 +632,7 @@ NOTE: .bbappend files can impact the dependencies.
                    best = bb.providers.filterProvidersRunTime(all_p, rdep,
                                                               self.bbhandler.config_data,
                                                               self.bbhandler.cooker_data)[0][0]
                    self.check_cross_depends("RDEPENDS", layername, f, best, args.filenames, ignore_layers)
                    self.check_cross_depends("RDEPENDS", layername, f, best, options.show_filenames, ignore_layers)

            # The RRECOMMENDS
            all_rrecs = self.bbhandler.cooker_data.runrecs[f].values()
@@ -854,7 +652,7 @@ NOTE: .bbappend files can impact the dependencies.
                    best = bb.providers.filterProvidersRunTime(all_p, rrec,
                                                               self.bbhandler.config_data,
                                                               self.bbhandler.cooker_data)[0][0]
                    self.check_cross_depends("RRECOMMENDS", layername, f, best, args.filenames, ignore_layers)
                    self.check_cross_depends("RRECOMMENDS", layername, f, best, options.show_filenames, ignore_layers)

            # The inherit class
            cls_re = re.compile('classes/')
@@ -869,7 +667,7 @@ NOTE: .bbappend files can impact the dependencies.
                        continue
                    inherit_layername = self.get_file_layer(cls)
                    if inherit_layername != layername and not inherit_layername in ignore_layers:
                        if not args.filenames:
                        if not options.show_filenames:
                            f_short = self.remove_layer_prefix(f)
                            cls = self.remove_layer_prefix(cls)
                        else:
@@ -889,7 +687,7 @@ NOTE: .bbappend files can impact the dependencies.
                if pv_re.search(needed_file) and f in self.bbhandler.cooker_data.pkg_pepvpr:
                    pv = self.bbhandler.cooker_data.pkg_pepvpr[f][1]
                    needed_file = re.sub(r"\${PV}", pv, needed_file)
                self.print_cross_files(bbpath, keyword, layername, f, needed_file, args.filenames, ignore_layers)
                self.print_cross_files(bbpath, keyword, layername, f, needed_file, options.show_filenames, ignore_layers)
                line = fnfile.readline()
            fnfile.close()

@@ -916,7 +714,7 @@ NOTE: .bbappend files can impact the dependencies.
                            bbclass=".bbclass"
                        # Find a 'require/include xxxx'
                        if m:
                            self.print_cross_files(bbpath, keyword, layername, f, m.group(1) + bbclass, args.filenames, ignore_layers)
                            self.print_cross_files(bbpath, keyword, layername, f, m.group(1) + bbclass, options.show_filenames, ignore_layers)
                        line = ffile.readline()
                    ffile.close()

@@ -956,84 +754,5 @@ NOTE: .bbappend files can impact the dependencies.

        logger.plain("%s %s %s" % (f, keyword, best_realfn))


def main():

    cmds = Commands()

    def add_command(cmdname, function, *args, **kwargs):
        # Convert docstring for function to help (one-liner shown in main --help) and description (shown in subcommand --help)
        docsplit = function.__doc__.splitlines()
        help = docsplit[0]
        if len(docsplit) > 1:
            desc = '\n'.join(docsplit[1:])
        else:
            desc = help
        subparser = subparsers.add_parser(cmdname, *args, help=help, description=desc, formatter_class=argparse.RawTextHelpFormatter, **kwargs)
        subparser.set_defaults(func=function)
        return subparser

    parser = argparse.ArgumentParser(description="BitBake layers utility",
                                     epilog="Use %(prog)s <subcommand> --help to get help on a specific command")
    parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
    parser.add_argument('-q', '--quiet', help='Print only errors', action='store_true')
    subparsers = parser.add_subparsers(title='subcommands', metavar='<subcommand>')

    parser_show_layers = add_command('show-layers', cmds.do_show_layers)

    parser_add_layer = add_command('add-layer', cmds.do_add_layer)
    parser_add_layer.add_argument('layerdir', help='Layer directory to add')

    parser_remove_layer = add_command('remove-layer', cmds.do_remove_layer)
    parser_remove_layer.add_argument('layerdir', help='Layer directory to remove (wildcards allowed, enclose in quotes to avoid shell expansion)')
    parser_remove_layer.set_defaults(func=cmds.do_remove_layer)

    parser_show_overlayed = add_command('show-overlayed', cmds.do_show_overlayed)
    parser_show_overlayed.add_argument('-f', '--filenames', help='instead of the default formatting, list filenames of higher priority recipes with the ones they overlay indented underneath', action='store_true')
    parser_show_overlayed.add_argument('-s', '--same-version', help='only list overlayed recipes where the version is the same', action='store_true')

    parser_show_recipes = add_command('show-recipes', cmds.do_show_recipes)
    parser_show_recipes.add_argument('-f', '--filenames', help='instead of the default formatting, list filenames of higher priority recipes with the ones they overlay indented underneath', action='store_true')
    parser_show_recipes.add_argument('-m', '--multiple', help='only list where multiple recipes (in the same layer or different layers) exist for the same recipe name', action='store_true')
    parser_show_recipes.add_argument('pnspec', nargs='?', help='optional recipe name specification (wildcards allowed, enclose in quotes to avoid shell expansion)')
|
||||
|
||||
parser_show_appends = add_command('show-appends', cmds.do_show_appends)
|
||||
|
||||
parser_flatten = add_command('flatten', cmds.do_flatten)
|
||||
parser_flatten.add_argument('layer', nargs='*', help='Optional layer(s) to flatten (otherwise all are flattened)')
|
||||
parser_flatten.add_argument('outputdir', help='Output directory')
|
||||
|
||||
parser_show_cross_depends = add_command('show-cross-depends', cmds.do_show_cross_depends)
|
||||
parser_show_cross_depends.add_argument('-f', '--filenames', help='show full file path', action='store_true')
|
||||
parser_show_cross_depends.add_argument('-i', '--ignore', help='ignore dependencies on items in the specified layer(s) (split multiple layer names with commas, no spaces)', metavar='LAYERNAME')
|
||||
|
||||
parser_layerindex_fetch = add_command('layerindex-fetch', cmds.do_layerindex_fetch)
|
||||
parser_layerindex_fetch.add_argument('-n', '--show-only', help='show dependencies and do nothing else', action='store_true')
|
||||
parser_layerindex_fetch.add_argument('-b', '--branch', help='branch name to fetch (default %(default)s)', default='master')
|
||||
parser_layerindex_fetch.add_argument('-i', '--ignore', help='assume the specified layers do not need to be fetched/added (separate multiple layers with commas, no spaces)', metavar='LAYER')
|
||||
parser_layerindex_fetch.add_argument('layername', nargs='+', help='layer to fetch')
|
||||
|
||||
parser_layerindex_show_depends = add_command('layerindex-show-depends', cmds.do_layerindex_show_depends)
|
||||
parser_layerindex_show_depends.add_argument('-b', '--branch', help='branch name to fetch (default %(default)s)', default='master')
|
||||
parser_layerindex_show_depends.add_argument('layername', nargs='+', help='layer to query')
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
if args.debug:
|
||||
logger.setLevel(logging.DEBUG)
|
||||
elif args.quiet:
|
||||
logger.setLevel(logging.ERROR)
|
||||
|
||||
ret = args.func(args)
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
ret = main()
|
||||
except Exception:
|
||||
ret = 1
|
||||
import traceback
|
||||
traceback.print_exc(5)
|
||||
sys.exit(ret)
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main(sys.argv[1:]) or 0)
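
The removed `main()` above wires each `Commands` method into argparse by lifting help text straight from the method's docstring. A minimal, self-contained sketch of that docstring-to-help pattern (the command names here are illustrative, not bitbake's):

```python
import argparse

def do_greet(args):
    """Print a greeting.

Longer text placed here becomes the subcommand's --help description."""
    print("hello,", args.name)

parser = argparse.ArgumentParser(description="demo utility")
subparsers = parser.add_subparsers(title='subcommands', metavar='<subcommand>')

def add_command(cmdname, function):
    # First docstring line -> one-liner in the top-level --help;
    # remaining lines -> long description shown by '<subcommand> --help'.
    docsplit = function.__doc__.splitlines()
    desc = '\n'.join(docsplit[1:]) or docsplit[0]
    subparser = subparsers.add_parser(cmdname, help=docsplit[0], description=desc,
                                      formatter_class=argparse.RawTextHelpFormatter)
    subparser.set_defaults(func=function)
    return subparser

add_command('greet', do_greet).add_argument('name')
args = parser.parse_args(['greet', 'world'])
args.func(args)
```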

@@ -34,7 +34,7 @@ from bb.ui.crumbs.hig.deployimagedialog import DeployImageDialog
from bb.ui.crumbs.hig.imageselectiondialog import ImageSelectionDialog

# All the filesystem types bitbake supports are listed here. Needs more testing.
DEPLOYABLE_IMAGE_TYPES = ["jffs2", "cramfs", "ext2", "ext3", "ext4", "btrfs", "squashfs", "ubi", "vmdk"]
DEPLOYABLE_IMAGE_TYPES = ["jffs2", "cramfs", "ext2", "ext3", "btrfs", "squashfs", "ubi", "vmdk"]
Title = "USB Image Writer"

class DeployWindow(gtk.Window):

@@ -37,7 +37,7 @@ function webserverKillAll()
kill -SIGTERM -$(< ${pidfile}) 2>/dev/null
sleep 1;
# Kill processes if they are still running - may happen in interactive shells
ps fux | grep "python.*manage.py runserver" | awk '{print $2}' | xargs kill
ps fux | grep "python.*manage.py" | awk '{print $2}' | xargs kill
done;
rm ${pidfile}
fi
@@ -68,14 +68,12 @@ function webserverStartAll()
python $BBBASEDIR/lib/toaster/manage.py checksettings --traceback || retval=1
fi
if [ $retval -eq 0 ]; then
echo "Starting webserver..."
python $BBBASEDIR/lib/toaster/manage.py runserver "0.0.0.0:$WEB_PORT" </dev/null >>${BUILDDIR}/toaster_web.log 2>&1 & echo $! >${BUILDDIR}/.toastermain.pid
echo "Starting webserver"
python $BBBASEDIR/lib/toaster/manage.py runserver 0.0.0.0:8000 </dev/null >${BUILDDIR}/toaster_web.log 2>&1 & echo $! >${BUILDDIR}/.toastermain.pid
sleep 1
if ! cat "${BUILDDIR}/.toastermain.pid" | xargs -I{} kill -0 {} ; then
retval=1
rm "${BUILDDIR}/.toastermain.pid"
else
echo "Webserver address: http://0.0.0.0:$WEB_PORT/"
fi
fi
return $retval
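
The `kill -0` pipeline above only tests that the PID recorded in `.toastermain.pid` still belongs to a live process; it delivers no signal. The same probe sketched in Python (the pid file path is illustrative):

```python
import os

def pid_alive(pidfile):
    """True if the PID stored in pidfile refers to a running process."""
    try:
        with open(pidfile) as f:
            pid = int(f.read().strip())
        os.kill(pid, 0)  # signal 0: existence/permission check only, nothing is sent
        return True
    except (EnvironmentError, ValueError):
        return False

print(pid_alive("/tmp/build/.toastermain.pid"))
```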
@@ -85,10 +83,8 @@ function webserverStartAll()

function addtoConfiguration()
{
file=$1
shift
echo "#Created by toaster start script" > ${BUILDDIR}/conf/$file
for var in "$@"; do echo $var >> ${BUILDDIR}/conf/$file; done
echo "#Created by toaster start script" > ${BUILDDIR}/conf/$2
echo $1 >> ${BUILDDIR}/conf/$2
}
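
In the newer variant, `addtoConfiguration` takes the fragment file name first and then any number of configuration lines, e.g. `addtoConfiguration toaster.conf "INHERIT+=\"toaster buildhistory\"" $TOASTER_BRBE`. A rough Python rendering of the same write, assuming the usual `conf/` layout under the build directory:

```python
import os

def add_to_configuration(builddir, filename, *lines):
    """Recreate conf/<filename> with a marker comment plus the given lines."""
    path = os.path.join(builddir, "conf", filename)
    with open(path, "w") as f:
        f.write("#Created by toaster start script\n")
        for line in lines:
            if line:  # skip empty fragments, e.g. an unset TOASTER_BRBE
                f.write(line + "\n")

# add_to_configuration("/tmp/build", "toaster.conf", 'INHERIT+="toaster buildhistory"')
```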

INSTOPSYSTEM=0
@@ -103,7 +99,7 @@ function stop_system()
kill $(< ${BUILDDIR}/.toasterui.pid ) 2>/dev/null
rm ${BUILDDIR}/.toasterui.pid
fi
BBSERVER=0.0.0.0:-1 bitbake -m
BBSERVER=0.0.0.0:8200 bitbake -m
unset BBSERVER
webserverKillAll
# force stop any misbehaving bitbake server
@@ -126,130 +122,43 @@ function notify_chldexit() {
}


function verify_prereq() {
# Verify prerequisites

if ! echo "import django; print (1,) == django.VERSION[0:1] and django.VERSION[1:2][0] in (6,)" | python 2>/dev/null | grep True >/dev/null; then
echo -e "This program needs Django 1.6. Please install with\n\npip install django==1.6\n"
return 2
fi

if ! echo "import south; print reduce(lambda x, y: 2 if x==2 else 0 if x == 0 else y, map(lambda x: 1+cmp(x[1]-x[0],0), zip([0,8,4], map(int,south.__version__.split(\".\"))))) > 0" | python 2>/dev/null | grep True >/dev/null; then
echo -e "This program needs South 0.8.4. Please install with\n\npip install south==0.8.4\n"
return 2
fi
return 0
}
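
Those two inline `python | grep True` probes are dense: the first asserts Django 1.6.x exactly, and the second effectively computes `south.__version__ >= 0.8.4`. Written out plainly (a readability sketch, not the script's actual code; it assumes both packages are importable):

```python
import django
import south

# Django must be exactly the 1.6 series
django_ok = django.VERSION[:2] == (1, 6)

# South must be 0.8.4 or newer
south_ok = tuple(int(p) for p in south.__version__.split(".")) >= (0, 8, 4)

print(django_ok and south_ok)
```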


# read command line parameters

BBBASEDIR=`dirname ${BASH_SOURCE}`/..
RUNNING=0

NOTOASTERUI=0
WEBSERVER=1
TOASTER_BRBE=""
WEB_PORT="8000"

for param in $*; do
case $param in
noui )
NOTOASTERUI=1
;;
noweb )
WEBSERVER=0
;;
brbe=* )
TOASTER_BRBE=$'\n'"TOASTER_BRBE=\""${param#*=}"\""
;;
webport=*)
WEB_PORT="${param#*=}"
esac
done


if [ -z "$ZSH_NAME" ] && [ `basename \"$0\"` = `basename \"$BASH_SOURCE\"` ]; then
# We are called as standalone. We refuse to run in a build environment - we need the interactive mode for that.
# Start just the web server, point the web browser to the interface, and start any Django services.

if ! verify_prereq; then
echo -e "Error: Could not verify that the needed dependencies are installed. Please use virtualenv and pip to install dependencies listed in toaster-requirements.txt" 1>&2;
exit 1;
fi

if [ -n "$BUILDDIR" ]; then
echo -e "Error: It looks like you sourced oe-init-build-env. Toaster cannot start in build mode from an oe-core build environment.\n You should be starting Toaster from a new terminal window." 1>&2;
echo -e "Error: build/ directory detected. Toaster will not start in managed mode if a build environment is detected.\nUse a clean terminal to start Toaster." 1>&2;
exit 1;
fi

if [ "x`which daemon`" == "x" ]; then
echo -e "Failed dependency; toaster needs the 'daemon' program in order to be able to start builds'. Please install the 'daemon' program from your distribution repositories or http://www.libslack.org/daemon/" 1>&2;
exit 1;
fi

# Define a fake builddir where only the pid files are actually created. No real builds will take place here.
BUILDDIR=/tmp/toaster_$$
if [ -d "$BUILDDIR" ]; then
echo -e "Previous toaster run directory $BUILDDIR found, cowardly refusing to start. Please remove the directory when that toaster instance is over" 2>&1
exit 1;
fi

mkdir -p "$BUILDDIR"

BUILDDIR=/tmp
RUNNING=1
function trap_ctrlc() {
echo "** Stopping system"
webserverKillAll
RUNNING=0
}

function do_cleanup() {
find "$BUILDDIR" -type f | xargs rm
rmdir "$BUILDDIR"
}
function cleanup() {
if grep -ir error "$BUILDDIR" >/dev/null; then
if grep -irn "That port is already in use" "$BUILDDIR"; then
echo "You can use the \"webport=PORTNUMBER\" parameter to start Toaster on a different port (port $WEB_PORT is already in use)"
do_cleanup
else
echo -e "\nErrors found in the Toaster log files present in '$BUILDDIR'. Directory will not be cleaned.\n Please review the errors and notify toaster@yoctoproject.org or submit a bug https://bugzilla.yoctoproject.org/enter_bug.cgi?product=Toaster"
fi
else
echo "No errors found, removing the run directory '$BUILDDIR'"
do_cleanup
fi;
}
TOASTER_MANAGED=1
export TOASTER_MANAGED=1
if [ $WEBSERVER -gt 0 ] && ! webserverStartAll; then
if ! webserverStartAll; then
echo "Failed to start the web server, stopping" 1>&2;
cleanup
exit 1;
fi
if [ $WEBSERVER -gt 0 ]; then
echo "Starting browser..."
xdg-open http://127.0.0.1:$WEB_PORT/ >/dev/null 2>&1 &
fi
xdg-open http://0.0.0.0:8000/ >/dev/null 2>&1 &
trap trap_ctrlc SIGINT
echo "Toaster is now running. You can stop it with Ctrl-C"
echo "Running. Stop with Ctrl-C"
while [ $RUNNING -gt 0 ]; do
python $BBBASEDIR/lib/toaster/manage.py runbuilds 2>&1 | tee -a "$BUILDDIR/toaster.log"
sleep 1
python $BBBASEDIR/lib/toaster/manage.py runbuilds
sleep 1
done
cleanup
echo "**** Exit"
exit 0
fi


if ! verify_prereq; then
echo -e "Error: Could not verify that the needed dependencies are installed. Please use virtualenv and pip to install dependencies listed in toaster-requirements.txt" 1>&2;
return 1;
fi


# We make sure we're running in the current shell and in a good environment
if [ -z "$BUILDDIR" ] || [ -z `which bitbake` ]; then
echo "Error: Build environment is not setup or bitbake is not in path." 1>&2;
@@ -257,6 +166,23 @@ if [ -z "$BUILDDIR" ] || [ -z `which bitbake` ]; then
fi


# Verify prerequisites

if ! echo "import django; print (1,) == django.VERSION[0:1] and django.VERSION[1:2][0] in (5,6)" | python 2>/dev/null | grep True >/dev/null; then
echo -e "This program needs Django 1.5 or 1.6. Please install with\n\npip install django==1.6"
return 2
fi

if ! echo "import south; print [0,8,4] == map(int,south.__version__.split(\".\"))" | python 2>/dev/null | grep True >/dev/null; then
echo -e "This program needs South 0.8.4. Please install with\n\npip install south==0.8.4"
return 2
fi


# Determine the action. If specified by arguments, fine, if not, toggle it
if [ "x$1" == "xstart" ] || [ "x$1" == "xstop" ]; then
CMD="$1"
@@ -268,6 +194,19 @@ else
fi;
fi

NOTOASTERUI=0
WEBSERVER=1
for param in $*; do
case $param in
noui )
NOTOASTERUI=1
;;
noweb )
WEBSERVER=0
;;
esac
done

echo "The system will $CMD."

# Make sure it's safe to run by checking bitbake lock
@@ -277,54 +216,42 @@ if [ -e $BUILDDIR/bitbake.lock ]; then
(flock -n 200 ) 200<$BUILDDIR/bitbake.lock || lock=0
fi

if [ ${CMD} == "start" ] && [ $lock -eq 0 ]; then
echo "Error: bitbake lock state error. File locks show that the system is on." 1>&2
echo "Please wait for the current build to finish, stop and then start the system again." 1>&2
if [ ${CMD} == "start" ] && ( [ $lock -eq 0 ] || [ -e $BUILDDIR/.toastermain.pid ] ); then
echo "Error: bitbake lock state error. File locks show that the system is on." 2>&1
echo "If you see problems, stop and then start the system again." 2>&1
return 3
fi

if [ ${CMD} == "start" ] && [ -e $BUILDDIR/.toastermain.pid ] && kill -0 `cat $BUILDDIR/.toastermain.pid`; then
echo "Warning: bitbake appears to be dead, but the Toaster web server is running. Something fishy is going on." 1>&2
echo "Cleaning up the web server to start from a clean slate."
webserverKillAll
fi


# Execute the commands

case $CMD in
start )
start_success=1
addtoConfiguration toaster.conf "INHERIT+=\"toaster buildhistory\"" $TOASTER_BRBE
addtoConfiguration "INHERIT+=\"toaster buildhistory\"" toaster.conf
if [ $WEBSERVER -gt 0 ] && ! webserverStartAll; then
echo "Failed ${CMD}."
return 4
fi
unset BBSERVER
PREREAD=""
if [ -e ${BUILDDIR}/conf/toaster-pre.conf ]; then
rm ${BUILDDIR}/conf/toaster-pre.conf
fi
bitbake $PREREAD --postread conf/toaster.conf --server-only -t xmlrpc -B 0.0.0.0:0
bitbake --postread conf/toaster.conf --server-only -t xmlrpc -B 0.0.0.0:8200
if [ $? -ne 0 ]; then
start_success=0
echo "Bitbake server start failed"
else
export BBSERVER=0.0.0.0:-1
export BBSERVER=0.0.0.0:8200
if [ $NOTOASTERUI == 0 ]; then # we start the TOASTERUI only if not inhibited
bitbake --observe-only -u toasterui >>${BUILDDIR}/toaster_ui.log 2>&1 & echo $! >${BUILDDIR}/.toasterui.pid
bitbake --observe-only -u toasterui >${BUILDDIR}/toaster_ui.log 2>&1 & echo $! >${BUILDDIR}/.toasterui.pid
fi
fi
if [ $start_success -eq 1 ]; then
# set fail safe stop system on terminal exit
trap stop_system SIGHUP
echo "Successful ${CMD}."
return 0
else
# failed start, do stop
stop_system
echo "Failed ${CMD}."
return 1
fi
# stop system on terminal exit
set -o monitor

@@ -1,179 +0,0 @@
#!/usr/bin/env python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (C) 2014 Alex Damian
#
# This file re-uses code spread throughout other Bitbake source files.
# As such, all other copyrights belong to their own right holders.
#
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.


# This command takes a filename as a single parameter. The filename is read
# as a build eventlog, and the ToasterUI is used to process events in the file
# and log data in the database

import os
import sys, logging

# mangle syspath to allow easy import of modules
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
'lib'))

import bb.cooker
from bb.ui import toasterui
import sys
import logging

logger = logging.getLogger(__name__)
console = logging.StreamHandler(sys.stdout)
format_str = "%(levelname)s: %(message)s"
logging.basicConfig(format=format_str)

import json, pickle

class FileReadEventsServerConnection():
""" Emulates a connection to a bitbake server that feeds
events that are actually read from a saved log file.
"""

class MockConnection():
""" fill-in for the proxy to the server. we just return generic data
"""
def __init__(self, sc):
self._sc = sc

def runCommand(self, commandArray):
""" emulates running a command on the server; only read-only commands are accepted """
command_name = commandArray[0]

if command_name == "getVariable":
if commandArray[1] in self._sc._variables:
return (self._sc._variables[commandArray[1]]['v'], None)
return (None, "Missing variable")

elif command_name == "getAllKeysWithFlags":
dump = {}
flaglist = commandArray[1]
for k in self._sc._variables.keys():
try:
if not k.startswith("__"):
v = self._sc._variables[k]['v']
dump[k] = {
'v' : v ,
'history' : self._sc._variables[k]['history'],
}
for d in flaglist:
dump[k][d] = self._sc._variables[k][d]
except Exception as e:
print(e)
return (dump, None)
else:
raise Exception("Command %s not implemented" % commandArray[0])

def terminateServer(self):
""" do not do anything """
pass


class EventReader():
def __init__(self, sc):
self._sc = sc
self.firstraise = 0

def _create_event(self, line):
def _import_class(name):
assert len(name) > 0
assert "." in name, name

components = name.strip().split(".")
modulename = ".".join(components[:-1])
moduleklass = components[-1]

module = __import__(modulename, fromlist=[str(moduleklass)])
return getattr(module, moduleklass)

# we build a toaster event out of current event log line
try:
event_data = json.loads(line.strip())
event_class = _import_class(event_data['class'])
event_object = pickle.loads(json.loads(event_data['vars']))
except ValueError as e:
print("Failed loading ", line)
raise e

if not isinstance(event_object, event_class):
raise Exception("Error loading objects %s class %s ", event_object, event_class)

return event_object

def waitEvent(self, timeout):

nextline = self._sc._eventfile.readline()
if len(nextline) == 0:
# the build data ended, while toasterui still waits for events.
# this happens when the server was abruptly stopped, so we simulate this
self.firstraise += 1
if self.firstraise == 1:
raise KeyboardInterrupt()
else:
return None
else:
self._sc.lineno += 1
return self._create_event(nextline)

def _readVariables(self, variableline):
self._variables = json.loads(variableline.strip())['allvariables']

def __init__(self, file_name):
self.connection = FileReadEventsServerConnection.MockConnection(self)
self._eventfile = open(file_name, "r")

# we expect to have the variable dump at the start of the file
self.lineno = 1
self._readVariables(self._eventfile.readline())

self.events = FileReadEventsServerConnection.EventReader(self)


class MockConfigParameters():
""" stand-in for cookerdata.ConfigParameters; as we don't really config a cooker, this
serves just to supply needed interfaces for the toaster ui to work """
def __init__(self):
self.observe_only = True # we can only read files

# run toaster ui on our mock bitbake class
if __name__ == "__main__":
if len(sys.argv) < 2:
logger.error("Usage: %s event.log " % sys.argv[0])
sys.exit(1)

file_name = sys.argv[-1]
mock_connection = FileReadEventsServerConnection(file_name)
configParams = MockConfigParameters()

# run the main program
toasterui.main(mock_connection.connection, mock_connection.events, configParams)
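
The replay script expects the first eventlog line to be a JSON dump of all variables, and every following line to be a JSON object whose `class` names the event type and whose `vars` holds a JSON-wrapped pickle of the event instance. A sketch of one write/read round trip (Python 2, like the surrounding code; the event class is a stand-in):

```python
import json
import pickle

class DummyEvent(object):
    def __init__(self, msg):
        self.msg = msg

# writing one line, mirroring what the cooker-side event log writer emits
event = DummyEvent("build started")
line = json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
                   "vars": json.dumps(pickle.dumps(event))})

# reading it back, mirroring EventReader._create_event (with an explicit
# encode added so the unpickle also works on the unicode json returns)
event_data = json.loads(line)
restored = pickle.loads(json.loads(event_data["vars"]).encode("utf-8"))
print(event_data["class"], restored.msg)
```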

@@ -11,7 +11,7 @@
# validate: validates
# clean: removes files
#
# The Makefile generates an HTML version of every document. The
# The Makefile generates an HTML and PDF version of every document. The
# variable DOC indicates the folder name for a given manual.
#
# To build a manual, you must invoke 'make' with the DOC argument.
@@ -21,8 +21,8 @@
# make DOC=bitbake-user-manual
# make pdf DOC=bitbake-user-manual
#
# The first example generates the HTML version of the User Manual.
# The second example generates the PDF version of the User Manual.
# The first example generates the HTML and PDF versions of the User Manual.
# The second example generates the HTML version only of the User Manual.
#

ifeq ($(DOC),bitbake-user-manual)
@@ -31,9 +31,9 @@ XSLTOPTS = --stringparam html.stylesheet bitbake-user-manual-style.css \
--stringparam section.autolabel 1 \
--stringparam section.label.includes.component.label 1 \
--xinclude
ALLPREQ = html tarball
TARFILES = bitbake-user-manual-style.css bitbake-user-manual.html figures/bitbake-title.png
MANUALS = $(DOC)/$(DOC).html
ALLPREQ = html pdf tarball
TARFILES = bitbake-user-manual-style.css bitbake-user-manual.html bitbake-user-manual.pdf figures/bitbake-title.png
MANUALS = $(DOC)/$(DOC).html $(DOC)/$(DOC).pdf
FIGURES = figures
STYLESHEET = $(DOC)/*.css


@@ -345,7 +345,7 @@
A special value of "now" causes the checkout to
be updated on every build.
</para></listitem>
<listitem><para><emphasis><link linkend='var-CVSDIR'><filename>CVSDIR</filename></link>:</emphasis>
<listitem><para><emphasis><filename>CVSDIR</filename>:</emphasis>
Specifies where a temporary checkout is saved.
The location is often <filename>DL_DIR/cvs</filename>.
</para></listitem>
@@ -394,8 +394,7 @@
Effectively, you are renaming the output directory
to which the module is unpacked.
You are forcing the module into a special
directory relative to
<link linkend='var-CVSDIR'><filename>CVSDIR</filename></link>.
directory relative to <filename>CVSDIR</filename>.
</para></listitem>
<listitem><para><emphasis>"rsh"</emphasis>
Used in conjunction with the "method" parameter.
@@ -436,9 +435,9 @@
The executable used is specified by
<filename>FETCHCMD_svn</filename>, which defaults
to "svn".
The fetcher's temporary working directory is set by
<link linkend='var-SVNDIR'><filename>SVNDIR</filename></link>,
which is usually <filename>DL_DIR/svn</filename>.
The fetcher's temporary working directory is set
by <filename>SVNDIR</filename>, which is usually
<filename>DL_DIR/svn</filename>.
</para>

<para>
@@ -492,9 +491,8 @@
This fetcher submodule fetches code from the Git
source control system.
The fetcher works by creating a bare clone of the
remote into
<link linkend='var-GITDIR'><filename>GITDIR</filename></link>,
which is usually <filename>DL_DIR/git2</filename>.
remote into <filename>GITDIR</filename>, which is
usually <filename>DL_DIR/git2</filename>.
This bare clone is then cloned into the work directory during the
unpack stage when a specific tree is checked out.
This is done using alternates and by reference to

@@ -471,7 +471,7 @@ ERROR: Unable to parse base: ParseError in configuration INHERITs: Could not inh
Time: 00:00:00
Parsing of 1 .bb files complete (0 cached, 1 parsed). 1 targets, 0 skipped, 0 masked, 0 errors.
NOTE: Resolving any missing task queue dependencies
NOTE: Preparing RunQueue
NOTE: Preparing runqueue
NOTE: Executing RunQueue Tasks
********************
* *

@@ -1140,6 +1140,8 @@
<filename>DISPLAY</filename> variable.
Here is an example:
<literallayout class='monospaced'>
BB_ORIGENV - add example?

origenv = d.getVar("BB_ORIGENV", False)
bar = origenv.getVar("BAR", False)
</literallayout>

@@ -43,8 +43,8 @@
<link linkend='var-DEFAULT_PREFERENCE'>D</link>
<link linkend='var-EXCLUDE_FROM_WORLD'>E</link>
<link linkend='var-FAKEROOT'>F</link>
<link linkend='var-GITDIR'>G</link>
<link linkend='var-HGDIR'>H</link>
<!-- <link linkend='var-GROUPADD_PARAM'>G</link> -->
<link linkend='var-HOMEPAGE'>H</link>
<!-- <link linkend='var-ICECC_DISABLED'>I</link> -->
<!-- <link linkend='var-glossary-j'>J</link> -->
<!-- <link linkend='var-KARCH'>K</link> -->
@@ -1154,15 +1154,6 @@
</glossdef>
</glossentry>

<glossentry id='var-BZRDIR'><glossterm>BZRDIR</glossterm>
<glossdef>
<para>
The directory in which files checked out of a Bazaar
system are stored.
</para>
</glossdef>
</glossentry>

</glossdiv>

<glossdiv id='var-glossary-c'><title>C</title>
@@ -1177,15 +1168,6 @@
</glossdef>
</glossentry>

<glossentry id='var-CVSDIR'><glossterm>CVSDIR</glossterm>
<glossdef>
<para>
The directory in which files checked out under the
CVS system are stored.
</para>
</glossdef>
</glossentry>

</glossdiv>

<glossdiv id='var-glossary-d'><title>D</title>
@@ -1430,32 +1412,13 @@

</glossdiv>


<!--
<glossdiv id='var-glossary-g'><title>G</title>

<glossentry id='var-GITDIR'><glossterm>GITDIR</glossterm>
<glossdef>
<para>
The directory in which a local copy of a Git repository
is stored when it is cloned.
</para>
</glossdef>
</glossentry>

</glossdiv>

-->

<glossdiv id='var-glossary-h'><title>H</title>

<glossentry id='var-HGDIR'><glossterm>HGDIR</glossterm>
<glossdef>
<para>
The directory in which files checked out of a Mercurial
system are stored.
</para>
</glossdef>
</glossentry>

<glossentry id='var-HOMEPAGE'><glossterm>HOMEPAGE</glossterm>
<glossdef>
<para>Website where more information about the software the recipe is building
@@ -2137,15 +2100,6 @@
</glossdef>
</glossentry>

<glossentry id='var-SVNDIR'><glossterm>SVNDIR</glossterm>
<glossdef>
<para>
The directory in which files checked out of a Subversion
system are stored.
</para>
</glossdef>
</glossentry>

</glossdiv>

<glossdiv id='var-glossary-t'><title>T</title>

@@ -21,7 +21,7 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

__version__ = "1.26.0"
__version__ = "1.24.0"

import sys
if sys.version_info < (2, 7, 3):

@@ -178,7 +178,7 @@ class BufferedLogger(Logger):

class PythonParser():
getvars = (".getVar", ".appendVar", ".prependVar")
containsfuncs = ("bb.utils.contains", "base_contains", "bb.utils.contains_any")
containsfuncs = ("bb.utils.contains", "base_contains", "oe.utils.contains", "bb.utils.contains_any")
execfuncs = ("bb.build.exec_func", "bb.build.exec_task")

def warn(self, func, arg):

@@ -123,11 +123,11 @@ class Command:

def finishAsyncCommand(self, msg=None, code=None):
if msg or msg == "":
bb.event.fire(CommandFailed(msg), self.cooker.expanded_data)
bb.event.fire(CommandFailed(msg), self.cooker.event_data)
elif code:
bb.event.fire(CommandExit(code), self.cooker.expanded_data)
bb.event.fire(CommandExit(code), self.cooker.event_data)
else:
bb.event.fire(CommandCompleted(), self.cooker.expanded_data)
bb.event.fire(CommandCompleted(), self.cooker.event_data)
self.currentAsyncCommand = None
self.cooker.finishcommand()

@@ -273,8 +273,7 @@ class CommandsSync:

def updateConfig(self, command, params):
options = params[0]
environment = params[1]
command.cooker.updateConfigOpts(options, environment)
command.cooker.updateConfigOpts(options)

class CommandsAsync:
"""

@@ -199,7 +199,7 @@ class BBCooker:
if not watcher:
watcher = self.watcher
for i in deps:
f = os.path.dirname(i[0])
f = i[0]
if f in watcher.bbseen:
continue
watcher.bbseen.append(f)
@@ -215,11 +215,6 @@ class BBCooker:
f = os.path.dirname(f)
watcher.bbseen.append(f)
continue
if 'ENOSPC' in str(e):
providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
providerlog.error("Root privilege is required to modify max_user_watches.")
raise

def sigterm_exception(self, signum, stackframe):
@@ -231,13 +226,13 @@ class BBCooker:

def setFeatures(self, features):
# we only accept a new feature set if we're in state initial, so we can reset without problems
if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
if self.state != state.initial:
raise Exception("Illegal state for feature set change")
original_featureset = list(self.featureset)
for feature in features:
self.featureset.setFeature(feature)
bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
if (original_featureset != list(self.featureset)) and self.state != state.error:
if (original_featureset != list(self.featureset)):
self.reset()

def initConfigurationData(self):
@@ -271,82 +266,12 @@ class BBCooker:
self.data = self.databuilder.data
self.data_hash = self.databuilder.data_hash


# we log all events to a file if so directed
if self.configuration.writeeventlog:
import json, pickle
DEFAULT_EVENTFILE = self.configuration.writeeventlog
class EventLogWriteHandler():

class EventWriter():
def __init__(self, cooker):
self.file_inited = None
self.cooker = cooker
self.event_queue = []

def init_file(self):
try:
# delete the old log
os.remove(DEFAULT_EVENTFILE)
except:
pass

# write current configuration data
with open(DEFAULT_EVENTFILE, "w") as f:
f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))

def write_event(self, event):
with open(DEFAULT_EVENTFILE, "a") as f:
try:
f.write("%s\n" % json.dumps({"class":event.__module__ + "." + event.__class__.__name__, "vars":json.dumps(pickle.dumps(event)) }))
except Exception as e:
import traceback
print(e, traceback.format_exc(e))


def send(self, event):
event_class = event.__module__ + "." + event.__class__.__name__

# init on bb.event.BuildStarted
if self.file_inited is None:
if event_class == "bb.event.BuildStarted":
self.init_file()
self.file_inited = True

# write pending events
for e in self.event_queue:
self.write_event(e)

# also write the current event
self.write_event(event)

else:
# queue all events until the file is inited
self.event_queue.append(event)

else:
# we have the file, just write the event
self.write_event(event)

# set our handler's event processor
event = EventWriter(self) # self is the cooker here


# set up cooker features for this mock UI handler

# we need to write the dependency tree in the log
self.featureset.setFeature(CookerFeatures.SEND_DEPENDS_TREE)
# register the log file writer as UI Handler
bb.event.register_UIHhandler(EventLogWriteHandler())


#
# Copy of the data store which has been expanded.
# Used for firing events and accessing variables where expansion needs to be accounted for
# Special updated configuration we use for firing events
#
self.expanded_data = bb.data.createCopy(self.data)
bb.data.update_data(self.expanded_data)
bb.parse.init_parser(self.expanded_data)
self.event_data = bb.data.createCopy(self.data)
bb.data.update_data(self.event_data)
bb.parse.init_parser(self.event_data)

if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
self.disableDataTracking()
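
One detail worth noting in the removed `EventWriter`: events that arrive before `bb.event.BuildStarted` are queued, and only flushed to disk once that event has initialised the log file. The buffering logic in isolation (an illustrative sketch, not the bitbake class):

```python
class BufferedEventWriter(object):
    def __init__(self):
        self.file_inited = False
        self.event_queue = []

    def write_event(self, event):
        print("logged:", event)

    def send(self, event):
        if not self.file_inited:
            if event == "bb.event.BuildStarted":
                self.file_inited = True       # the start event initialises the file
                for queued in self.event_queue:
                    self.write_event(queued)  # flush everything held back so far
                self.write_event(event)
            else:
                self.event_queue.append(event)  # too early: hold until BuildStarted
        else:
            self.write_event(event)

w = BufferedEventWriter()
w.send("bb.event.ConfigParsed")   # queued
w.send("bb.event.BuildStarted")   # flushes the queue, then logs itself
w.send("bb.build.TaskStarted")    # logged directly
```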
@@ -381,7 +306,7 @@ class BBCooker:
f.write(total)

#add to history
loginfo = {"op":"append", "file":default_file, "line":total.count("\n")}
loginfo = {"op":append, "file":default_file, "line":total.count("\n")}
self.data.appendVar(var, val, **loginfo)

def saveConfigurationVar(self, var, val, default_file, op):
@@ -450,7 +375,7 @@ class BBCooker:
f.write(total)

#add to history
loginfo = {"op":"set", "file":default_file, "line":total.count("\n")}
loginfo = {"op":set, "file":default_file, "line":total.count("\n")}
self.data.setVar(var, val, **loginfo)

def removeConfigurationVar(self, var):
@@ -512,29 +437,9 @@ class BBCooker:

self.handleCollections( self.data.getVar("BBFILE_COLLECTIONS", True) )

def updateConfigOpts(self, options, environment):
def updateConfigOpts(self,options):
for o in options:
setattr(self.configuration, o, options[o])
clean = True
for k in bb.utils.approved_variables():
if k in environment and k not in self.configuration.env:
logger.debug(1, "Updating environment variable %s to %s" % (k, environment[k]))
self.configuration.env[k] = environment[k]
clean = False
if k in self.configuration.env and k not in environment:
logger.debug(1, "Updating environment variable %s (deleted)" % (k))
del self.configuration.env[k]
clean = False
if k not in self.configuration.env and k not in environment:
continue
if environment[k] != self.configuration.env[k]:
logger.debug(1, "Updating environment variable %s to %s" % (k, environment[k]))
self.configuration.env[k] = environment[k]
clean = False
if not clean:
logger.debug(1, "Base environment change, triggering reparse")
self.baseconfig_valid = False
self.reset()
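
The removed `updateConfigOpts` body reconciles the server's stored environment with the one the client just sent, for each approved variable: copy in new keys, delete vanished ones, overwrite changed ones, and trigger a reparse if anything moved. The dictionary diff on its own (names illustrative):

```python
def sync_env(stored, incoming, approved):
    """Bring 'stored' in line with 'incoming'; return True if nothing changed."""
    clean = True
    for k in approved:
        if k in incoming and k not in stored:
            stored[k] = incoming[k]      # variable newly set by the client
            clean = False
        elif k in stored and k not in incoming:
            del stored[k]                # variable no longer set
            clean = False
        elif k in stored and stored[k] != incoming[k]:
            stored[k] = incoming[k]      # value changed
            clean = False
    return clean

env = {"http_proxy": "http://old:8080"}
print(sync_env(env, {"http_proxy": "http://new:8080"}, ["http_proxy", "no_proxy"]))
# False -- a change was detected, so a reparse would be triggered
```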

def runCommands(self, server, data, abort):
"""
@@ -581,7 +486,7 @@ class BBCooker:
fn = self.matchFile(fn)
fn = bb.cache.Cache.realfn2virtual(fn, cls)
elif len(pkgs_to_build) == 1:
ignore = self.expanded_data.getVar("ASSUME_PROVIDED", True) or ""
ignore = self.data.getVar("ASSUME_PROVIDED", True) or ""
if pkgs_to_build[0] in set(ignore.split()):
bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])

@@ -661,7 +566,7 @@ class BBCooker:
taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, task, False)

return runlist, taskdata

######## WARNING : this function requires cache_extra to be enabled ########

def generateTaskDepTreeData(self, pkgs_to_build, task):
@@ -1113,30 +1018,42 @@ class BBCooker:
# Check dependencies and store information for priority calculation
deps = self.data.getVar("LAYERDEPENDS_%s" % c, True)
if deps:
try:
deplist = bb.utils.explode_dep_versions2(deps)
except bb.utils.VersionStringException as vse:
bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
for dep, oplist in deplist.iteritems():
depnamelist = []
deplist = deps.split()
for dep in deplist:
depsplit = dep.split(':')
if len(depsplit) > 1:
try:
depver = int(depsplit[1])
except ValueError:
parselog.error("invalid version value in LAYERDEPENDS_%s: \"%s\"", c, dep)
errors = True
continue
else:
depver = None
dep = depsplit[0]
depnamelist.append(dep)

if dep in collection_list:
for opstr in oplist:
if depver:
layerver = self.data.getVar("LAYERVERSION_%s" % dep, True)
(op, depver) = opstr.split()
if layerver:
try:
res = bb.utils.vercmp_string_op(layerver, depver, op)
except bb.utils.VersionStringException as vse:
bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
if not res:
parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
lver = int(layerver)
except ValueError:
parselog.error("invalid value for LAYERVERSION_%s: \"%s\"", c, layerver)
errors = True
continue
if lver != depver:
parselog.error("Layer '%s' depends on version %d of layer '%s', but version %d is enabled in your configuration", c, depver, dep, lver)
errors = True
else:
parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
parselog.error("Layer '%s' depends on version %d of layer '%s', which exists in your configuration but does not specify a version", c, depver, dep)
errors = True
else:
parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
errors = True
collection_depends[c] = deplist.keys()
collection_depends[c] = depnamelist
else:
collection_depends[c] = []
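
The two versions differ in how they parse `LAYERDEPENDS_<collection>`: the newer code accepts operator expressions via `bb.utils.explode_dep_versions2`, while the older code shown here only understands `name` or `name:integer-version` tokens. The older scheme, distilled (a sketch written under that assumption):

```python
def parse_layerdepends(value):
    """Split 'name' / 'name:version' tokens; versions must be plain integers."""
    deps = []
    for token in value.split():
        name, _, ver = token.partition(':')
        deps.append((name, int(ver) if ver else None))
    return deps

print(parse_layerdepends("core meta-python:3"))
# [('core', None), ('meta-python', 3)]
```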

@@ -1188,7 +1105,7 @@ class BBCooker:
bf = os.path.abspath(bf)

self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities)
filelist, masked = self.collection.collect_bbfiles(self.data, self.expanded_data)
filelist, masked = self.collection.collect_bbfiles(self.data, self.event_data)
try:
os.stat(bf)
bf = os.path.abspath(bf)
@@ -1278,7 +1195,7 @@ class BBCooker:
taskdata.add_provider(self.data, self.recipecache, item)

buildname = self.data.getVar("BUILDNAME")
bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.expanded_data)
bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.event_data)

# Execute the runqueue
runlist = [[item, "do_%s" % task]]
@@ -1305,7 +1222,7 @@ class BBCooker:
return False

if not retval:
bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, item, failures), self.expanded_data)
bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, item, failures), self.event_data)
self.command.finishAsyncCommand(msg)
return False
if retval is True:
@@ -1453,14 +1370,14 @@ class BBCooker:
if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
bb.event.fire(bb.event.SanityCheck(False), self.data)

ignore = self.expanded_data.getVar("ASSUME_PROVIDED", True) or ""
ignore = self.data.getVar("ASSUME_PROVIDED", True) or ""
self.recipecache.ignored_dependencies = set(ignore.split())

for dep in self.configuration.extra_assume_provided:
self.recipecache.ignored_dependencies.add(dep)

self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities)
(filelist, masked) = self.collection.collect_bbfiles(self.data, self.expanded_data)
(filelist, masked) = self.collection.collect_bbfiles(self.data, self.event_data)

self.data.renameVar("__depends", "__base_depends")
self.add_filewatch(self.data.getVar("__base_depends"), self.configwatcher)
@@ -1476,7 +1393,7 @@ class BBCooker:
raise bb.BBHandledException()
self.show_appends_with_no_recipes()
self.handlePrefProviders()
self.recipecache.bbfile_priority = self.collection.collection_priorities(self.recipecache.pkg_fn, self.data)
self.recipecache.bbfile_priority = self.collection.collection_priorities(self.recipecache.pkg_fn)
self.state = state.running
return None

@@ -1490,7 +1407,7 @@ class BBCooker:
if len(pkgs_to_build) == 0:
raise NothingToBuild

ignore = (self.expanded_data.getVar("ASSUME_PROVIDED", True) or "").split()
ignore = (self.data.getVar("ASSUME_PROVIDED", True) or "").split()
for pkg in pkgs_to_build:
if pkg in ignore:
parselog.warn("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
@@ -1520,13 +1437,13 @@ class BBCooker:
try:
self.prhost = prserv.serv.auto_start(self.data)
except prserv.serv.PRServiceConfigError:
bb.event.fire(CookerExit(), self.expanded_data)
bb.event.fire(CookerExit(), self.event_data)
self.state = state.error
return

def post_serve(self):
prserv.serv.auto_shutdown(self.data)
bb.event.fire(CookerExit(), self.expanded_data)
bb.event.fire(CookerExit(), self.event_data)
lockfile = self.lock.name
self.lock.close()
self.lock = None
@@ -1555,7 +1472,6 @@ class BBCooker:
msg += ":\n%s" % str(procs)
print(msg)


def shutdown(self, force = False):
if force:
self.state = state.forceshutdown
@@ -1603,7 +1519,6 @@ class CookerExit(bb.event.Event):
class CookerCollectFiles(object):
def __init__(self, priorities):
self.appendlist = {}
self.bbappends = []
self.appliedappendlist = []
self.bbfile_config_priorities = priorities

@@ -1698,7 +1613,6 @@ class CookerCollectFiles(object):
# Build a list of .bbappend files for each .bb file
for f in bbappend:
base = os.path.basename(f).replace('.bbappend', '.bb')
self.bbappends.append((base, f))
if not base in self.appendlist:
self.appendlist[base] = []
if f not in self.appendlist[base]:
@@ -1724,14 +1638,14 @@ class CookerCollectFiles(object):
"""
filelist = []
f = os.path.basename(fn)
for b in self.bbappends:
(bbappend, filename) = b
for bbappend in self.appendlist:
if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
self.appliedappendlist.append(bbappend)
filelist.append(filename)
for filename in self.appendlist[bbappend]:
filelist.append(filename)
return filelist

def collection_priorities(self, pkgfns, d):
def collection_priorities(self, pkgfns):

priorities = {}

@@ -1740,18 +1654,18 @@ class CookerCollectFiles(object):
for p in pkgfns:
realfn, cls = bb.cache.Cache.virtualfn2realfn(p)
priorities[p] = self.calc_bbfile_priority(realfn, matched)


# Don't show the warning if the BBFILE_PATTERN did match .bbappend files
unmatched = set()
for _, _, regex, pri in self.bbfile_config_priorities:
for _, _, regex, pri in self.bbfile_config_priorities:
if not regex in matched:
unmatched.add(regex)

def findmatch(regex):
for b in self.bbappends:
(bbfile, append) = b
if regex.match(append):
return True
for bbfile in self.appendlist:
for append in self.appendlist[bbfile]:
if regex.match(append):
return True
return False

for unmatch in unmatched.copy():
@@ -1760,8 +1674,7 @@ class CookerCollectFiles(object):

for collection, pattern, regex, _ in self.bbfile_config_priorities:
if regex in unmatched:
if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection, True) != '1':
collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))

return priorities


@@ -33,8 +33,8 @@ logger = logging.getLogger("BitBake")
parselog = logging.getLogger("BitBake.Parsing")

class ConfigParameters(object):
def __init__(self, argv=sys.argv):
self.options, targets = self.parseCommandLine(argv)
def __init__(self):
self.options, targets = self.parseCommandLine()
self.environment = self.parseEnvironment()

self.options.pkgs_to_build = targets or []
@@ -46,7 +46,7 @@ class ConfigParameters(object):
for key, val in self.options.__dict__.items():
setattr(self, key, val)

def parseCommandLine(self, argv=sys.argv):
def parseCommandLine(self):
raise Exception("Caller must implement commandline option parsing")

def parseEnvironment(self):
@@ -69,14 +69,14 @@ class ConfigParameters(object):
if bbpkgs:
self.options.pkgs_to_build.extend(bbpkgs.split())

def updateToServer(self, server, environment):
def updateToServer(self, server):
options = {}
for o in ["abort", "tryaltconfigs", "force", "invalidate_stamp",
"verbose", "debug", "dry_run", "dump_signatures",
"debug_domains", "extra_assume_provided", "profile"]:
options[o] = getattr(self.options, o)

ret, error = server.runCommand(["updateConfig", options, environment])
ret, error = server.runCommand(["updateConfig", options])
if error:
raise Exception("Unable to update the server configuration with local parameters: %s" % error)

@@ -139,7 +139,6 @@ class CookerConfiguration(object):
self.dry_run = False
self.tracking = False
self.interface = []
self.writeeventlog = False

self.env = {}


@@ -296,14 +296,9 @@ class VariableHistory(object):
self.variables[var] = []

class DataSmart(MutableMapping):
def __init__(self, special = None, seen = None ):
def __init__(self, special = COWDictBase.copy(), seen = COWDictBase.copy() ):
self.dict = {}

if special is None:
special = COWDictBase.copy()
if seen is None:
seen = COWDictBase.copy()

self.inchistory = IncludeHistory()
self.varhistory = VariableHistory(self)
self._tracking = False
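
The `DataSmart.__init__` change is the classic mutable-default-argument fix: a default such as `COWDictBase.copy()` is evaluated once, when the function is defined, so every instance constructed without arguments would share the same object. The `None` sentinel creates a fresh one per call. A small demonstration of the pitfall:

```python
def bad(store={}):        # the {} is created once and shared across calls
    store["n"] = store.get("n", 0) + 1
    return store

def good(store=None):     # fresh dict per call unless one is passed in
    if store is None:
        store = {}
    store["n"] = store.get("n", 0) + 1
    return store

print(bad(), bad())    # {'n': 2} {'n': 2} -- the same dict, mutated twice
print(good(), good())  # {'n': 1} {'n': 1} -- independent dicts
```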
@@ -594,7 +589,7 @@ class DataSmart(MutableMapping):
self._makeShadowCopy(var)
self.dict[var][flag] = value

if flag == "_defaultval" and '_' in var:
if flag == "defaultval" and '_' in var:
self._setvar_update_overrides(var)

if flag == "unexport" or flag == "export":
@@ -610,8 +605,8 @@ class DataSmart(MutableMapping):
if local_var is not None:
if flag in local_var:
value = copy.copy(local_var[flag])
elif flag == "_content" and "_defaultval" in local_var and not noweakdefault:
value = copy.copy(local_var["_defaultval"])
elif flag == "_content" and "defaultval" in local_var and not noweakdefault:
value = copy.copy(local_var["defaultval"])
if expand and value:
# Only getvar (flag == _content) hits the expand cache
cachename = None
@@ -746,16 +741,12 @@ class DataSmart(MutableMapping):
yield key

def __iter__(self):
deleted = set()
def keylist(d):
klist = set()
for key in d:
if key == "_data":
continue
if key in deleted:
continue
if not d[key]:
deleted.add(key)
continue
klist.add(key)


@@ -68,7 +68,6 @@ _ui_logfilters = {}
_ui_handler_seq = 0
_event_handler_map = {}
_catchall_handlers = {}
_eventfilter = None

def execute_handler(name, handler, event, d):
event.data = d
@@ -96,9 +95,6 @@ def fire_class_handlers(event, d):
evt_hmap = _event_handler_map.get(eid, {})
for name, handler in _handlers.iteritems():
if name in _catchall_handlers or name in evt_hmap:
if _eventfilter:
if not _eventfilter(name, handler, event, d):
continue
execute_handler(name, handler, event, d)

ui_queue = []
@@ -209,10 +205,6 @@ def remove(name, handler):
"""Remove an Event handler"""
_handlers.pop(name)

def set_eventfilter(func):
global _eventfilter
_eventfilter = func

def register_UIHhandler(handler):
bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1
_ui_handlers[_ui_handler_seq] = handler

@@ -45,13 +45,6 @@ _checksum_cache = bb.checksum.FileChecksumCache()

logger = logging.getLogger("BitBake.Fetcher")

try:
import cPickle as pickle
except ImportError:
import pickle
logger.info("Importing cPickle failed. "
"Falling back to a very slow implementation.")

class BBFetchException(Exception):
"""Class all fetch exceptions inherit from"""
def __init__(self, message):
@@ -532,7 +525,7 @@ def fetcher_compare_revisions(d):
def mirror_from_string(data):
return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ]

def verify_checksum(ud, d, precomputed={}):
def verify_checksum(ud, d):
"""
verify the MD5 and SHA256 checksum for downloaded src

@@ -540,28 +533,13 @@ def verify_checksum(ud, d, precomputed={}):
the downloaded file, or if BB_STRICT_CHECKSUM is set and there are no
checksums specified.

Returns a dict of checksums that can be stored in a done stamp file and
passed in as precomputed parameter in a later call to avoid re-computing
the checksums from the file. This allows verifying the checksums of the
file against those in the recipe each time, rather than only after
downloading. See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571.
"""

_MD5_KEY = "md5"
_SHA256_KEY = "sha256"
if not ud.method.supports_checksum(ud):
return

if ud.ignore_checksums or not ud.method.supports_checksum(ud):
return {}

if _MD5_KEY in precomputed:
md5data = precomputed[_MD5_KEY]
else:
md5data = bb.utils.md5_file(ud.localpath)

if _SHA256_KEY in precomputed:
sha256data = precomputed[_SHA256_KEY]
else:
sha256data = bb.utils.sha256_file(ud.localpath)
md5data = bb.utils.md5_file(ud.localpath)
sha256data = bb.utils.sha256_file(ud.localpath)

if ud.method.recommends_checksum(ud):
# If strict checking enabled and neither sum defined, raise error
@@ -611,72 +589,6 @@ def verify_checksum(ud, d, precomputed={}):
if len(msg):
raise ChecksumError('Checksum mismatch!%s' % msg, ud.url, md5data)

return {
_MD5_KEY: md5data,
_SHA256_KEY: sha256data
}


def verify_donestamp(ud, d):
"""
Check whether the done stamp file has the right checksums (if the fetch
method supports them). If it doesn't, delete the done stamp and force
a re-download.

Returns True, if the donestamp exists and is valid, False otherwise. When
returning False, any existing done stamps are removed.
"""
if not os.path.exists(ud.donestamp):
return False

if not ud.method.supports_checksum(ud):
# done stamp exists, checksums not supported; assume the local file is
# current
return True

if not os.path.exists(ud.localpath):
# done stamp exists, but the downloaded file does not; the done stamp
# must be incorrect, re-trigger the download
bb.utils.remove(ud.donestamp)
return False

precomputed_checksums = {}
# Only re-use the precomputed checksums if the donestamp is newer than the
# file. Do not rely on the mtime of directories, though. If ud.localpath is
# a directory, there will probably not be any checksums anyway.
if (os.path.isdir(ud.localpath) or
os.path.getmtime(ud.localpath) < os.path.getmtime(ud.donestamp)):
try:
with open(ud.donestamp, "rb") as cachefile:
pickled = pickle.Unpickler(cachefile)
precomputed_checksums.update(pickled.load())
except Exception as e:
# Avoid the warnings on the upgrade path from empty done stamp
# files to those containing the checksums.
if not isinstance(e, EOFError):
# Ignore errors, they aren't fatal
logger.warn("Couldn't load checksums from donestamp %s: %s "
"(msg: %s)" % (ud.donestamp, type(e).__name__,
str(e)))

try:
checksums = verify_checksum(ud, d, precomputed_checksums)
# If the cache file did not have the checksums, compute and store them
# as an upgrade path from the previous done stamp file format.
if checksums != precomputed_checksums:
with open(ud.donestamp, "wb") as cachefile:
p = pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL)
p.dump(checksums)
return True
except ChecksumError as e:
# Checksums failed to verify, trigger re-download and remove the
# incorrect stamp file.
logger.warn("Checksum mismatch for local file %s\n"
"Cleaning and trying again." % ud.localpath)
rename_bad_checksum(ud, e.checksum)
bb.utils.remove(ud.donestamp)
return False
|
||||
|
||||
|
||||
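The new-format done stamp above is nothing more than a pickled dict of checksums. A minimal standalone sketch of the round trip, with hypothetical file paths and without any bb.* dependencies:

    import pickle, hashlib

    def write_donestamp(stamp_path, local_file):
        # compute the same md5/sha256 pair verify_checksum() would store
        with open(local_file, "rb") as f:
            data = f.read()
        checksums = {"md5": hashlib.md5(data).hexdigest(),
                     "sha256": hashlib.sha256(data).hexdigest()}
        with open(stamp_path, "wb") as cachefile:
            pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL).dump(checksums)

    def read_donestamp(stamp_path):
        # an empty old-format stamp raises EOFError, which the code above
        # deliberately swallows on the upgrade path
        with open(stamp_path, "rb") as cachefile:
            return pickle.Unpickler(cachefile).load()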
def update_stamp(ud, d):
"""
@@ -691,11 +603,8 @@ def update_stamp(ud, d):
# Errors aren't fatal here
pass
else:
checksums = verify_checksum(ud, d)
# Store the checksums for later re-verification against the recipe
with open(ud.donestamp, "wb") as cachefile:
p = pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL)
p.dump(checksums)
verify_checksum(ud, d)
open(ud.donestamp, 'w').close()

def subprocess_setup():
# Python installs a SIGPIPE handler by default. This is usually not what
@@ -711,13 +620,11 @@ def get_autorev(d):

def get_srcrev(d):
"""
Return the revision string, usually for use in the version string (PV) of the current package
Return the version string for the current package
(usually to be used as PV)
Most packages usually only have one SCM so we just pass on the call.
In the multi SCM case, we build a value based on SRCREV_FORMAT which must
have been set.

The idea here is that we put the string "AUTOINC+" into return value if the revisions are not
incremental, other code is then responsible for turning that into an increasing value (if needed)
"""

scms = []
@@ -896,7 +803,7 @@ def try_mirror_url(origud, ud, ld, check = False):

os.chdir(ld.getVar("DL_DIR", True))

if not verify_donestamp(ud, ld) or ud.method.need_update(ud, ld):
if not os.path.exists(ud.donestamp) or ud.method.need_update(ud, ld):
ud.method.download(ud, ld)
if hasattr(ud.method,"build_mirror_data"):
ud.method.build_mirror_data(ud, ld)
@@ -912,13 +819,12 @@ def try_mirror_url(origud, ud, ld, check = False):
dldir = ld.getVar("DL_DIR", True)
if origud.mirrortarball and os.path.basename(ud.localpath) == os.path.basename(origud.mirrortarball) \
and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
# Create donestamp in old format to avoid triggering a re-download
bb.utils.mkdirhier(os.path.dirname(ud.donestamp))
open(ud.donestamp, 'w').close()
dest = os.path.join(dldir, os.path.basename(ud.localpath))
if not os.path.exists(dest):
os.symlink(ud.localpath, dest)
if not verify_donestamp(origud, ld) or origud.method.need_update(origud, ld):
if not os.path.exists(origud.donestamp) or origud.method.need_update(origud, ld):
origud.method.download(origud, ld)
if hasattr(origud.method,"build_mirror_data"):
origud.method.build_mirror_data(origud, ld)
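When a mirror serves the tarball under a different name than the upstream URL expects, the hunk above drops a symlink into DL_DIR so later lookups find the file under its expected name. A standalone sketch of that idiom (paths and names are illustrative):

    import os

    def link_mirror_tarball(dldir, mirror_path, upstream_name):
        # point the filename the fetcher expects at the file the mirror produced
        dest = os.path.join(dldir, upstream_name)
        if not os.path.exists(dest):
            os.symlink(mirror_path, dest)
        return dest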
@@ -1136,7 +1042,6 @@ class FetchData(object):
self.sha256_expected = None
else:
self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)
self.ignore_checksums = False

self.names = self.parm.get("name",'default').split(',')

@@ -1293,9 +1198,9 @@ class FetchMethod(object):
bb.fatal("Invalid value for 'unpack' parameter for %s: %s" %
(file, urldata.parm.get('unpack')))

base, ext = os.path.splitext(file)
if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz']:
efile = os.path.join(rootdir, os.path.basename(base))
dots = file.split(".")
if dots[-1] in ['gz', 'bz2', 'Z', 'xz']:
efile = os.path.join(rootdir, os.path.basename('.'.join(dots[0:-1])))
else:
efile = file
cmd = None
@@ -1315,10 +1220,6 @@ class FetchMethod(object):
cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
elif file.endswith('.xz'):
cmd = 'xz -dc %s > %s' % (file, efile)
elif file.endswith('.tar.lz'):
cmd = 'lzip -dc %s | tar x --no-same-owner -f -' % file
elif file.endswith('.lz'):
cmd = 'lzip -dc %s > %s' % (file, efile)
elif file.endswith('.zip') or file.endswith('.jar'):
try:
dos = bb.utils.to_boolean(urldata.parm.get('dos'), False)
@@ -1514,7 +1415,7 @@ class Fetch(object):
try:
self.d.setVar("BB_NO_NETWORK", network)

if verify_donestamp(ud, self.d) and not m.need_update(ud, self.d):
if os.path.exists(ud.donestamp) and not m.need_update(ud, self.d):
localpath = ud.localpath
elif m.try_premirror(ud, self.d):
logger.debug(1, "Trying PREMIRRORS")
@@ -1527,7 +1428,7 @@ class Fetch(object):
os.chdir(self.d.getVar("DL_DIR", True))

firsterr = None
if not localpath and ((not verify_donestamp(ud, self.d)) or m.need_update(ud, self.d)):
if not localpath and ((not os.path.exists(ud.donestamp)) or m.need_update(ud, self.d)):
try:
logger.debug(1, "Trying Upstream")
m.download(ud, self.d)

@@ -67,7 +67,6 @@ Supported SRC_URI options are:
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os
import re
import bb
from bb import data
from bb.fetch2 import FetchMethod
@@ -340,63 +339,20 @@ class Git(FetchMethod):
"""
Compute the HEAD revision for the url
"""
output = self._lsremote(ud, d, "")
# Tags of the form ^{} may not work, need to fallback to other form
if ud.unresolvedrev[name][:5] == "refs/":
head = ud.unresolvedrev[name]
tag = ud.unresolvedrev[name]
search = "%s %s^{}" % (ud.unresolvedrev[name], ud.unresolvedrev[name])
else:
head = "refs/heads/%s" % ud.unresolvedrev[name]
tag = "refs/tags/%s" % ud.unresolvedrev[name]
for s in [head, tag + "^{}", tag]:
for l in output.split('\n'):
if s in l:
return l.split()[0]
raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output" % ud.unresolvedrev[name])

def latest_versionstring(self, ud, d):
"""
Compute the latest release name like "x.y.z" in "x.y.z+gitHASH"
by searching through the tags output of ls-remote, comparing
versions and returning the highest match.
"""
verstring = ""
tagregex = re.compile(d.getVar('GITTAGREGEX', True) or "(?P<pver>([0-9][\.|_]?)+)")
try:
output = self._lsremote(ud, d, "refs/tags/*^{}")
except (bb.fetch2.FetchError, bb.fetch2.NetworkAccess):
return ""

for line in output.split("\n"):
if not line:
break

line = line.split("/")[-1]
# Ignore non-released branches
m = re.search("(alpha|beta|rc|final)+", line)
if m:
continue

# search for version in the line
tag = tagregex.search(line)
if tag == None:
continue

tag = tag.group('pver')
tag = tag.replace("_", ".")

if verstring and bb.utils.vercmp(("0", tag, ""), ("0", verstring, "")) < 0:
continue
verstring = tag

return verstring
search = "refs/heads/%s refs/tags/%s^{}" % (ud.unresolvedrev[name], ud.unresolvedrev[name])
output = self._lsremote(ud, d, search)
return output.split()[0]

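The pre-change resolver above scans the full ls-remote output for the branch head first, then the peeled tag, then the plain tag. A self-contained sketch of that matching order over canned output (the SHAs are made up):

    def resolve_rev(output, rev):
        # prefer a branch head, then a peeled tag (^{}), then the tag object
        head = "refs/heads/%s" % rev
        tag = "refs/tags/%s" % rev
        for s in [head, tag + "^{}", tag]:
            for l in output.split('\n'):
                if s in l:
                    return l.split()[0]
        raise ValueError("Unable to resolve '%s'" % rev)

    sample = "1111 refs/heads/master\n2222 refs/tags/v1.0\n3333 refs/tags/v1.0^{}"
    print(resolve_rev(sample, "v1.0"))  # prints 3333, the peeled commit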
def _build_revision(self, ud, d, name):
return ud.revisions[name]

def checkstatus(self, ud, d):
fetchcmd = "%s ls-remote %s" % (ud.basecmd, ud.url)
try:
self._lsremote(ud, d, "")
runfetchcmd(fetchcmd, d, quiet=True)
return True
except FetchError:
return False

@@ -87,8 +87,7 @@ class SSH(FetchMethod):
m = __pattern__.match(urldata.url)
path = m.group('path')
host = m.group('host')
urldata.localpath = os.path.join(d.getVar('DL_DIR', True),
os.path.basename(os.path.normpath(path)))
urldata.localpath = os.path.join(d.getVar('DL_DIR', True), os.path.basename(path))

def download(self, urldata, d):
dldir = d.getVar('DL_DIR', True)

@@ -25,9 +25,6 @@ BitBake build tools.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig

import re
import tempfile
import subprocess
import os
import logging
import bb
@@ -37,7 +34,6 @@ from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import logger
from bb.fetch2 import runfetchcmd
from bs4 import BeautifulSoup

class Wget(FetchMethod):
"""Class to fetch urls via 'wget'"""
@@ -108,274 +104,3 @@ class Wget(FetchMethod):
self._runwget(ud, d, fetchcmd, True)

return True

def _parse_path(self, regex, s):
"""
Find and group name, version and archive type in the given string s
"""

m = regex.search(s)
if m:
pname = ''
pver = ''
ptype = ''

mdict = m.groupdict()
if 'name' in mdict.keys():
pname = mdict['name']
if 'pver' in mdict.keys():
pver = mdict['pver']
if 'type' in mdict.keys():
ptype = mdict['type']

bb.debug(3, "_parse_path: %s, %s, %s" % (pname, pver, ptype))

return (pname, pver, ptype)

return None

def _modelate_version(self, version):
if version[0] in ['.', '-']:
if version[1].isdigit():
version = version[1] + version[0] + version[2:len(version)]
else:
version = version[1:len(version)]

version = re.sub('-', '.', version)
version = re.sub('_', '.', version)
version = re.sub('(rc)+', '.1000.', version)
version = re.sub('(beta)+', '.100.', version)
version = re.sub('(alpha)+', '.10.', version)
if version[0] == 'v':
version = version[1:len(version)]
return version

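Worked through by hand, the normalization above maps pre-release markers onto sortable numeric segments (rc above beta above alpha). A reduced sketch with the same substitutions, on hypothetical inputs:

    import re

    def modelate(version):
        # same substitutions as _modelate_version above, minus the edge handling
        version = re.sub('-', '.', version)
        version = re.sub('_', '.', version)
        version = re.sub('(rc)+', '.1000.', version)
        version = re.sub('(beta)+', '.100.', version)
        version = re.sub('(alpha)+', '.10.', version)
        return version

    print(modelate("1.2rc3"))    # 1.2.1000.3 -- sorts above any beta/alpha
    print(modelate("1.2beta1"))  # 1.2.100.1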
def _vercmp(self, old, new):
"""
Check whether 'new' is newer than 'old' version. We use existing vercmp() for the
purpose. PE is cleared in comparison as it's not for build, and PR is cleared too
for simplicity as it's somewhat difficult to get from various upstream formats
"""

(oldpn, oldpv, oldsuffix) = old
(newpn, newpv, newsuffix) = new

"""
Check for a new suffix type that we have never heard of before
"""
if (newsuffix):
m = self.suffix_regex_comp.search(newsuffix)
if not m:
bb.warn("%s has a possible unknown suffix: %s" % (newpn, newsuffix))
return False

"""
Not our package so ignore it
"""
if oldpn != newpn:
return False

oldpv = self._modelate_version(oldpv)
newpv = self._modelate_version(newpv)

return bb.utils.vercmp(("0", oldpv, ""), ("0", newpv, ""))

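The comparison above wraps bb.utils.vercmp() with the epoch pinned to "0" and the revision blanked out, exactly as the docstring describes. Inside a BitBake environment it can be exercised directly; the tuple form is (epoch, version, revision):

    import bb.utils  # only available inside a BitBake environment

    # epoch "0" and empty revision, exactly as _vercmp() builds the tuples
    print(bb.utils.vercmp(("0", "1.6.0", ""), ("0", "1.6.1", "")))  # negative: first is older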
def _fetch_index(self, uri, ud, d):
"""
Run fetch checkstatus to get directory information
"""
f = tempfile.NamedTemporaryFile()

agent = "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12"
fetchcmd = self.basecmd
fetchcmd += " -O " + f.name + " --user-agent='" + agent + "' '" + uri + "'"
try:
self._runwget(ud, d, fetchcmd, True)
fetchresult = f.read()
except bb.fetch2.BBFetchException:
fetchresult = ""

f.close()
return fetchresult

def _check_latest_version(self, url, package, package_regex, current_version, ud, d):
"""
Return the latest version of a package inside a given directory path
If error or no version, return ""
"""
valid = 0
version = ['', '', '']

bb.debug(3, "VersionURL: %s" % (url))
soup = BeautifulSoup(self._fetch_index(url, ud, d))
if not soup:
bb.debug(3, "*** %s NO SOUP" % (url))
return ""

for line in soup.find_all('a', href=True):
bb.debug(3, "line['href'] = '%s'" % (line['href']))
bb.debug(3, "line = '%s'" % (str(line)))

newver = self._parse_path(package_regex, line['href'])
if not newver:
newver = self._parse_path(package_regex, str(line))

if newver:
bb.debug(3, "Upstream version found: %s" % newver[1])
if valid == 0:
version = newver
valid = 1
elif self._vercmp(version, newver) < 0:
version = newver

pupver = re.sub('_', '.', version[1])

bb.debug(3, "*** %s -> UpstreamVersion = %s (CurrentVersion = %s)" %
(package, pupver or "N/A", current_version[1]))

if valid:
return pupver

return ""

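The scan above only ever looks at anchor elements with an href, trying the href string first and the element text as a fallback. A standalone sketch against canned HTML (requires bs4; the tarball name is one of the docstring's own examples):

    from bs4 import BeautifulSoup

    html = '<a href="gnome-common-2.20.0.tar.gz">gnome-common</a>'
    soup = BeautifulSoup(html)
    for link in soup.find_all('a', href=True):
        # the fetcher parses link['href'] first, then str(link) as a fallback
        print(link['href'])  # gnome-common-2.20.0.tar.gz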
def _check_latest_version_by_dir(self, dirver, package, package_regex,
current_version, ud, d):
"""
Scan every directory in order to get upstream version.
"""
version_dir = ['', '', '']
version = ['', '', '']

dirver_regex = re.compile("(\D*)((\d+[\.-_])+(\d+))")
s = dirver_regex.search(dirver)
if s:
version_dir[1] = s.group(2)
else:
version_dir[1] = dirver

dirs_uri = bb.fetch.encodeurl([ud.type, ud.host,
ud.path.split(dirver)[0], ud.user, ud.pswd, {}])
bb.debug(3, "DirURL: %s, %s" % (dirs_uri, package))

soup = BeautifulSoup(self._fetch_index(dirs_uri, ud, d))
if not soup:
return version[1]

for line in soup.find_all('a', href=True):
s = dirver_regex.search(line['href'].strip("/"))
if s:
version_dir_new = ['', s.group(2), '']
if self._vercmp(version_dir, version_dir_new) <= 0:
dirver_new = s.group(1) + s.group(2)
path = ud.path.replace(dirver, dirver_new, True) \
.split(package)[0]
uri = bb.fetch.encodeurl([ud.type, ud.host, path,
ud.user, ud.pswd, {}])

pupver = self._check_latest_version(uri,
package, package_regex, current_version, ud, d)
if pupver:
version[1] = pupver

version_dir = version_dir_new

return version[1]

def _init_regexes(self, package, ud, d):
"""
Match as many patterns as possible such as:
gnome-common-2.20.0.tar.gz (most common format)
gtk+-2.90.1.tar.gz
xf86-input-synaptics-12.6.9.tar.gz
dri2proto-2.3.tar.gz
blktool_4.orig.tar.gz
libid3tag-0.15.1b.tar.gz
unzip552.tar.gz
icu4c-3_6-src.tgz
genext2fs_1.3.orig.tar.gz
gst-fluendo-mp3
"""
# match most patterns which use "-" as separator to version digits
pn_prefix1 = "[a-zA-Z][a-zA-Z0-9]*([-_][a-zA-Z]\w+)*\+?[-_]"
# a loose pattern such as for unzip552.tar.gz
pn_prefix2 = "[a-zA-Z]+"
# a loose pattern such as for 80325-quicky-0.4.tar.gz
pn_prefix3 = "[0-9]+[-]?[a-zA-Z]+"
# Save the Package Name (pn) Regex for use later
pn_regex = "(%s|%s|%s)" % (pn_prefix1, pn_prefix2, pn_prefix3)

# match version
pver_regex = "(([A-Z]*\d+[a-zA-Z]*[\.-_]*)+)"

# match arch
parch_regex = "-source|_all_"

# src.rpm extension was added only for rpm package. Can be removed if the rpm
# packages will always be considered as having to be manually upgraded
psuffix_regex = "(tar\.gz|tgz|tar\.bz2|zip|xz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)"

# match name, version and archive type of a package
package_regex_comp = re.compile("(?P<name>%s?\.?v?)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s$)"
% (pn_regex, pver_regex, parch_regex, psuffix_regex))
self.suffix_regex_comp = re.compile(psuffix_regex)

# compile regex, can be specific by package or generic regex
pn_regex = d.getVar('REGEX', True)
if pn_regex:
package_custom_regex_comp = re.compile(pn_regex)
else:
version = self._parse_path(package_regex_comp, package)
if version:
package_custom_regex_comp = re.compile(
"(?P<name>%s)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s)" %
(re.escape(version[0]), pver_regex, parch_regex, psuffix_regex))
else:
package_custom_regex_comp = None

return package_custom_regex_comp

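Putting the pieces together, the compiled package regex splits a tarball filename into named groups. A simplified sketch using one name prefix and the same pver/suffix idea, run against one of the docstring's own examples:

    import re

    # simplified stand-in for package_regex_comp above
    regex = re.compile(r"(?P<name>[a-zA-Z][a-zA-Z0-9]*([-_][a-zA-Z]\w+)*[-_])"
                       r"(?P<pver>((\d+[\._-]*)+))\.(?P<type>tar\.gz)$")
    m = regex.match("gnome-common-2.20.0.tar.gz")
    if m:
        print(m.group('name'), m.group('pver'), m.group('type'))
        # gnome-common- 2.20.0 tar.gz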
def latest_versionstring(self, ud, d):
"""
Manipulate the URL and try to obtain the latest package version

sanity check to ensure same name and type.
"""
package = ud.path.split("/")[-1]
current_version = ['', d.getVar('PV', True), '']

"""possible to have no version in pkg name, such as spectrum-fw"""
if not re.search("\d+", package):
current_version[1] = re.sub('_', '.', current_version[1])
current_version[1] = re.sub('-', '.', current_version[1])
return current_version[1]

package_regex = self._init_regexes(package, ud, d)
if package_regex is None:
bb.warn("latest_versionstring: package %s doesn't match pattern" % (package))
return ""
bb.debug(3, "latest_versionstring, regex: %s" % (package_regex.pattern))

uri = ""
regex_uri = d.getVar("REGEX_URI", True)
if not regex_uri:
path = ud.path.split(package)[0]

# search for version matches on folders inside the path, like:
# "5.7" in http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
dirver_regex = re.compile("(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
m = dirver_regex.search(path)
if m:
pn = d.getVar('PN', True)
dirver = m.group('dirver')

dirver_pn_regex = re.compile("%s\d?" % (re.escape(pn)))
if not dirver_pn_regex.search(dirver):
return self._check_latest_version_by_dir(dirver,
package, package_regex, current_version, ud, d)

uri = bb.fetch.encodeurl([ud.type, ud.host, path, ud.user, ud.pswd, {}])
else:
uri = regex_uri

return self._check_latest_version(uri, package, package_regex,
current_version, ud, d)

@@ -1,391 +0,0 @@
#!/usr/bin/env python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
# Copyright (C) 2006 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os
import sys
import logging
import optparse
import warnings

import bb
from bb import event
import bb.msg
from bb import cooker
from bb import ui
from bb import server
from bb import cookerdata

__version__ = "1.26.0"
logger = logging.getLogger("BitBake")

class BBMainException(bb.BBHandledException):
pass

def get_ui(config):
if not config.ui:
# modify 'ui' attribute because it is also read by cooker
config.ui = os.environ.get('BITBAKE_UI', 'knotty')

interface = config.ui

try:
# Dynamically load the UI based on the ui name. Although we
# suggest a fixed set this allows you to have flexibility in which
# ones are available.
module = __import__("bb.ui", fromlist = [interface])
return getattr(module, interface)
except AttributeError:
raise BBMainException("FATAL: Invalid user interface '%s' specified.\n"
"Valid interfaces: depexp, goggle, ncurses, hob, knotty [default]." % interface)


# Display bitbake/OE warnings via the BitBake.Warnings logger, ignoring others
warnlog = logging.getLogger("BitBake.Warnings")
_warnings_showwarning = warnings.showwarning
def _showwarning(message, category, filename, lineno, file=None, line=None):
if file is not None:
if _warnings_showwarning is not None:
_warnings_showwarning(message, category, filename, lineno, file, line)
else:
s = warnings.formatwarning(message, category, filename, lineno)
warnlog.warn(s)

warnings.showwarning = _showwarning
warnings.filterwarnings("ignore")
warnings.filterwarnings("default", module="(<string>$|(oe|bb)\.)")
warnings.filterwarnings("ignore", category=PendingDeprecationWarning)
warnings.filterwarnings("ignore", category=ImportWarning)
warnings.filterwarnings("ignore", category=DeprecationWarning, module="<string>$")
warnings.filterwarnings("ignore", message="With-statements now directly support multiple context managers")

class BitBakeConfigParameters(cookerdata.ConfigParameters):

def parseCommandLine(self, argv=sys.argv):
parser = optparse.OptionParser(
version = "BitBake Build Tool Core version %s, %%prog version %s" % (bb.__version__, __version__),
usage = """%prog [options] [recipename/target ...]

Executes the specified task (default is 'build') for a given set of target recipes (.bb files).
It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which
will provide the layer, BBFILES and other configuration information.""")

parser.add_option("-b", "--buildfile", help = "Execute tasks from a specific .bb recipe directly. WARNING: Does not handle any dependencies from other recipes.",
action = "store", dest = "buildfile", default = None)

parser.add_option("-k", "--continue", help = "Continue as much as possible after an error. While the target that failed and anything depending on it cannot be built, as much as possible will be built before stopping.",
action = "store_false", dest = "abort", default = True)

parser.add_option("-a", "--tryaltconfigs", help = "Continue with builds by trying to use alternative providers where possible.",
action = "store_true", dest = "tryaltconfigs", default = False)

parser.add_option("-f", "--force", help = "Force the specified targets/task to run (invalidating any existing stamp file).",
action = "store_true", dest = "force", default = False)

parser.add_option("-c", "--cmd", help = "Specify the task to execute. The exact options available depend on the metadata. Some examples might be 'compile' or 'populate_sysroot' or 'listtasks' may give a list of the tasks available.",
action = "store", dest = "cmd")

parser.add_option("-C", "--clear-stamp", help = "Invalidate the stamp for the specified task such as 'compile' and then run the default task for the specified target(s).",
action = "store", dest = "invalidate_stamp")

parser.add_option("-r", "--read", help = "Read the specified file before bitbake.conf.",
action = "append", dest = "prefile", default = [])

parser.add_option("-R", "--postread", help = "Read the specified file after bitbake.conf.",
action = "append", dest = "postfile", default = [])

parser.add_option("-v", "--verbose", help = "Output more log message data to the terminal.",
action = "store_true", dest = "verbose", default = False)

parser.add_option("-D", "--debug", help = "Increase the debug level. You can specify this more than once.",
action = "count", dest="debug", default = 0)

parser.add_option("-n", "--dry-run", help = "Don't execute, just go through the motions.",
action = "store_true", dest = "dry_run", default = False)

parser.add_option("-S", "--dump-signatures", help = "Dump out the signature construction information, with no task execution. The SIGNATURE_HANDLER parameter is passed to the handler. Two common values are none and printdiff but the handler may define more/less. none means only dump the signature, printdiff means compare the dumped signature with the cached one.",
action = "append", dest = "dump_signatures", default = [], metavar="SIGNATURE_HANDLER")

parser.add_option("-p", "--parse-only", help = "Quit after parsing the BB recipes.",
action = "store_true", dest = "parse_only", default = False)

parser.add_option("-s", "--show-versions", help = "Show current and preferred versions of all recipes.",
action = "store_true", dest = "show_versions", default = False)

parser.add_option("-e", "--environment", help = "Show the global or per-recipe environment complete with information about where variables were set/changed.",
action = "store_true", dest = "show_environment", default = False)

parser.add_option("-g", "--graphviz", help = "Save dependency tree information for the specified targets in the dot syntax.",
action = "store_true", dest = "dot_graph", default = False)

parser.add_option("-I", "--ignore-deps", help = """Assume these dependencies don't exist and are already provided (equivalent to ASSUME_PROVIDED). Useful to make dependency graphs more appealing""",
action = "append", dest = "extra_assume_provided", default = [])

parser.add_option("-l", "--log-domains", help = """Show debug logging for the specified logging domains""",
action = "append", dest = "debug_domains", default = [])

parser.add_option("-P", "--profile", help = "Profile the command and save reports.",
action = "store_true", dest = "profile", default = False)

parser.add_option("-u", "--ui", help = "The user interface to use (e.g. knotty, hob, depexp).",
action = "store", dest = "ui")

parser.add_option("-t", "--servertype", help = "Choose which server to use, process or xmlrpc.",
action = "store", dest = "servertype")

parser.add_option("", "--token", help = "Specify the connection token to be used when connecting to a remote server.",
action = "store", dest = "xmlrpctoken")

parser.add_option("", "--revisions-changed", help = "Set the exit code depending on whether upstream floating revisions have changed or not.",
action = "store_true", dest = "revisions_changed", default = False)

parser.add_option("", "--server-only", help = "Run bitbake without a UI, only starting a server (cooker) process.",
action = "store_true", dest = "server_only", default = False)

parser.add_option("-B", "--bind", help = "The name/address for the bitbake server to bind to.",
action = "store", dest = "bind", default = False)

parser.add_option("", "--no-setscene", help = "Do not run any setscene tasks. sstate will be ignored and everything needed, built.",
action = "store_true", dest = "nosetscene", default = False)

parser.add_option("", "--remote-server", help = "Connect to the specified server.",
action = "store", dest = "remote_server", default = False)

parser.add_option("-m", "--kill-server", help = "Terminate the remote server.",
action = "store_true", dest = "kill_server", default = False)

parser.add_option("", "--observe-only", help = "Connect to a server as an observing-only client.",
action = "store_true", dest = "observe_only", default = False)

parser.add_option("", "--status-only", help = "Check the status of the remote bitbake server.",
action = "store_true", dest = "status_only", default = False)

parser.add_option("-w", "--write-log", help = "Writes the event log of the build to a bitbake event json file. Use '' (empty string) to assign the name automatically.",
action = "store", dest = "writeeventlog")

options, targets = parser.parse_args(argv)

# some environment variables also set configuration options
if "BBSERVER" in os.environ:
options.servertype = "xmlrpc"
options.remote_server = os.environ["BBSERVER"]

if "BBTOKEN" in os.environ:
options.xmlrpctoken = os.environ["BBTOKEN"]

if "BBEVENTLOG" in os.environ:
options.writeeventlog = os.environ["BBEVENTLOG"]

# fill in proper log name if not supplied
if options.writeeventlog is not None and len(options.writeeventlog) == 0:
import datetime
options.writeeventlog = "bitbake_eventlog_%s.json" % datetime.datetime.now().strftime("%Y%m%d%H%M%S")

# if BBSERVER says to autodetect, let's do that
if options.remote_server:
[host, port] = options.remote_server.split(":", 2)
port = int(port)
# use automatic port if port set to -1, means read it from
# the bitbake.lock file; this is a bit tricky, but we always expect
# to be in the base of the build directory if we need to have a
# chance to start the server later, anyway
if port == -1:
lock_location = "./bitbake.lock"
# we try to read the address at all times; if the server is not started,
# we'll try to start it after the first connect fails, below
try:
lf = open(lock_location, 'r')
remotedef = lf.readline()
[host, port] = remotedef.split(":")
port = int(port)
lf.close()
options.remote_server = remotedef
except Exception as e:
raise BBMainException("Failed to read bitbake.lock (%s), invalid port" % str(e))

return options, targets[1:]


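The host:port handling above accepts either a literal address or a port of -1, in which case the real address is recovered from the bitbake.lock file in the build directory. A sketch of just the parsing step (the lock-file contents shown are illustrative):

    def parse_server(remotedef):
        # "localhost:-1" is a sentinel: recover the real address from bitbake.lock
        host, port = remotedef.split(":", 1)
        return host, int(port)

    print(parse_server("localhost:8200"))  # ('localhost', 8200)
    print(parse_server("localhost:-1"))    # (-1 means: read ./bitbake.lock instead)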
def start_server(servermodule, configParams, configuration, features):
server = servermodule.BitBakeServer()
if configParams.bind:
(host, port) = configParams.bind.split(':')
server.initServer((host, int(port)))
configuration.interface = [ server.serverImpl.host, server.serverImpl.port ]
else:
server.initServer()
configuration.interface = []

try:
configuration.setServerRegIdleCallback(server.getServerIdleCB())

cooker = bb.cooker.BBCooker(configuration, features)

server.addcooker(cooker)
server.saveConnectionDetails()
except Exception as e:
exc_info = sys.exc_info()
while hasattr(server, "event_queue"):
try:
import queue
except ImportError:
import Queue as queue
try:
event = server.event_queue.get(block=False)
except (queue.Empty, IOError):
break
if isinstance(event, logging.LogRecord):
logger.handle(event)
raise exc_info[1], None, exc_info[2]
server.detach()
cooker.lock.close()
return server


def bitbake_main(configParams, configuration):

# Python multiprocessing requires /dev/shm on Linux
if sys.platform.startswith('linux') and not os.access('/dev/shm', os.W_OK | os.X_OK):
raise BBMainException("FATAL: /dev/shm does not exist or is not writable")

# Unbuffer stdout to avoid log truncation in the event
# of a disorderly exit as well as to provide timely
# updates to log files for use with tail
try:
if sys.stdout.name == '<stdout>':
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
except:
pass


configuration.setConfigParameters(configParams)

ui_module = get_ui(configParams)

# Server type can be xmlrpc or process currently, if nothing is specified,
# the default server is process
if configParams.servertype:
server_type = configParams.servertype
else:
server_type = 'process'

try:
module = __import__("bb.server", fromlist = [server_type])
servermodule = getattr(module, server_type)
except AttributeError:
raise BBMainException("FATAL: Invalid server type '%s' specified.\n"
"Valid interfaces: xmlrpc, process [default]." % server_type)

if configParams.server_only:
if configParams.servertype != "xmlrpc":
raise BBMainException("FATAL: If '--server-only' is defined, we must set the "
"servertype as 'xmlrpc'.\n")
if not configParams.bind:
raise BBMainException("FATAL: The '--server-only' option requires a name/address "
"to bind to with the -B option.\n")
if configParams.remote_server:
raise BBMainException("FATAL: The '--server-only' option conflicts with %s.\n" %
("the BBSERVER environment variable" if "BBSERVER" in os.environ \
else "the '--remote-server' option" ))

if configParams.bind and configParams.servertype != "xmlrpc":
raise BBMainException("FATAL: If '-B' or '--bind' is defined, we must "
"set the servertype as 'xmlrpc'.\n")

if configParams.remote_server and configParams.servertype != "xmlrpc":
raise BBMainException("FATAL: If '--remote-server' is defined, we must "
"set the servertype as 'xmlrpc'.\n")

if configParams.observe_only and (not configParams.remote_server or configParams.bind):
raise BBMainException("FATAL: '--observe-only' can only be used by UI clients "
"connecting to a server.\n")

if configParams.kill_server and not configParams.remote_server:
raise BBMainException("FATAL: '--kill-server' can only be used to terminate a remote server")

if "BBDEBUG" in os.environ:
level = int(os.environ["BBDEBUG"])
if level > configuration.debug:
configuration.debug = level

bb.msg.init_msgconfig(configParams.verbose, configuration.debug,
configuration.debug_domains)

# Ensure logging messages get sent to the UI as events
handler = bb.event.LogHandler()
if not configParams.status_only:
# In status only mode there are no logs and no UI
logger.addHandler(handler)

# Clear away any spurious environment variables while we stoke up the cooker
cleanedvars = bb.utils.clean_environment()

featureset = []
if not configParams.server_only:
# Collect the feature set for the UI
featureset = getattr(ui_module, "featureSet", [])

if not configParams.remote_server:
# we start a server with a given configuration
server = start_server(servermodule, configParams, configuration, featureset)
bb.event.ui_queue = []
else:
# we start a stub server that is actually a XMLRPClient that connects to a real server
server = servermodule.BitBakeXMLRPCClient(configParams.observe_only, configParams.xmlrpctoken)
server.saveConnectionDetails(configParams.remote_server)


if not configParams.server_only:
try:
server_connection = server.establishConnection(featureset)
except Exception as e:
if configParams.kill_server:
return 0
bb.fatal("Could not connect to server %s: %s" % (configParams.remote_server, str(e)))

# Restore the environment in case the UI needs it
for k in cleanedvars:
os.environ[k] = cleanedvars[k]

logger.removeHandler(handler)


if configParams.status_only:
server_connection.terminate()
return 0

if configParams.kill_server:
server_connection.connection.terminateServer()
bb.event.ui_queue = []
return 0

try:
return ui_module.main(server_connection.connection, server_connection.events, configParams)
finally:
bb.event.ui_queue = []
server_connection.terminate()
else:
print("Bitbake server address: %s, server port: %s" % (server.serverImpl.host, server.serverImpl.port))
return 0

return 1
@@ -128,7 +128,7 @@ class DataNode(AstNode):
if 'flag' in groupd and groupd['flag'] != None:
flag = groupd['flag']
elif groupd["lazyques"]:
flag = "_defaultval"
flag = "defaultval"

loginfo['op'] = op
loginfo['detail'] = groupd["value"]
@@ -139,7 +139,7 @@ class DataNode(AstNode):
data.setVar(key, val, **loginfo)

class MethodNode(AstNode):
tr_tbl = string.maketrans('/.+-@%&', '_______')
tr_tbl = string.maketrans('/.+-@%', '______')

def __init__(self, filename, lineno, func_name, body):
AstNode.__init__(self, filename, lineno)

@@ -27,7 +27,7 @@
import re, os
import logging
import bb.utils
from bb.parse import ParseError, resolve_file, ast, logger, handle
from bb.parse import ParseError, resolve_file, ast, logger

__config_regexp__ = re.compile( r"""
^
@@ -66,36 +66,38 @@ def init(data):
def supports(fn, d):
return fn[-5:] == ".conf"

def include(parentfn, fn, lineno, data, error_out):
def include(oldfn, fn, lineno, data, error_out):
"""
error_out: A string indicating the verb (e.g. "include", "inherit") to be
used in a ParseError that will be raised if the file to be included could
not be included. Specify False to avoid raising an error in this case.
"""
if parentfn == fn: # prevent infinite recursion
if oldfn == fn: # prevent infinite recursion
return None

import bb
fn = data.expand(fn)
parentfn = data.expand(parentfn)
oldfn = data.expand(oldfn)

if not os.path.isabs(fn):
dname = os.path.dirname(parentfn)
dname = os.path.dirname(oldfn)
bbpath = "%s:%s" % (dname, data.getVar("BBPATH", True))
abs_fn, attempts = bb.utils.which(bbpath, fn, history=True)
if abs_fn and bb.parse.check_dependency(data, abs_fn):
logger.warn("Duplicate inclusion for %s in %s" % (abs_fn, data.getVar('FILE', True)))
bb.warn("Duplicate inclusion for %s in %s" % (abs_fn, data.getVar('FILE', True)))
for af in attempts:
bb.parse.mark_dependency(data, af)
if abs_fn:
fn = abs_fn
elif bb.parse.check_dependency(data, fn):
logger.warn("Duplicate inclusion for %s in %s" % (fn, data.getVar('FILE', True)))
bb.warn("Duplicate inclusion for %s in %s" % (fn, data.getVar('FILE', True)))

from bb.parse import handle
try:
ret = bb.parse.handle(fn, data, True)
ret = handle(fn, data, True)
except (IOError, OSError):
if error_out:
raise ParseError("Could not %(error_out)s file %(fn)s" % vars(), parentfn, lineno)
raise ParseError("Could not %(error_out)s file %(fn)s" % vars(), oldfn, lineno)
logger.debug(2, "CONF file '%s' not found", fn)
bb.parse.mark_dependency(data, fn)


@@ -430,7 +430,7 @@ class RunQueueData:
# Nothing to do
return 0

logger.info("Preparing RunQueue")
logger.info("Preparing runqueue")

# Step A - Work out a list of tasks to run
#
@@ -1155,7 +1155,7 @@ class RunQueue:
sq_taskname.append(taskname)
sq_task.append(task)
call = self.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d)"
locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.expanded_data }
locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.data }
valid = bb.utils.better_eval(call, locs)
for v in valid:
valid_new.add(sq_task[v])
@@ -1333,7 +1333,7 @@ class RunQueueExecute:
taskname = self.rqdata.runq_task[depid]
taskdata[dep] = [pn, taskname, fn]
call = self.rq.depvalidate + "(task, taskdata, notneeded, d)"
locs = { "task" : task, "taskdata" : taskdata, "notneeded" : self.scenequeue_notneeded, "d" : self.cooker.expanded_data }
locs = { "task" : task, "taskdata" : taskdata, "notneeded" : self.scenequeue_notneeded, "d" : self.cooker.data }
valid = bb.utils.better_eval(call, locs)
return valid

@@ -1402,7 +1402,7 @@ class RunQueueExecuteTasks(RunQueueExecute):

call = self.rq.setsceneverify + "(covered, tasknames, fnids, fns, d, invalidtasks=invalidtasks)"
call2 = self.rq.setsceneverify + "(covered, tasknames, fnids, fns, d)"
locs = { "covered" : self.rq.scenequeue_covered, "tasknames" : self.rqdata.runq_task, "fnids" : self.rqdata.runq_fnid, "fns" : self.rqdata.taskData.fn_index, "d" : self.cooker.expanded_data, "invalidtasks" : invalidtasks }
locs = { "covered" : self.rq.scenequeue_covered, "tasknames" : self.rqdata.runq_task, "fnids" : self.rqdata.runq_fnid, "fns" : self.rqdata.taskData.fn_index, "d" : self.cooker.data, "invalidtasks" : invalidtasks }
# Backwards compatibility with older versions without invalidtasks
try:
covered_remove = bb.utils.better_eval(call, locs)
@@ -1822,7 +1822,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
sq_taskname.append(taskname)
sq_task.append(task)
call = self.rq.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d)"
locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.expanded_data }
locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.data }
valid = bb.utils.better_eval(call, locs)

valid_new = stamppresent

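All of the call sites above follow one pattern: build a Python expression string naming the metadata-defined hook, then evaluate it with bb.utils.better_eval() against an explicit locals dict. The same pattern, reduced to standard eval for illustration (the validator name and stub are hypothetical):

    def run_hook(funcname, locs):
        # bb.utils.better_eval() is essentially eval() with a restricted,
        # explicit scope; the hook name comes from the metadata
        call = funcname + "(sq_fn, sq_task, sq_hash, sq_hashfn, d)"
        return eval(call, {}, locs)

    locs = {"myvalidator": lambda *args: [0],  # stub standing in for the hook
            "sq_fn": [], "sq_task": [], "sq_hash": [], "sq_hashfn": [], "d": None}
    print(run_hook("myvalidator", locs))  # [0]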
@@ -115,7 +115,7 @@ class ProcessServer(Process, BaseImplServer):
self.quitout.recv()
self.quit = True

self.idle_commands(.1, [self.command_channel, self.quitout])
self.idle_commands(.1, [self.event_queue._reader, self.command_channel, self.quitout])
except Exception:
logger.exception('Running command %s', command)


@@ -235,7 +235,6 @@ class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer):
fds = [self]
nextsleep = 0.1
for function, data in self._idlefuns.items():
retval = None
try:
retval = function(self, data, False)
if retval is False:
@@ -252,9 +251,6 @@ class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer):
except:
import traceback
traceback.print_exc()
if retval == None:
# the function execution failed; delete it
del self._idlefuns[function]
pass

socktimeout = self.socket.gettimeout() or nextsleep
@@ -306,8 +302,6 @@ class BitBakeXMLRPCServerConnection(BitBakeBaseServerConnection):

_, error = self.connection.runCommand(["setFeatures", self.featureset])
if error:
# disconnect the client, we can't make the setFeature work
self.connection.removeClient()
# no need to log it here, the error shall be sent to the client
raise BaseException(error)


@@ -195,12 +195,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
if cs:
data = data + cs

taskdep = dataCache.task_deps[fn]
if 'nostamp' in taskdep and task in taskdep['nostamp']:
# Nostamp tasks need an implicit taint so that they force any dependent tasks to run
import uuid
data = data + str(uuid.uuid4())

taint = self.read_taint(fn, task, dataCache.stamp[fn])
if taint:
data = data + taint
@@ -314,12 +308,6 @@ def clean_basepaths(a):
b[clean_basepath(x)] = a[x]
return b

def clean_basepaths_list(a):
b = []
for x in a:
b.append(clean_basepath(x))
return b

def compare_sigfiles(a, b, recursecb = None):
output = []

@@ -419,17 +407,6 @@ def compare_sigfiles(a, b, recursecb = None):
for f in removed:
output.append("Dependency on checksum of file %s was removed" % (f))

changed = []
for idx, task in enumerate(a_data['runtaskdeps']):
a = a_data['runtaskdeps'][idx]
b = b_data['runtaskdeps'][idx]
if a_data['runtaskhashes'][a] != b_data['runtaskhashes'][b]:
changed.append("%s with hash %s\n changed to\n%s with hash %s" % (a, a_data['runtaskhashes'][a], b, b_data['runtaskhashes'][b]))

if changed:
output.append("runtaskdeps changed from %s to %s" % (clean_basepaths_list(a_data['runtaskdeps']), clean_basepaths_list(b_data['runtaskdeps'])))
output.append("\n".join(changed))


if 'runtaskhashes' in a_data and 'runtaskhashes' in b_data:
a = a_data['runtaskhashes']
@@ -506,17 +483,4 @@ def dump_sigfile(a):
if 'taint' in a_data:
output.append("Tainted (by forced/invalidated task): %s" % a_data['taint'])

data = a_data['basehash']
for dep in a_data['runtaskdeps']:
data = data + a_data['runtaskhashes'][dep]

for c in a_data['file_checksum_values']:
data = data + c[1]

if 'taint' in a_data:
data = data + a_data['taint']

h = hashlib.md5(data).hexdigest()
output.append("Computed Hash is %s" % h)

return output

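The trailing computation in dump_sigfile above is just an md5 over the concatenated base hash, dependency hashes, file checksums, and taint. Standalone, with invented field values (.encode() added for Python 3 portability; the original is Python 2 and hashes the raw string):

    import hashlib

    a_data = {"basehash": "aa", "runtaskdeps": ["dep1"],
              "runtaskhashes": {"dep1": "bb"},
              "file_checksum_values": [("f.c", "cc")]}

    data = a_data["basehash"]
    for dep in a_data["runtaskdeps"]:
        data += a_data["runtaskhashes"][dep]
    for c in a_data["file_checksum_values"]:
        data += c[1]
    print(hashlib.md5(data.encode()).hexdigest())  # the recomputed task hash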
@@ -121,12 +121,6 @@ class DataExpansions(unittest.TestCase):
keys = self.d.keys()
self.assertEqual(keys, ['value_of_foo', 'foo', 'bar'])

def test_keys_deletion(self):
newd = bb.data.createCopy(self.d)
newd.delVar("bar")
keys = newd.keys()
self.assertEqual(keys, ['value_of_foo', 'foo'])

class TestNestedExpansions(unittest.TestCase):
def setUp(self):
self.d = bb.data.init()

@@ -24,7 +24,6 @@ import tempfile
import subprocess
import os
from bb.fetch2 import URI
from bb.fetch2 import FetchMethod
import bb

class URITest(unittest.TestCase):
@@ -566,83 +565,5 @@ class URLHandle(unittest.TestCase):
result = bb.fetch.encodeurl(v)
self.assertEqual(result, k)

class FetchMethodTest(FetcherTest):

test_git_uris = {
# version pattern "X.Y.Z"
("mx-1.0", "git://github.com/clutter-project/mx.git;branch=mx-1.4", "9b1db6b8060bd00b121a692f942404a24ae2960f", "")
: "1.99.4",
# version pattern "vX.Y"
("mtd-utils", "git://git.infradead.org/mtd-utils.git", "ca39eb1d98e736109c64ff9c1aa2a6ecca222d8f", "")
: "1.5.0",
# version pattern "pkg_name-X.Y"
("presentproto", "git://anongit.freedesktop.org/git/xorg/proto/presentproto", "24f3a56e541b0a9e6c6ee76081f441221a120ef9", "")
: "1.0",
# version pattern "pkg_name-vX.Y.Z"
("dtc", "git://git.qemu.org/dtc.git", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "")
: "1.4.0",
# combination version pattern
("sysprof", "git://git.gnome.org/sysprof", "cd44ee6644c3641507fb53b8a2a69137f2971219", "")
: "1.2.0",
("u-boot-mkimage", "git://git.denx.de/u-boot.git;branch=master;protocol=git", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "")
: "2014.01",
# version pattern "yyyymmdd"
("mobile-broadband-provider-info", "git://git.gnome.org/mobile-broadband-provider-info", "4ed19e11c2975105b71b956440acdb25d46a347d", "")
: "20120614",
# packages with a valid GITTAGREGEX
("xf86-video-omap", "git://anongit.freedesktop.org/xorg/driver/xf86-video-omap", "ae0394e687f1a77e966cf72f895da91840dffb8f", "(?P<pver>(\d+\.(\d\.?)*))")
: "0.4.3",
("build-appliance-image", "git://git.yoctoproject.org/poky", "b37dd451a52622d5b570183a81583cc34c2ff555", "(?P<pver>(([0-9][\.|_]?)+[0-9]))")
: "11.0.0",
("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot", "cd437ecbd8986c894442f8fce1e0061e20f04dee", "chkconfig\-(?P<pver>((\d+[\.\-_]*)+))")
: "1.3.59",
("remake", "git://github.com/rocky/remake.git", "f05508e521987c8494c92d9c2871aec46307d51d", "(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))")
: "3.82+dbg0.9",
}

test_wget_uris = {
# packages with versions inside directory name
("util-linux", "http://kernel.org/pub/linux/utils/util-linux/v2.23/util-linux-2.24.2.tar.bz2", "", "")
: "2.24.2",
("enchant", "http://www.abisource.com/downloads/enchant/1.6.0/enchant-1.6.0.tar.gz", "", "")
: "1.6.0",
("cmake", "http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz", "", "")
: "2.8.12.1",
# packages with versions only in current directory
("eglic", "http://downloads.yoctoproject.org/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2", "", "")
: "2.19",
("gnu-config", "http://downloads.yoctoproject.org/releases/gnu-config/gnu-config-20120814.tar.bz2", "", "")
: "20120814",
# packages with "99" in the name of possible version
("pulseaudio", "http://freedesktop.org/software/pulseaudio/releases/pulseaudio-4.0.tar.xz", "", "")
: "5.0",
("xserver-xorg", "http://xorg.freedesktop.org/releases/individual/xserver/xorg-server-1.15.1.tar.bz2", "", "")
: "1.15.1",
# packages with valid REGEX_URI and REGEX
("cups", "http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2", "http://www.cups.org/software.php", "(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz")
: "2.0.0",
("db", "http://download.oracle.com/berkeley-db/db-5.3.21.tar.gz", "http://www.oracle.com/technetwork/products/berkeleydb/downloads/index-082944.html", "http://download.oracle.com/otn/berkeley-db/(?P<name>db-)(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz")
: "6.1.19",
}
if os.environ.get("BB_SKIP_NETTESTS") == "yes":
print("Unset BB_SKIP_NETTESTS to run network tests")
else:
def test_git_latest_versionstring(self):
for k, v in self.test_git_uris.items():
self.d.setVar("PN", k[0])
self.d.setVar("SRCREV", k[2])
self.d.setVar("GITTAGREGEX", k[3])
ud = bb.fetch2.FetchData(k[1], self.d)
verstring = ud.method.latest_versionstring(ud, self.d)
r = bb.utils.vercmp_string(v, verstring)
self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))

def test_wget_latest_versionstring(self):
for k, v in self.test_wget_uris.items():
self.d.setVar("PN", k[0])
self.d.setVar("REGEX_URI", k[2])
self.d.setVar("REGEX", k[3])
ud = bb.fetch2.FetchData(k[1], self.d)
verstring = ud.method.latest_versionstring(ud, self.d)
r = bb.utils.vercmp_string(v, verstring)
self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))

@@ -36,10 +36,6 @@ class VerCmpString(unittest.TestCase):
self.assertTrue(result < 0)
result = bb.utils.vercmp_string('1.1', '1_p2')
self.assertTrue(result < 0)
result = bb.utils.vercmp_string('1.0', '1.0+1.1-beta1')
self.assertTrue(result < 0)
result = bb.utils.vercmp_string('1.1', '1.0+1.1-beta1')
self.assertTrue(result > 0)

def test_explode_dep_versions(self):
correctresult = {"foo" : ["= 1.10"]}

@@ -95,7 +95,7 @@ class TinfoilConfigParameters(ConfigParameters):
self.initial_options = options
super(TinfoilConfigParameters, self).__init__()

def parseCommandLine(self, argv=sys.argv):
def parseCommandLine(self):
class DummyOptions:
def __init__(self, initial_options):
for key, val in initial_options.items():

@@ -26,20 +26,12 @@ os.environ["DJANGO_SETTINGS_MODULE"] = "toaster.toastermain.settings"

import toaster.toastermain.settings as toaster_django_settings
from toaster.orm.models import Build, Task, Recipe, Layer_Version, Layer, Target, LogMessage, HelpText
from toaster.orm.models import Target_Image_File, BuildArtifact
from toaster.orm.models import Target_Image_File
from toaster.orm.models import Variable, VariableHistory
from toaster.orm.models import Package, Package_File, Target_Installed_Package, Target_File
from toaster.orm.models import Task_Dependency, Package_Dependency
from toaster.orm.models import Recipe_Dependency
from bb.msg import BBLogFormatter as format
from django.db import models
from pprint import pformat
import logging

from django.db import transaction, connection

logger = logging.getLogger("BitBake")


class NotExisting(Exception):
pass
@@ -51,57 +43,8 @@ class ORMWrapper(object):
"""

def __init__(self):
self.layer_version_objects = []
self.task_objects = {}
self.recipe_objects = {}
pass

@staticmethod
def _build_key(**kwargs):
key = "0"
for k in sorted(kwargs.keys()):
if isinstance(kwargs[k], models.Model):
key += "-%d" % kwargs[k].id
else:
key += "-%s" % str(kwargs[k])
return key


def _cached_get_or_create(self, clazz, **kwargs):
""" This is a memory-cached get_or_create. We assume that the objects will not be created in the
database through any other means.
"""

assert issubclass(clazz, models.Model), "_cached_get_or_create needs to get the class as first argument"

key = ORMWrapper._build_key(**kwargs)
dictname = "objects_%s" % clazz.__name__
if not dictname in vars(self).keys():
vars(self)[dictname] = {}

created = False
if not key in vars(self)[dictname].keys():
vars(self)[dictname][key] = clazz.objects.create(**kwargs)
created = True

return (vars(self)[dictname][key], created)


def _cached_get(self, clazz, **kwargs):
""" This is a memory-cached get. We assume that the objects will not change in the database between gets.
"""
assert issubclass(clazz, models.Model), "_cached_get needs to get the class as first argument"

key = ORMWrapper._build_key(**kwargs)
dictname = "objects_%s" % clazz.__name__

if not dictname in vars(self).keys():
vars(self)[dictname] = {}

if not key in vars(self)[dictname].keys():
vars(self)[dictname][key] = clazz.objects.get(**kwargs)

return vars(self)[dictname][key]

def create_build_object(self, build_info, brbe):
|
||||
assert 'machine' in build_info
|
||||
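The caching pattern above keys each object by its class name plus a string built from the sorted keyword arguments, so repeated lookups never hit the database twice. A standalone sketch of the same idea, without the Django dependency (CachedFactory is illustrative, not part of Toaster):

    class CachedFactory(object):
        def __init__(self, factory):
            self.factory = factory   # callable that actually creates objects
            self.cache = {}

        @staticmethod
        def _build_key(**kwargs):
            # stable key from sorted kwargs, mirroring ORMWrapper._build_key
            return tuple(sorted((k, str(v)) for k, v in kwargs.items()))

        def get_or_create(self, **kwargs):
            key = self._build_key(**kwargs)
            created = key not in self.cache
            if created:
                self.cache[key] = self.factory(**kwargs)
            return self.cache[key], created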
@@ -122,18 +65,13 @@ class ORMWrapper(object):
build_name=build_info['build_name'],
bitbake_version=build_info['bitbake_version'])

logger.debug(1, "buildinfohelper: build is created %s" % build)
if brbe is not None:
logger.debug(1, "buildinfohelper: brbe is %s" % brbe)
from bldcontrol.models import BuildEnvironment, BuildRequest
br, be = brbe.split(":")

buildrequest = BuildRequest.objects.get(pk = br)
buildrequest.build = build
buildrequest.save()

build.project_id = buildrequest.project_id
build.save()

return build

def create_target_objects(self, target_info):
@@ -145,7 +83,7 @@ class ORMWrapper(object):
tgt_object = Target.objects.create( build = target_info['build'],
target = tgt_name,
is_image = False,
)
);
targets.append(tgt_object)
return targets

@@ -165,7 +103,8 @@ class ORMWrapper(object):
build.outcome = outcome
build.save()

def update_target_set_license_manifest(self, target, license_manifest_path):
def update_target_object(self, target, license_manifest_path):

target.license_manifest_path = license_manifest_path
target.save()

@@ -174,47 +113,39 @@ class ORMWrapper(object):
assert 'recipe' in task_information
assert 'task_name' in task_information

# we use must_exist info for database look-up optimization
task_object, created = self._cached_get_or_create(Task,
build=task_information['build'],
recipe=task_information['recipe'],
task_name=task_information['task_name']
)
if created and must_exist:
task_information['debug'] = "build id %d, recipe id %d" % (task_information['build'].pk, task_information['recipe'].pk)
raise NotExisting("Task object created when expected to exist", task_information)
task_object, created = Task.objects.get_or_create(
build=task_information['build'],
recipe=task_information['recipe'],
task_name=task_information['task_name'],
)

if must_exist and created:
task_information['debug'] = "build id %d, recipe id %d" % (task_information['build'].pk, task_information['recipe'].pk)
task_object.delete()
raise NotExisting("Task object created when expected to exist", task_information)

object_changed = False
for v in vars(task_object):
if v in task_information.keys():
if vars(task_object)[v] != task_information[v]:
vars(task_object)[v] = task_information[v]
object_changed = True
vars(task_object)[v] = task_information[v]

# update setscene-related information if the task has a setscene
if task_object.outcome == Task.OUTCOME_COVERED and 1 == task_object.get_related_setscene().count():
task_object.outcome = Task.OUTCOME_CACHED
object_changed = True
# update setscene-related information
if 1 == Task.objects.related_setscene(task_object).count():
if task_object.outcome == Task.OUTCOME_COVERED:
task_object.outcome = Task.OUTCOME_CACHED

outcome_task_setscene = Task.objects.get(task_executed=True, build = task_object.build,
recipe = task_object.recipe, task_name=task_object.task_name+"_setscene").outcome
if outcome_task_setscene == Task.OUTCOME_SUCCESS:
task_object.sstate_result = Task.SSTATE_RESTORED
object_changed = True
elif outcome_task_setscene == Task.OUTCOME_FAILED:
task_object.sstate_result = Task.SSTATE_FAILED
object_changed = True

# mark down duration if we have a start time and a current time
if 'start_time' in task_information.keys() and 'end_time' in task_information.keys():
duration = task_information['end_time'] - task_information['start_time']
task_object.elapsed_time = duration
object_changed = True
del task_information['start_time']
del task_information['end_time']

if object_changed:
task_object.save()
task_object.save()
return task_object

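The two variants of this method differ mainly in write behaviour: one tracks an object_changed flag and saves only when a field actually changed, the other saves unconditionally on every event. A minimal sketch of the dirty-tracking idea (update_and_maybe_save is illustrative, not Toaster API):

    def update_and_maybe_save(obj, updates):
        # copy matching fields onto the object; save only if one changed
        changed = False
        for field, value in updates.items():
            if hasattr(obj, field) and getattr(obj, field) != value:
                setattr(obj, field, value)
                changed = True
        if changed:
            obj.save()   # a single write, skipped entirely for no-op updates
        return changed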
@@ -222,22 +153,20 @@ class ORMWrapper(object):
assert 'layer_version' in recipe_information
assert 'file_path' in recipe_information

if recipe_information['file_path'].startswith(recipe_information['layer_version'].layer.local_path):
recipe_information['file_path'] = recipe_information['file_path'][len(recipe_information['layer_version'].layer.local_path):].lstrip("/")

recipe_object, created = self._cached_get_or_create(Recipe, layer_version=recipe_information['layer_version'],
file_path=recipe_information['file_path'])
if created and must_exist:
recipe_object, created = Recipe.objects.get_or_create(
layer_version=recipe_information['layer_version'],
file_path=recipe_information['file_path'])

if must_exist and created:
recipe_object.delete()
raise NotExisting("Recipe object created when expected to exist", recipe_information)

object_changed = False
for v in vars(recipe_object):
if v in recipe_information.keys():
object_changed = True
vars(recipe_object)[v] = recipe_information[v]

if object_changed:
recipe_object.save()
recipe_object.save()

return recipe_object

@@ -256,53 +185,19 @@ class ORMWrapper(object):
priority = layer_version_information['priority']
)

self.layer_version_objects.append(layer_version_object)

return layer_version_object

def get_update_layer_object(self, layer_information, brbe):
def get_update_layer_object(self, layer_information):
assert 'name' in layer_information
assert 'local_path' in layer_information
assert 'layer_index_url' in layer_information

if brbe is None:
layer_object, created = Layer.objects.get_or_create(
layer_object, created = Layer.objects.get_or_create(
name=layer_information['name'],
local_path=layer_information['local_path'],
layer_index_url=layer_information['layer_index_url'])
return layer_object
else:
# we are under managed mode; we must match the layer used in the Project Layer
from bldcontrol.models import BuildEnvironment, BuildRequest
br_id, be_id = brbe.split(":")

# find layer by checkout path;
from bldcontrol import bbcontroller
bc = bbcontroller.getBuildEnvironmentController(pk = be_id)

# we might have a race condition here, as the project layers may change between the build trigger and the actual build execution
# but we can only match on the layer name, so the worst thing can happen is a mis-identification of the layer, not a total failure

# note that this is different
buildrequest = BuildRequest.objects.get(pk = br_id)
for brl in buildrequest.brlayer_set.all():
localdirname = os.path.join(bc.getGitCloneDirectory(brl.giturl, brl.commit), brl.dirpath)
# we get a relative path, unless running in HEAD mode where the path is absolute
if not localdirname.startswith("/"):
localdirname = os.path.join(bc.be.sourcedir, localdirname)
#logger.debug(1, "Localdirname %s lcal_path %s" % (localdirname, layer_information['local_path']))
if localdirname.startswith(layer_information['local_path']):
# we matched the BRLayer, but we need the layer_version that generated this BR; reverse of the Project.schedule_build()
#logger.debug(1, "Matched %s to BRlayer %s" % (pformat(layer_information["local_path"]), localdirname))
for pl in buildrequest.project.projectlayer_set.filter(layercommit__layer__name = brl.name):
if pl.layercommit.layer.vcs_url == brl.giturl :
layer = pl.layercommit.layer
layer.local_path = layer_information['local_path']
layer.save()
return layer

raise NotExisting("Unidentified layer %s" % pformat(layer_information))

return layer_object

def save_target_file_information(self, build_obj, target_obj, filedata):
assert isinstance(build_obj, Build)
@@ -334,7 +229,7 @@ class ORMWrapper(object):
parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
if len(parent_path) == 0:
parent_path = "/"
parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
tf_obj = Target_File.objects.create(
target = target_obj,
path = path,
@@ -368,7 +263,7 @@ class ORMWrapper(object):
permission = permission,
owner = user,
group = group)
parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
tf_obj.directory = parent_obj
tf_obj.save()

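The parent_path expression above is a hand-rolled dirname: it drops the last path component and falls back to "/" for top-level entries. Equivalent, in brief (the sample path is hypothetical):

    import os.path

    path = "/usr/bin/app"
    parent_path = "/".join(path.split("/")[:-1]) or "/"
    assert parent_path == os.path.dirname(path)   # '/usr/bin'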
@@ -423,7 +318,8 @@ class ORMWrapper(object):
searchname = pkgpnmap[p]['OPKGN']

packagedict[p]['object'], created = Package.objects.get_or_create( build = build_obj, name = searchname )
if created or packagedict[p]['object'].size == -1: # save the data anyway we can, not just if it was not created here; bug [YOCTO #6887]
if created:
# package was not build in the current build, but
# fill in everything we can from the runtime-reverse package data
try:
packagedict[p]['object'].recipe = recipes[pkgpnmap[p]['PN']]
@@ -437,14 +333,11 @@
packagedict[p]['object'].size = int(pkgpnmap[p]['PKGSIZE'])

# no files recorded for this package, so save files info
packagefile_objects = []
for targetpath in pkgpnmap[p]['FILES_INFO']:
targetfilesize = pkgpnmap[p]['FILES_INFO'][targetpath]
packagefile_objects.append(Package_File( package = packagedict[p]['object'],
Package_File.objects.create( package = packagedict[p]['object'],
path = targetpath,
size = targetfilesize))
if len(packagefile_objects):
Package_File.objects.bulk_create(packagefile_objects)
size = targetfilesize)
except KeyError as e:
errormsg += " stpi: Key error, package %s key %s \n" % ( p, e )

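A recurring theme in these hunks is one side batching inserts with Django's bulk_create while the other issues one create call per row. A hedged sketch of the batching pattern (it assumes a configured Django project; Package_File and its arguments match the model usage above):

    def save_package_files(package_obj, files_info):
        # files_info maps a target path to its file size, as in FILES_INFO
        objs = [Package_File(package = package_obj, path = path, size = size)
                for path, size in files_info.items()]
        if objs:
            # one batched INSERT instead of len(objs) separate INSERTs
            Package_File.objects.bulk_create(objs)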
@@ -454,7 +347,6 @@ class ORMWrapper(object):

Target_Installed_Package.objects.create(target = target_obj, package = packagedict[p]['object'])

packagedeps_objs = []
for p in packagedict:
for (px,deptype) in packagedict[p]['depends']:
if deptype == 'depends':
@@ -462,32 +354,19 @@
elif deptype == 'recommends':
tdeptype = Package_Dependency.TYPE_TRECOMMENDS

packagedeps_objs.append(Package_Dependency( package = packagedict[p]['object'],
Package_Dependency.objects.create( package = packagedict[p]['object'],
depends_on = packagedict[px]['object'],
dep_type = tdeptype,
target = target_obj))

if len(packagedeps_objs) > 0:
Package_Dependency.objects.bulk_create(packagedeps_objs)
target = target_obj);

if (len(errormsg) > 0):
logger.warn("buildinfohelper: target_package_info could not identify recipes: \n%s" % errormsg)
raise Exception(errormsg)

def save_target_image_file_information(self, target_obj, file_name, file_size):
target_image_file = Target_Image_File.objects.create( target = target_obj,
file_name = file_name,
file_size = file_size)

def save_artifact_information(self, build_obj, file_name, file_size):
# we skip the image files from other builds
if Target_Image_File.objects.filter(file_name = file_name).count() > 0:
return

# do not update artifacts found in other builds
if BuildArtifact.objects.filter(file_name = file_name).count() > 0:
return

BuildArtifact.objects.create(build = build_obj, file_name = file_name, file_size = file_size)
target_image_file.save()

def create_logmessage(self, log_information):
assert 'build' in log_information
@@ -529,13 +408,10 @@ class ORMWrapper(object):
bp_object.save()

# save any attached file information
packagefile_objects = []
for path in package_info['FILES_INFO']:
packagefile_objects.append(Package_File( package = bp_object,
fo = Package_File.objects.create( package = bp_object,
path = path,
size = package_info['FILES_INFO'][path] ))
if len(packagefile_objects):
Package_File.objects.bulk_create(packagefile_objects)
size = package_info['FILES_INFO'][path] )

def _po_byname(p):
pkg, created = Package.objects.get_or_create(build = build_obj, name = p)
@@ -544,44 +420,39 @@ class ORMWrapper(object):
pkg.save()
return pkg

packagedeps_objs = []
# save soft dependency information
if 'RDEPENDS' in package_info and package_info['RDEPENDS']:
for p in bb.utils.explode_deps(package_info['RDEPENDS']):
packagedeps_objs.append(Package_Dependency( package = bp_object,
depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RDEPENDS))
Package_Dependency.objects.get_or_create( package = bp_object,
depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RDEPENDS)
if 'RPROVIDES' in package_info and package_info['RPROVIDES']:
for p in bb.utils.explode_deps(package_info['RPROVIDES']):
packagedeps_objs.append(Package_Dependency( package = bp_object,
depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RPROVIDES))
Package_Dependency.objects.get_or_create( package = bp_object,
depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RPROVIDES)
if 'RRECOMMENDS' in package_info and package_info['RRECOMMENDS']:
for p in bb.utils.explode_deps(package_info['RRECOMMENDS']):
packagedeps_objs.append(Package_Dependency( package = bp_object,
depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RRECOMMENDS))
Package_Dependency.objects.get_or_create( package = bp_object,
depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RRECOMMENDS)
if 'RSUGGESTS' in package_info and package_info['RSUGGESTS']:
for p in bb.utils.explode_deps(package_info['RSUGGESTS']):
packagedeps_objs.append(Package_Dependency( package = bp_object,
depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RSUGGESTS))
Package_Dependency.objects.get_or_create( package = bp_object,
depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RSUGGESTS)
if 'RREPLACES' in package_info and package_info['RREPLACES']:
for p in bb.utils.explode_deps(package_info['RREPLACES']):
packagedeps_objs.append(Package_Dependency( package = bp_object,
depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RREPLACES))
Package_Dependency.objects.get_or_create( package = bp_object,
depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RREPLACES)
if 'RCONFLICTS' in package_info and package_info['RCONFLICTS']:
for p in bb.utils.explode_deps(package_info['RCONFLICTS']):
packagedeps_objs.append(Package_Dependency( package = bp_object,
depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS))

if len(packagedeps_objs) > 0:
Package_Dependency.objects.bulk_create(packagedeps_objs)
Package_Dependency.objects.get_or_create( package = bp_object,
depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS)

return bp_object

def save_build_variables(self, build_obj, vardump):
assert isinstance(build_obj, Build)

helptext_objects = []
for k in vardump:
desc = vardump[k]['doc']
desc = vardump[k]['doc'];
if desc is None:
var_words = [word for word in k.split('_')]
root_var = "_".join([word for word in var_words if word.isupper()])
@@ -589,31 +460,25 @@ class ORMWrapper(object):
desc = vardump[root_var]['doc']
if desc is None:
desc = ''
if len(desc):
helptext_objects.append(HelpText(build=build_obj,
if desc:
helptext_obj = HelpText.objects.create(build=build_obj,
area=HelpText.VARIABLE,
key=k,
text=desc))
text=desc)
if not bool(vardump[k]['func']):
value = vardump[k]['v']
value = vardump[k]['v'];
if value is None:
value = ''
variable_obj = Variable.objects.create( build = build_obj,
variable_name = k,
variable_value = value,
description = desc)

varhist_objects = []
for vh in vardump[k]['history']:
if not 'documentation.conf' in vh['file']:
varhist_objects.append(VariableHistory( variable = variable_obj,
VariableHistory.objects.create( variable = variable_obj,
file_name = vh['file'],
line_number = vh['line'],
operation = vh['op']))
if len(varhist_objects):
VariableHistory.objects.bulk_create(varhist_objects)

HelpText.objects.bulk_create(helptext_objects)
operation = vh['op'])

class MockEvent: pass # sometimes we mock an event, declare it here

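Each of the R* branches above feeds bb.utils.explode_deps, which flattens an RDEPENDS-style string into a plain list of package names, discarding any version constraints. Illustrative only:

    import bb.utils

    deps = bb.utils.explode_deps("glibc (>= 2.19) busybox update-rc.d")
    # deps == ['glibc', 'busybox', 'update-rc.d']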
@@ -630,16 +495,11 @@ class BuildInfoHelper(object):
self.internal_state = {}
self.internal_state['taskdata'] = {}
self.task_order = 0
self.autocommit_step = 1
self.server = server
# we use manual transactions if the database doesn't autocommit on us
if not connection.features.autocommits_when_autocommit_is_off:
transaction.set_autocommit(False)
self.orm_wrapper = ORMWrapper()
self.has_build_history = has_build_history
self.tmp_dir = self.server.runCommand(["getVariable", "TMPDIR"])[0]
self.brbe = self.server.runCommand(["getVariable", "TOASTER_BRBE"])[0]
logger.debug(1, "buildinfohelper: Build info helper inited %s" % vars(self))


def _configure_django(self):
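The constructor above switches Django to manual transactions when the backend supports it, so the many small writes during a build can be committed in batches. A hedged sketch of that pattern (Django 1.6+ transaction API; do_many_orm_writes is a hypothetical workload):

    from django.db import connection, transaction

    if not connection.features.autocommits_when_autocommit_is_off:
        transaction.set_autocommit(False)   # start batching writes
        try:
            do_many_orm_writes()
            transaction.commit()            # one commit for the whole batch
        finally:
            transaction.set_autocommit(True)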
@@ -684,47 +544,19 @@ class BuildInfoHelper(object):
assert path.startswith("/")
assert 'build' in self.internal_state

if self.brbe is None:
def _slkey_interactive(layer_version):
assert isinstance(layer_version, Layer_Version)
return len(layer_version.layer.local_path)
def _slkey(layer_version):
assert isinstance(layer_version, Layer_Version)
return len(layer_version.layer.local_path)

# Heuristics: we always match recipe to the deepest layer path in the discovered layers
for lvo in sorted(self.orm_wrapper.layer_version_objects, reverse=True, key=_slkey_interactive):
# we can match to the recipe file path
if path.startswith(lvo.layer.local_path):
return lvo
# Heuristics: we always match recipe to the deepest layer path that
# we can match to the recipe file path
for bl in sorted(Layer_Version.objects.filter(build = self.internal_state['build']), reverse=True, key=_slkey):
if (path.startswith(bl.layer.local_path)):
return bl

else:
br_id, be_id = self.brbe.split(":")
from bldcontrol.bbcontroller import getBuildEnvironmentController
from bldcontrol.models import BuildRequest
bc = getBuildEnvironmentController(pk = be_id)

def _slkey_managed(layer_version):
return len(bc.getGitCloneDirectory(layer_version.giturl, layer_version.commit) + layer_version.dirpath)

# Heuristics: we match the path to where the layers have been checked out
for brl in sorted(BuildRequest.objects.get(pk = br_id).brlayer_set.all(), reverse = True, key = _slkey_managed):
localdirname = os.path.join(bc.getGitCloneDirectory(brl.giturl, brl.commit), brl.dirpath)
# we get a relative path, unless running in HEAD mode where the path is absolute
if not localdirname.startswith("/"):
localdirname = os.path.join(bc.be.sourcedir, localdirname)
if path.startswith(localdirname):
#logger.warn("-- managed: matched path %s with layer %s " % (path, localdirname))
# we matched the BRLayer, but we need the layer_version that generated this br
for lvo in self.orm_wrapper.layer_version_objects:
if brl.name == lvo.layer.name:
return lvo

#if we get here, we didn't read layers correctly; dump whatever information we have on the error log
logger.error("Could not match layer version for recipe path %s : %s" % (path, self.orm_wrapper.layer_version_objects))

#mockup the new layer
unknown_layer, created = Layer.objects.get_or_create(name="__FIXME__unidentified_layer", local_path="/", layer_index_url="")
unknown_layer_version_obj, created = Layer_Version.objects.get_or_create(layer = unknown_layer, build = self.internal_state['build'])

return unknown_layer_version_obj
#TODO: if we get here, we didn't read layers correctly
assert False
return None

def _get_recipe_information_from_taskfile(self, taskfile):
localfilepath = taskfile.split(":")[-1]
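Both the interactive and the managed branch implement the same heuristic: sort the candidate layer paths by length, longest first, and return the first one that is a prefix of the recipe path, so nested layers win over their parents. A standalone sketch:

    import os

    def match_deepest_layer(path, layer_paths):
        # longest candidate first, so the most specific layer matches
        for candidate in sorted(layer_paths, key=len, reverse=True):
            if path.startswith(candidate.rstrip(os.sep) + os.sep):
                return candidate
        return None

    # match_deepest_layer('/srv/layers/meta-foo/recipes/a.bb',
    #                     ['/srv/layers', '/srv/layers/meta-foo'])
    # returns '/srv/layers/meta-foo'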
@@ -767,25 +599,13 @@ class BuildInfoHelper(object):

################################
## external available methods to store information
@staticmethod
def _get_data_from_event(event):
evdata = None
if '_localdata' in vars(event):
evdata = event._localdata
elif 'data' in vars(event):
evdata = event.data
else:
raise Exception("Event with neither _localdata or data properties")
return evdata

def store_layer_info(self, event):
layerinfos = BuildInfoHelper._get_data_from_event(event)
assert '_localdata' in vars(event)
layerinfos = event._localdata
self.internal_state['lvs'] = {}
for layer in layerinfos:
try:
self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)] = layerinfos[layer]['version']
except NotExisting as nee:
logger.warn("buildinfohelper: cannot identify layer exception:%s " % nee)
self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer])] = layerinfos[layer]['version']


def store_started_build(self, event):
@@ -797,13 +617,10 @@ class BuildInfoHelper(object):
self.internal_state['build'] = build_obj

# save layer version information for this build
if not 'lvs' in self.internal_state:
logger.error("Layer version information not found; Check if the bitbake server was configured to inherit toaster.bbclass.")
else:
for layer_obj in self.internal_state['lvs']:
self.orm_wrapper.get_update_layer_version_object(build_obj, layer_obj, self.internal_state['lvs'][layer_obj])
for layer_obj in self.internal_state['lvs']:
self.orm_wrapper.get_update_layer_version_object(build_obj, layer_obj, self.internal_state['lvs'][layer_obj])

del self.internal_state['lvs']
del self.internal_state['lvs']

# create target information
target_information = {}
@@ -813,27 +630,20 @@
self.internal_state['targets'] = self.orm_wrapper.create_target_objects(target_information)

# Save build configuration
data = self.server.runCommand(["getAllKeysWithFlags", ["doc", "func"]])[0]
self.orm_wrapper.save_build_variables(build_obj, data)
self.orm_wrapper.save_build_variables(build_obj, self.server.runCommand(["getAllKeysWithFlags", ["doc", "func"]])[0])

return self.brbe



def update_target_image_file(self, event):
image_fstypes = self.server.runCommand(["getVariable", "IMAGE_FSTYPES"])[0]
evdata = BuildInfoHelper._get_data_from_event(event)

for t in self.internal_state['targets']:
if t.is_image == True:
output_files = list(evdata.viewkeys())
output_files = list(event._localdata.viewkeys())
for output in output_files:
if t.target in output and 'rootfs' in output and not output.endswith(".manifest"):
self.orm_wrapper.save_target_image_file_information(t, output, evdata[output])

def update_artifact_image_file(self, event):
evdata = BuildInfoHelper._get_data_from_event(event)
for artifact_path in evdata.keys():
self.orm_wrapper.save_artifact_information(self.internal_state['build'], artifact_path, evdata[artifact_path])
if t.target in output and output.split('.rootfs.')[1] in image_fstypes:
self.orm_wrapper.save_target_image_file_information(t, output, event._localdata[output])

def update_build_information(self, event, errors, warnings, taskfailures):
if 'build' in self.internal_state:
@@ -841,12 +651,12 @@


def store_license_manifest_path(self, event):
deploy_dir = BuildInfoHelper._get_data_from_event(event)['deploy_dir']
image_name = BuildInfoHelper._get_data_from_event(event)['image_name']
path = deploy_dir + "/licenses/" + image_name + "/license.manifest"
deploy_dir = event._localdata['deploy_dir']
image_name = event._localdata['image_name']
path = deploy_dir + "/licenses/" + image_name + "/"
for target in self.internal_state['targets']:
if target.target in image_name:
self.orm_wrapper.update_target_set_license_manifest(target, path)
self.orm_wrapper.update_target_object(target, path)


def store_started_task(self, event):
@@ -890,21 +700,14 @@


def store_tasks_stats(self, event):
for (taskfile, taskname, taskstats, recipename) in BuildInfoHelper._get_data_from_event(event):
for (taskfile, taskname, taskstats, recipename) in event._localdata:
localfilepath = taskfile.split(":")[-1]
assert localfilepath.startswith("/")

recipe_information = self._get_recipe_information_from_taskfile(taskfile)
try:
if recipe_information['file_path'].startswith(recipe_information['layer_version'].layer.local_path):
recipe_information['file_path'] = recipe_information['file_path'][len(recipe_information['layer_version'].layer.local_path):].lstrip("/")

recipe_object = Recipe.objects.get(layer_version = recipe_information['layer_version'],
recipe_object = Recipe.objects.get(layer_version = recipe_information['layer_version'],
file_path__endswith = recipe_information['file_path'],
name = recipename)
except Recipe.DoesNotExist:
logger.error("Could not find recipe for recipe_information %s name %s" % (pformat(recipe_information), recipename))
raise

task_information = {}
task_information['build'] = self.internal_state['build']
@@ -912,8 +715,6 @@
task_information['task_name'] = taskname
task_information['cpu_usage'] = taskstats['cpu_usage']
task_information['disk_io'] = taskstats['disk_io']
if 'elapsed_time' in taskstats:
task_information['elapsed_time'] = taskstats['elapsed_time']
task_obj = self.orm_wrapper.get_update_task_object(task_information, True) # must exist

def update_and_store_task(self, event):
@@ -967,18 +768,11 @@
task_information['outcome'] = Task.OUTCOME_FAILED
del self.internal_state['taskdata'][identifier]

if not connection.features.autocommits_when_autocommit_is_off:
# we force a sync point here, to get the progress bar to show
if self.autocommit_step % 3 == 0:
transaction.set_autocommit(True)
transaction.set_autocommit(False)
self.autocommit_step += 1

self.orm_wrapper.get_update_task_object(task_information, True) # must exist


def store_missed_state_tasks(self, event):
for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['missed']:
for (fn, taskname, taskhash, sstatefile) in event._localdata['missed']:

identifier = fn + taskname + "_setscene"
recipe_information = self._get_recipe_information_from_taskfile(fn)
@@ -996,7 +790,7 @@

self.orm_wrapper.get_update_task_object(task_information)

for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['found']:
for (fn, taskname, taskhash, sstatefile) in event._localdata['found']:

identifier = fn + taskname + "_setscene"
recipe_information = self._get_recipe_information_from_taskfile(fn)
@@ -1012,14 +806,15 @@


def store_target_package_data(self, event):
assert '_localdata' in vars(event)
# for all image targets
for target in self.internal_state['targets']:
if target.is_image:
try:
pkgdata = BuildInfoHelper._get_data_from_event(event)['pkgdata']
imgdata = BuildInfoHelper._get_data_from_event(event)['imgdata'][target.target]
pkgdata = event._localdata['pkgdata']
imgdata = event._localdata['imgdata'][target.target]
self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata, pkgdata, self.internal_state['recipes'])
filedata = BuildInfoHelper._get_data_from_event(event)['filedata'][target.target]
filedata = event._localdata['filedata'][target.target]
self.orm_wrapper.save_target_file_information(self.internal_state['build'], target, filedata)
except KeyError:
# we must have not got the data for this image, nothing to save
@@ -1055,29 +850,14 @@

recipe_info = {}
recipe_info['name'] = pn
recipe_info['version'] = event._depgraph['pn'][pn]['version'].lstrip(":")
recipe_info['layer_version'] = layer_version_obj

if 'version' in event._depgraph['pn'][pn]:
recipe_info['version'] = event._depgraph['pn'][pn]['version'].lstrip(":")

if 'summary' in event._depgraph['pn'][pn]:
recipe_info['summary'] = event._depgraph['pn'][pn]['summary']

if 'license' in event._depgraph['pn'][pn]:
recipe_info['license'] = event._depgraph['pn'][pn]['license']

if 'description' in event._depgraph['pn'][pn]:
recipe_info['description'] = event._depgraph['pn'][pn]['description']

if 'section' in event._depgraph['pn'][pn]:
recipe_info['section'] = event._depgraph['pn'][pn]['section']

if 'homepage' in event._depgraph['pn'][pn]:
recipe_info['homepage'] = event._depgraph['pn'][pn]['homepage']

if 'bugtracker' in event._depgraph['pn'][pn]:
recipe_info['bugtracker'] = event._depgraph['pn'][pn]['bugtracker']

recipe_info['summary'] = event._depgraph['pn'][pn]['summary']
recipe_info['license'] = event._depgraph['pn'][pn]['license']
recipe_info['description'] = event._depgraph['pn'][pn]['description']
recipe_info['section'] = event._depgraph['pn'][pn]['section']
recipe_info['homepage'] = event._depgraph['pn'][pn]['homepage']
recipe_info['bugtracker'] = event._depgraph['pn'][pn]['bugtracker']
recipe_info['file_path'] = file_name
recipe = self.orm_wrapper.get_update_recipe_object(recipe_info)
recipe.is_image = False
@@ -1099,22 +879,20 @@

# save recipe dependency
# buildtime
recipedeps_objects = []
for recipe in event._depgraph['depends']:
try:
target = self.internal_state['recipes'][recipe]
for dep in event._depgraph['depends'][recipe]:
dependency = self.internal_state['recipes'][dep]
recipedeps_objects.append(Recipe_Dependency( recipe = target,
depends_on = dependency, dep_type = Recipe_Dependency.TYPE_DEPENDS))
Recipe_Dependency.objects.get_or_create( recipe = target,
depends_on = dependency, dep_type = Recipe_Dependency.TYPE_DEPENDS)
except KeyError as e:
if e not in assume_provided and not str(e).startswith("virtual/"):
errormsg += " stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, e)
Recipe_Dependency.objects.bulk_create(recipedeps_objects)

# save all task information
def _save_a_task(taskdesc):
spec = re.split(r'\.', taskdesc)
spec = re.split(r'\.', taskdesc);
pn = ".".join(spec[0:-1])
taskname = spec[-1]
e = event
@@ -1131,7 +909,6 @@
tasks[taskdesc] = _save_a_task(taskdesc)

# create dependencies between tasks
taskdeps_objects = []
for taskdesc in event._depgraph['tdepends']:
target = tasks[taskdesc]
for taskdep in event._depgraph['tdepends'][taskdesc]:
@@ -1140,32 +917,29 @@
dep = _save_a_task(taskdep)
else:
dep = tasks[taskdep]
taskdeps_objects.append(Task_Dependency( task = target, depends_on = dep ))
Task_Dependency.objects.bulk_create(taskdeps_objects)
Task_Dependency.objects.get_or_create( task = target, depends_on = dep )

if (len(errormsg) > 0):
logger.warn("buildinfohelper: dependency info not identify recipes: \n%s" % errormsg)
raise Exception(errormsg)


def store_build_package_information(self, event):
package_info = BuildInfoHelper._get_data_from_event(event)
assert '_localdata' in vars(event)
package_info = event._localdata
self.orm_wrapper.save_build_package_information(self.internal_state['build'],
package_info,
self.internal_state['recipes'],
)

def _store_build_done(self, errorcode):
def _store_build_done(self):
br_id, be_id = self.brbe.split(":")
from bldcontrol.models import BuildEnvironment, BuildRequest
be = BuildEnvironment.objects.get(pk = be_id)
be.lock = BuildEnvironment.LOCK_LOCK
be.save()
br = BuildRequest.objects.get(pk = br_id)
if errorcode == 0:
# request archival of the project artifacts
br.state = BuildRequest.REQ_ARCHIVE
else:
br.state = BuildRequest.REQ_FAILED
br.state = BuildRequest.REQ_COMPLETED
br.build = self.internal_state['build']
br.save()

@@ -1173,19 +947,8 @@
mockevent = MockEvent()
mockevent.levelno = format.ERROR
mockevent.msg = text
mockevent.pathname = '-- None'
mockevent.lineno = -1
self.store_log_event(mockevent)

def store_log_exception(self, text, backtrace = ""):
mockevent = MockEvent()
mockevent.levelno = -1
mockevent.msg = text
mockevent.pathname = backtrace
mockevent.lineno = -1
self.store_log_event(mockevent)


def store_log_event(self, event):
if event.levelno < format.WARNING:
return
@@ -1200,20 +963,18 @@
self.internal_state['backlog'].append(event)
else: # we're under Toaster control, post the errors to the build request
from bldcontrol.models import BuildRequest, BRError
br, be = self.brbe.split(":")
br, be = brbe.split(":")
buildrequest = BuildRequest.objects.get(pk = br)
brerror = BRError.objects.create(req = buildrequest, errtype="build", errmsg = event.msg)

return

if 'build' in self.internal_state and 'backlog' in self.internal_state:
# if we have a backlog of events, do our best to save them here
if len(self.internal_state['backlog']):
tempevent = self.internal_state['backlog'].pop()
logger.debug(1, "buildinfohelper: Saving stored event %s " % tempevent)
print " Saving stored event ", tempevent
self.store_log_event(tempevent)
else:
logger.error("buildinfohelper: Events not saved: %s" % self.internal_state['backlog'])
del self.internal_state['backlog']

log_information = {}
@@ -1222,8 +983,6 @@
log_information['level'] = LogMessage.ERROR
elif event.levelno == format.WARNING:
log_information['level'] = LogMessage.WARNING
elif event.levelno == -1: # toaster self-logging
log_information['level'] = -1
else:
log_information['level'] = LogMessage.INFO

@@ -1232,19 +991,10 @@
log_information['lineno'] = event.lineno
self.orm_wrapper.create_logmessage(log_information)

def close(self, errorcode):
def close(self):
if self.brbe is not None:
self._store_build_done(errorcode)
buildinfohelper._store_build_done()

if 'backlog' in self.internal_state:
if 'build' in self.internal_state:
# we save missed events in the database for the current build
tempevent = self.internal_state['backlog'].pop()
self.store_log_event(tempevent)
else:
# we have no build, and we still have events; something amazingly wrong happend
for event in self.internal_state['backlog']:
logger.error("UNSAVED log: %s", event.msg)

if not connection.features.autocommits_when_autocommit_is_off:
transaction.set_autocommit(True)
for event in self.internal_state['backlog']:
print "NOTE: Unsaved log: ", event.msg

@@ -17,7 +17,6 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import sys
import gobject
import gtk
import Queue
@@ -216,12 +215,6 @@ def main(server, eventHandler, params):
print("XMLRPC Fault getting commandline:\n %s" % x)
return

try:
gtk.init_check()
except RuntimeError:
sys.stderr.write("Please set DISPLAY variable before running this command \n")
return

shutdown = 0

gtkgui = gtkthread(shutdown)
@@ -243,7 +236,7 @@ def main(server, eventHandler, params):
try:
event = eventHandler.waitEvent(0.25)
if gtkthread.quit.isSet():
_, error = server.runCommand(["stateForceShutdown"])
_, error = server.runCommand(["stateStop"])
if error:
print('Unable to cleanly stop: %s' % error)
break

@@ -271,7 +271,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
server.terminateServer()
return

if consolelogfile and not params.options.show_environment and not params.options.show_versions:
if consolelogfile and not params.options.show_environment:
bb.utils.mkdirhier(os.path.dirname(consolelogfile))
conlogformat = bb.msg.BBLogFormatter(format_str)
consolelog = logging.FileHandler(consolelogfile)
@@ -284,7 +284,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):

if not params.observe_only:
params.updateFromServer(server)
params.updateToServer(server, os.environ.copy())
params.updateToServer(server)
cmdline = params.parseActions()
if not cmdline:
print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")

@@ -361,13 +361,13 @@ class NCursesUI:
shutdown = shutdown + 1
pass

def main(server, eventHandler, params):
def main(server, eventHandler):
if not os.isatty(sys.stdout.fileno()):
print("FATAL: Unable to run 'ncurses' UI without a TTY.")
return
ui = NCursesUI()
try:
curses.wrapper(ui.main, server, eventHandler, params)
curses.wrapper(ui.main, server, eventHandler)
except:
import traceback
traceback.print_exc()

@@ -41,7 +41,7 @@ import sys
import time
import xmlrpclib

featureSet = [bb.cooker.CookerFeatures.HOB_EXTRA_CACHES, bb.cooker.CookerFeatures.SEND_DEPENDS_TREE, bb.cooker.CookerFeatures.BASEDATASTORE_TRACKING, bb.cooker.CookerFeatures.SEND_SANITYEVENTS]
featureSet = [bb.cooker.CookerFeatures.HOB_EXTRA_CACHES, bb.cooker.CookerFeatures.SEND_DEPENDS_TREE, bb.cooker.CookerFeatures.BASEDATASTORE_TRACKING]

logger = logging.getLogger("BitBake")
interactive = sys.stdout.isatty()
@@ -58,15 +58,19 @@ def _log_settings_from_server(server):
if error:
logger.error("Unable to get the value of BBINCLUDELOGS_LINES variable: %s" % error)
raise BaseException(error)
consolelogfile, error = server.runCommand(["getVariable", "BB_CONSOLELOG"])
if error:
logger.error("Unable to get the value of BB_CONSOLELOG variable: %s" % error)
raise BaseException(error)
return includelogs, loglines, consolelogfile

return includelogs, loglines

def main(server, eventHandler, params ):

includelogs, loglines = _log_settings_from_server(server)

# verify and warn
build_history_enabled = True
inheritlist, error = server.runCommand(["getVariable", "INHERIT"])
if not "buildhistory" in inheritlist.split(" "):
logger.warn("buildhistory is not enabled. Please enable INHERIT += \"buildhistory\" to see image details.")
build_history_enabled = False

helper = uihelper.BBUIHelper()

console = logging.StreamHandler(sys.stdout)
@@ -76,16 +80,6 @@ def main(server, eventHandler, params ):
console.setFormatter(format)
logger.addHandler(console)

includelogs, loglines, consolelogfile = _log_settings_from_server(server)

# verify and warn
build_history_enabled = True
inheritlist, error = server.runCommand(["getVariable", "INHERIT"])

if not "buildhistory" in inheritlist.split(" "):
logger.warn("buildhistory is not enabled. Please enable INHERIT += \"buildhistory\" to see image details.")
build_history_enabled = False

if not params.observe_only:
logger.error("ToasterUI can only work in observer mode")
return
@@ -101,16 +95,6 @@ def main(server, eventHandler, params ):

buildinfohelper = BuildInfoHelper(server, build_history_enabled)

if buildinfohelper.brbe is not None and consolelogfile:
# if we are under managed mode we have no other UI and we need to write our own file
bb.utils.mkdirhier(os.path.dirname(consolelogfile))
conlogformat = bb.msg.BBLogFormatter(format_str)
consolelog = logging.FileHandler(consolelogfile)
bb.msg.addDefaultlogFilter(consolelog)
consolelog.setFormatter(conlogformat)
logger.addHandler(consolelog)


while True:
try:
event = eventHandler.waitEvent(0.25)
@@ -130,12 +114,8 @@ def main(server, eventHandler, params ):

if isinstance(event, (bb.build.TaskStarted, bb.build.TaskSucceeded, bb.build.TaskFailedSilent)):
buildinfohelper.update_and_store_task(event)
logger.warn("Logfile for task %s" % event.logfile)
continue

if isinstance(event, bb.build.TaskBase):
logger.info(event._message)

if isinstance(event, bb.event.LogExecTTY):
logger.warn(event.msg)
continue
@@ -181,12 +161,7 @@ def main(server, eventHandler, params ):
if isinstance(event, bb.event.CacheLoadCompleted):
continue
if isinstance(event, bb.event.MultipleProviders):
logger.info("multiple providers are available for %s%s (%s)", event._is_runtime and "runtime " or "",
event._item,
", ".join(event._candidates))
logger.info("consider defining a PREFERRED_PROVIDER entry to match %s", event._item)
continue

if isinstance(event, bb.event.NoProvider):
return_value = 1
errors = errors + 1
@@ -244,7 +219,6 @@ def main(server, eventHandler, params ):
if isinstance(event, (bb.command.CommandCompleted,
bb.command.CommandFailed,
bb.command.CommandExit)):
errorcode = 0
if (isinstance(event, bb.command.CommandFailed)):
event.levelno = format.ERROR
event.msg = "Command Failed " + event.error
@@ -252,21 +226,18 @@ def main(server, eventHandler, params ):
event.lineno = 0
buildinfohelper.store_log_event(event)
errors += 1
errorcode = 1
logger.error("Command execution failed: %s", event.error)

buildinfohelper.update_build_information(event, errors, warnings, taskfailures)
buildinfohelper.close(errorcode)
# mark the log output; controllers may kill the toasterUI after seeing this log
logger.info("ToasterUI build done")
buildinfohelper.close()


# we start a new build info
if buildinfohelper.brbe is not None:

logger.debug(1, "ToasterUI under BuildEnvironment management - exiting after the build")
print "we are under BuildEnvironment management - after the build, we exit"
server.terminateServer()
else:
logger.debug(1, "ToasterUI prepared for new build")
print "prepared for new build"
errors = 0
warnings = 0
taskfailures = []
@@ -287,12 +258,8 @@ def main(server, eventHandler, params ):
buildinfohelper.store_missed_state_tasks(event)
elif event.type == "ImageFileSize":
buildinfohelper.update_target_image_file(event)
elif event.type == "ArtifactFileSize":
buildinfohelper.update_artifact_image_file(event)
elif event.type == "LicenseManifestPath":
buildinfohelper.store_license_manifest_path(event)
else:
logger.error("Unprocessed MetadataEvent %s " % str(event))
continue

if isinstance(event, bb.cooker.CookerExit):
@@ -325,25 +292,9 @@ def main(server, eventHandler, params ):
main.shutdown = 1
pass
except Exception as e:
# print errors to log
logger.error(e)
import traceback
from pprint import pformat
exception_data = traceback.format_exc()
logger.error("%s\n%s" % (e, exception_data))

exc_type, exc_value, tb = sys.exc_info()
if tb is not None:
curr = tb
while curr is not None:
logger.warn("Error data dump %s\n%s\n" % (traceback.format_tb(curr,1), pformat(curr.tb_frame.f_locals)))
curr = curr.tb_next

# save them to database, if possible; if it fails, we already logged to console.
try:
buildinfohelper.store_log_exception("%s\n%s" % (str(e), exception_data))
except Exception as ce:
logger.error("CRITICAL - Failed to to save toaster exception to the database: %s" % str(ce))

traceback.print_exc()
pass

if interrupted:

@@ -106,12 +106,7 @@ class BBUIEventQueue:

self.server.timeout = 1
while not self.server.quit:
try:
self.server.handle_request()
except Exception as e:
import traceback
logger.error("BBUIEventQueue.startCallbackHandler: Exception while trying to handle request: %s\n%s" % (e, traceback.format_exc(e)))

self.server.handle_request()
self.server.server_close()

def system_quit( self ):

@@ -54,9 +54,6 @@ def set_context(ctx):
# Context used in better_exec, eval
_context = clean_context()

class VersionStringException(Exception):
"""Exception raised when an invalid version specification is found"""

def explode_version(s):
r = []
alpha_regexp = re.compile('^([a-zA-Z]+)(.*)$')
@@ -132,28 +129,6 @@ def vercmp_string(a, b):
tb = split_version(b)
return vercmp(ta, tb)

def vercmp_string_op(a, b, op):
"""
Compare two versions and check if the specified comparison operator matches the result of the comparison.
This function is fairly liberal about what operators it will accept since there are a variety of styles
depending on the context.
"""
res = vercmp_string(a, b)
if op in ('=', '=='):
return res == 0
elif op == '<=':
return res <= 0
elif op == '>=':
return res >= 0
elif op in ('>', '>>'):
return res > 0
elif op in ('<', '<<'):
return res < 0
elif op == '!=':
return res != 0
else:
raise VersionStringException('Unsupported comparison operator "%s"' % op)

def explode_deps(s):
"""
Take an RDEPENDS style string of format:
@@ -214,7 +189,6 @@ def explode_dep_versions2(s):
i = i[1:]
else:
# This is an unsupported case!
raise VersionStringException('Invalid version specification in "(%s" - invalid or missing operator' % i)
lastcmp = (i or "")
i = ""
i.strip()
@@ -569,7 +543,7 @@ def filter_environment(good_vars):
os.unsetenv(key)
del os.environ[key]

if removed_vars:
if len(removed_vars):
logger.debug(1, "Removed the following variables from the environment: %s", ", ".join(removed_vars.keys()))

return removed_vars
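The vercmp_string_op helper shown in the hunk above maps operator spellings from both dpkg-style ("<<", ">>") and plain ("<", ">=") contexts onto the numeric result of vercmp_string. Illustrative calls:

    import bb.utils

    bb.utils.vercmp_string_op('1.0', '1.1', '<')     # True
    bb.utils.vercmp_string_op('2.4', '2.4', '>=')    # True
    bb.utils.vercmp_string_op('1.0', '1.0', '!=')    # False
    # any other operator raises bb.utils.VersionStringException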
@@ -961,184 +935,3 @@ def multiprocessingpool(*args, **kwargs):

return multiprocessing.Pool(*args, **kwargs)

def exec_flat_python_func(func, *args, **kwargs):
"""Execute a flat python function (defined with def funcname(args):...)"""
# Prepare a small piece of python code which calls the requested function
# To do this we need to prepare two things - a set of variables we can use to pass
# the values of arguments into the calling function, and the list of arguments for
# the function being called
context = {}
funcargs = []
# Handle unnamed arguments
aidx = 1
for arg in args:
argname = 'arg_%s' % aidx
context[argname] = arg
funcargs.append(argname)
aidx += 1
# Handle keyword arguments
context.update(kwargs)
funcargs.extend(['%s=%s' % (arg, arg) for arg in kwargs.iterkeys()])
code = 'retval = %s(%s)' % (func, ', '.join(funcargs))
comp = bb.utils.better_compile(code, '<string>', '<string>')
bb.utils.better_exec(comp, context, code, '<string>')
return context['retval']

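exec_flat_python_func, defined above, builds a tiny "retval = func(...)" snippet and runs it through better_compile and better_exec so that flat functions can be invoked by name. An illustrative call ('max' stands in for any module-level function reachable by name; this is a sketch, not a documented usage):

    import bb.utils

    result = bb.utils.exec_flat_python_func('max', 3, 7)
    # result == 7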
def edit_metadata_file(meta_file, variables, func):
|
||||
"""Edit a recipe or config file and modify one or more specified
|
||||
variable values set in the file using a specified callback function.
|
||||
The file is only written to if the value(s) actually change.
|
||||
"""
|
||||
var_res = {}
|
||||
for var in variables:
|
||||
var_res[var] = re.compile(r'^%s[ \t]*[?+]*=' % var)
|
||||
|
||||
updated = False
|
||||
varset_start = ''
|
||||
varlines = []
|
||||
newlines = []
|
||||
in_var = None
|
||||
full_value = ''
|
||||
|
||||
def handle_var_end():
|
||||
(newvalue, indent, minbreak) = func(in_var, full_value)
|
||||
if newvalue != full_value:
|
||||
if isinstance(newvalue, list):
|
||||
intentspc = ' ' * indent
|
||||
if minbreak:
|
||||
# First item on first line
|
||||
if len(newvalue) == 1:
|
||||
newlines.append('%s "%s"\n' % (varset_start, newvalue[0]))
|
||||
else:
|
||||
newlines.append('%s "%s\\\n' % (varset_start, newvalue[0]))
|
||||
for item in newvalue[1:]:
|
||||
newlines.append('%s%s \\\n' % (intentspc, item))
|
||||
newlines.append('%s"\n' % indentspc)
|
||||
else:
|
||||
# No item on first line
|
||||
newlines.append('%s " \\\n' % varset_start)
|
||||
for item in newvalue:
|
||||
newlines.append('%s%s \\\n' % (intentspc, item))
|
||||
newlines.append('%s"\n' % intentspc)
|
||||
else:
|
||||
newlines.append('%s "%s"\n' % (varset_start, newvalue))
|
||||
return True
|
||||
else:
|
||||
# Put the old lines back where they were
|
||||
newlines.extend(varlines)
|
||||
return False
|
||||
|
||||
with open(meta_file, 'r') as f:
|
||||
for line in f:
|
||||
if in_var:
|
||||
value = line.rstrip()
|
||||
varlines.append(line)
|
||||
full_value += value[:-1]
|
||||
if value.endswith('"') or value.endswith("'"):
|
||||
full_value = full_value[:-1]
|
||||
if handle_var_end():
|
||||
updated = True
|
||||
in_var = None
|
||||
else:
|
||||
matched = False
|
||||
for (varname, var_re) in var_res.iteritems():
|
||||
if var_re.match(line):
|
||||
splitvalue = line.split('"', 1)
|
||||
varset_start = splitvalue[0].rstrip()
|
||||
value = splitvalue[1].rstrip()
|
||||
if value.endswith('\\'):
|
||||
value = value[:-1]
|
||||
full_value = value
|
||||
varlines = [line]
|
||||
in_var = varname
|
||||
if value.endswith('"') or value.endswith("'"):
|
||||
full_value = full_value[:-1]
|
||||
if handle_var_end():
|
||||
updated = True
|
||||
in_var = None
|
||||
matched = True
|
||||
break
|
||||
if not matched:
|
||||
newlines.append(line)
|
||||
if updated:
|
||||
with open(meta_file, 'w') as f:
|
||||
f.writelines(newlines)
|
||||
|
||||
def edit_bblayers_conf(bblayers_conf, add, remove):
|
||||
"""Edit bblayers.conf, adding and/or removing layers"""
|
||||
|
||||
import fnmatch
|
||||
|
||||
def remove_trailing_sep(pth):
|
||||
if pth and pth[-1] == os.sep:
|
||||
pth = pth[:-1]
|
||||
return pth
|
||||
|
||||
def layerlist_param(value):
|
||||
if not value:
|
||||
return []
|
||||
elif isinstance(value, list):
|
||||
return [remove_trailing_sep(x) for x in value]
|
||||
else:
|
||||
return [remove_trailing_sep(value)]
|
||||
|
||||
notadded = []
|
||||
notremoved = []
|
||||
|
||||
addlayers = layerlist_param(add)
|
||||
removelayers = layerlist_param(remove)
|
||||
|
||||
# Need to use a list here because we can't set non-local variables from a callback in python 2.x
|
||||
bblayercalls = []
|
||||
|
||||
def handle_bblayers(varname, origvalue):
|
||||
bblayercalls.append(varname)
|
||||
updated = False
|
||||
bblayers = [remove_trailing_sep(x) for x in origvalue.split()]
|
||||
if removelayers:
|
||||
for removelayer in removelayers:
|
||||
matched = False
|
||||
for layer in bblayers:
|
||||
if fnmatch.fnmatch(layer, removelayer):
|
||||
updated = True
|
||||
matched = True
|
||||
bblayers.remove(layer)
|
||||
break
|
||||
if not matched:
|
||||
notremoved.append(removelayer)
|
||||
if addlayers:
|
||||
for addlayer in addlayers:
|
||||
if addlayer not in bblayers:
|
||||
updated = True
|
||||
bblayers.append(addlayer)
|
||||
else:
|
||||
notadded.append(addlayer)
|
||||
|
||||
if updated:
|
||||
return (bblayers, 2, False)
|
||||
else:
|
||||
return (origvalue, 2, False)
|
||||
|
||||
edit_metadata_file(bblayers_conf, ['BBLAYERS'], handle_bblayers)
|
||||
|
||||
if not bblayercalls:
|
||||
raise Exception('Unable to find BBLAYERS in %s' % bblayers_conf)
|
||||
|
||||
return (notadded, notremoved)
|
||||
|
||||
|
||||
def get_file_layer(filename, d):
|
||||
"""Determine the collection (as defined by a layer's layer.conf file) containing the specified file"""
|
||||
collections = (d.getVar('BBFILE_COLLECTIONS', True) or '').split()
|
||||
collection_res = {}
|
||||
for collection in collections:
|
||||
collection_res[collection] = d.getVar('BBFILE_PATTERN_%s' % collection, True) or ''
|
||||
|
||||
# Use longest path so we handle nested layers
|
||||
matchlen = 0
|
||||
match = None
|
||||
for collection, regex in collection_res.iteritems():
|
||||
if len(regex) > matchlen and re.match(regex, filename):
|
||||
matchlen = len(regex)
|
||||
match = collection
|
||||
return match
|
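# A usage sketch (requires a live bitbake datastore 'd', e.g. inside an
# event handler; the path is illustrative):
#
#   layer = get_file_layer(
#       '/home/user/poky/meta/recipes-core/busybox/busybox_1.22.1.bb', d)
#   # -> 'core', assuming BBFILE_PATTERN_core matches the path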
||||
|
||||
@@ -1,43 +0,0 @@
|
||||
Behold, mortal, the origins of Beautiful Soup...
|
||||
================================================
|
||||
|
||||
Leonard Richardson is the primary programmer.
|
||||
|
||||
Aaron DeVore is awesome.
|
||||
|
||||
Mark Pilgrim provided the encoding detection code that forms the base
|
||||
of UnicodeDammit.
|
||||
|
||||
Thomas Kluyver and Ezio Melotti finished the work of getting Beautiful
|
||||
Soup 4 working under Python 3.
|
||||
|
||||
Simon Willison wrote soupselect, which was used to make Beautiful Soup
|
||||
support CSS selectors.
|
||||
|
||||
Sam Ruby helped with a lot of edge cases.
|
||||
|
||||
Jonathan Ellis was awarded the prestigious Beau Potage D'Or for his
|
||||
work in solving the nestable tags conundrum.
|
||||
|
||||
An incomplete list of people who have contributed patches to Beautiful
|
||||
Soup:
|
||||
|
||||
Istvan Albert, Andrew Lin, Anthony Baxter, Andrew Boyko, Tony Chang,
|
||||
Zephyr Fang, Fuzzy, Roman Gaufman, Yoni Gilad, Richie Hindle, Peteris
|
||||
Krumins, Kent Johnson, Ben Last, Robert Leftwich, Staffan Malmgren,
|
||||
Ksenia Marasanova, JP Moins, Adam Monsen, John Nagle, "Jon", Ed
|
||||
Oskiewicz, Greg Phillips, Giles Radford, Arthur Rudolph, Marko
|
||||
Samastur, Jouni Seppänen, Alexander Schmolck, Andy Theyers, Glyn
|
||||
Webster, Paul Wright, Danny Yoo
|
||||
|
||||
An incomplete list of people who made suggestions or found bugs or
|
||||
found ways to break Beautiful Soup:
|
||||
|
||||
Hanno Böck, Matteo Bertini, Chris Curvey, Simon Cusack, Bruce Eckel,
|
||||
Matt Ernst, Michael Foord, Tom Harris, Bill de hOra, Donald Howes,
|
||||
Matt Patterson, Scott Roberts, Steve Strassmann, Mike Williams,
|
||||
warchild at redho dot com, Sami Kuisma, Carlos Rocha, Bob Hutchison,
|
||||
Joren Mc, Michal Migurski, John Kleven, Tim Heaney, Tripp Lilley, Ed
|
||||
Summers, Dennis Sutch, Chris Smith, Aaron Sweep^W Swartz, Stuart
|
||||
Turner, Greg Edwards, Kevin J Kalupson, Nikos Kouremenos, Artur de
|
||||
Sousa Rocha, Yichun Wei, Per Vognsen
|
||||
@@ -1,26 +0,0 @@
|
||||
Beautiful Soup is made available under the MIT license:
|
||||
|
||||
Copyright (c) 2004-2012 Leonard Richardson
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of this software and associated documentation files (the
|
||||
"Software"), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
|
||||
BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
|
||||
ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE, DAMMIT.
|
||||
|
||||
Beautiful Soup incorporates code from the html5lib library, which is
|
||||
also made available under the MIT license.
|
||||
@@ -1,406 +0,0 @@
|
||||
"""Beautiful Soup
|
||||
Elixir and Tonic
|
||||
"The Screen-Scraper's Friend"
|
||||
http://www.crummy.com/software/BeautifulSoup/
|
||||
|
||||
Beautiful Soup uses a pluggable XML or HTML parser to parse a
|
||||
(possibly invalid) document into a tree representation. Beautiful Soup
|
||||
provides methods and Pythonic idioms that make it easy to
|
||||
navigate, search, and modify the parse tree.
|
||||
|
||||
Beautiful Soup works with Python 2.6 and up. It works better if lxml
|
||||
and/or html5lib is installed.
|
||||
|
||||
For more than you ever wanted to know about Beautiful Soup, see the
|
||||
documentation:
|
||||
http://www.crummy.com/software/BeautifulSoup/bs4/doc/
|
||||
"""
|
||||
|
||||
__author__ = "Leonard Richardson (leonardr@segfault.org)"
|
||||
__version__ = "4.3.2"
|
||||
__copyright__ = "Copyright (c) 2004-2013 Leonard Richardson"
|
||||
__license__ = "MIT"
|
||||
|
||||
__all__ = ['BeautifulSoup']
|
||||
|
||||
import os
|
||||
import re
|
||||
import warnings
|
||||
|
||||
from .builder import builder_registry, ParserRejectedMarkup
|
||||
from .dammit import UnicodeDammit
|
||||
from .element import (
|
||||
CData,
|
||||
Comment,
|
||||
DEFAULT_OUTPUT_ENCODING,
|
||||
Declaration,
|
||||
Doctype,
|
||||
NavigableString,
|
||||
PageElement,
|
||||
ProcessingInstruction,
|
||||
ResultSet,
|
||||
SoupStrainer,
|
||||
Tag,
|
||||
)
|
||||
|
||||
# The very first thing we do is give a useful error if someone is
|
||||
# running this code under Python 3 without converting it.
|
||||
syntax_error = u'You are trying to run the Python 2 version of Beautiful Soup under Python 3. This will not work. You need to convert the code, either by installing it (`python setup.py install`) or by running 2to3 (`2to3 -w bs4`).'
|
||||
|
||||
class BeautifulSoup(Tag):
|
||||
"""
|
||||
This class defines the basic interface called by the tree builders.
|
||||
|
||||
These methods will be called by the parser:
|
||||
reset()
|
||||
feed(markup)
|
||||
|
||||
The tree builder may call these methods from its feed() implementation:
|
||||
handle_starttag(name, attrs) # See note about return value
|
||||
handle_endtag(name)
|
||||
handle_data(data) # Appends to the current data node
|
||||
endData(containerClass=NavigableString) # Ends the current data node
|
||||
|
||||
No matter how complicated the underlying parser is, you should be
|
||||
able to build a tree using 'start tag' events, 'end tag' events,
|
||||
'data' events, and "done with data" events.
|
||||
|
||||
If you encounter an empty-element tag (aka a self-closing tag,
|
||||
like HTML's <br> tag), call handle_starttag and then
|
||||
handle_endtag.
|
||||
"""
|
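# To illustrate the event contract above, a builder's feed() could drive
# the soup like this (a hand-written sketch, equivalent to parsing
# '<p>hi<br></p>'; note that this class's handle_starttag also takes
# namespace and nsprefix arguments):
#
#   soup.handle_starttag('p', None, None, {})
#   soup.handle_data('hi')
#   soup.handle_starttag('br', None, None, {})  # empty-element tag:
#   soup.handle_endtag('br')                    # start, then end
#   soup.handle_endtag('p')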
||||
ROOT_TAG_NAME = u'[document]'
|
||||
|
||||
# If the end-user gives no indication which tree builder they
|
||||
# want, look for one with these features.
|
||||
DEFAULT_BUILDER_FEATURES = ['html', 'fast']
|
||||
|
||||
ASCII_SPACES = '\x20\x0a\x09\x0c\x0d'
|
||||
|
||||
def __init__(self, markup="", features=None, builder=None,
|
||||
parse_only=None, from_encoding=None, **kwargs):
|
||||
"""The Soup object is initialized as the 'root tag', and the
|
||||
provided markup (which can be a string or a file-like object)
|
||||
is fed into the underlying parser."""
|
||||
|
||||
if 'convertEntities' in kwargs:
|
||||
warnings.warn(
|
||||
"BS4 does not respect the convertEntities argument to the "
|
||||
"BeautifulSoup constructor. Entities are always converted "
|
||||
"to Unicode characters.")
|
||||
|
||||
if 'markupMassage' in kwargs:
|
||||
del kwargs['markupMassage']
|
||||
warnings.warn(
|
||||
"BS4 does not respect the markupMassage argument to the "
|
||||
"BeautifulSoup constructor. The tree builder is responsible "
|
||||
"for any necessary markup massage.")
|
||||
|
||||
if 'smartQuotesTo' in kwargs:
|
||||
del kwargs['smartQuotesTo']
|
||||
warnings.warn(
|
||||
"BS4 does not respect the smartQuotesTo argument to the "
|
||||
"BeautifulSoup constructor. Smart quotes are always converted "
|
||||
"to Unicode characters.")
|
||||
|
||||
if 'selfClosingTags' in kwargs:
|
||||
del kwargs['selfClosingTags']
|
||||
warnings.warn(
|
||||
"BS4 does not respect the selfClosingTags argument to the "
|
||||
"BeautifulSoup constructor. The tree builder is responsible "
|
||||
"for understanding self-closing tags.")
|
||||
|
||||
if 'isHTML' in kwargs:
|
||||
del kwargs['isHTML']
|
||||
warnings.warn(
|
||||
"BS4 does not respect the isHTML argument to the "
|
||||
"BeautifulSoup constructor. You can pass in features='html' "
|
||||
"or features='xml' to get a builder capable of handling "
|
||||
"one or the other.")
|
||||
|
||||
def deprecated_argument(old_name, new_name):
|
||||
if old_name in kwargs:
|
||||
warnings.warn(
|
||||
'The "%s" argument to the BeautifulSoup constructor '
|
||||
'has been renamed to "%s."' % (old_name, new_name))
|
||||
value = kwargs[old_name]
|
||||
del kwargs[old_name]
|
||||
return value
|
||||
return None
|
||||
|
||||
parse_only = parse_only or deprecated_argument(
|
||||
"parseOnlyThese", "parse_only")
|
||||
|
||||
from_encoding = from_encoding or deprecated_argument(
|
||||
"fromEncoding", "from_encoding")
|
||||
|
||||
if len(kwargs) > 0:
|
||||
arg = kwargs.keys().pop()
|
||||
raise TypeError(
|
||||
"__init__() got an unexpected keyword argument '%s'" % arg)
|
||||
|
||||
if builder is None:
|
||||
if isinstance(features, basestring):
|
||||
features = [features]
|
||||
if features is None or len(features) == 0:
|
||||
features = self.DEFAULT_BUILDER_FEATURES
|
||||
builder_class = builder_registry.lookup(*features)
|
||||
if builder_class is None:
|
||||
raise FeatureNotFound(
|
||||
"Couldn't find a tree builder with the features you "
|
||||
"requested: %s. Do you need to install a parser library?"
|
||||
% ",".join(features))
|
||||
builder = builder_class()
|
||||
self.builder = builder
|
||||
self.is_xml = builder.is_xml
|
||||
self.builder.soup = self
|
||||
|
||||
self.parse_only = parse_only
|
||||
|
||||
if hasattr(markup, 'read'): # It's a file-type object.
|
||||
markup = markup.read()
|
||||
elif len(markup) <= 256:
|
||||
# Print out warnings for a couple beginner problems
|
||||
# involving passing non-markup to Beautiful Soup.
|
||||
# Beautiful Soup will still parse the input as markup,
|
||||
# just in case that's what the user really wants.
|
||||
if (isinstance(markup, unicode)
|
||||
and not os.path.supports_unicode_filenames):
|
||||
possible_filename = markup.encode("utf8")
|
||||
else:
|
||||
possible_filename = markup
|
||||
is_file = False
|
||||
try:
|
||||
is_file = os.path.exists(possible_filename)
|
||||
except Exception, e:
|
||||
# This is almost certainly a problem involving
|
||||
# characters not valid in filenames on this
|
||||
# system. Just let it go.
|
||||
pass
|
||||
if is_file:
|
||||
warnings.warn(
|
||||
'"%s" looks like a filename, not markup. You should probably open this file and pass the filehandle into Beautiful Soup.' % markup)
|
||||
if markup[:5] == "http:" or markup[:6] == "https:":
|
||||
# TODO: This is ugly but I couldn't get it to work in
|
||||
# Python 3 otherwise.
|
||||
if ((isinstance(markup, bytes) and not b' ' in markup)
|
||||
or (isinstance(markup, unicode) and not u' ' in markup)):
|
||||
warnings.warn(
|
||||
'"%s" looks like a URL. Beautiful Soup is not an HTTP client. You should probably use an HTTP client to get the document behind the URL, and feed that document to Beautiful Soup.' % markup)
|
||||
|
||||
for (self.markup, self.original_encoding, self.declared_html_encoding,
|
||||
self.contains_replacement_characters) in (
|
||||
self.builder.prepare_markup(markup, from_encoding)):
|
||||
self.reset()
|
||||
try:
|
||||
self._feed()
|
||||
break
|
||||
except ParserRejectedMarkup:
|
||||
pass
|
||||
|
||||
# Clear out the markup and remove the builder's circular
|
||||
# reference to this object.
|
||||
self.markup = None
|
||||
self.builder.soup = None
|
||||
|
||||
def _feed(self):
|
||||
# Convert the document to Unicode.
|
||||
self.builder.reset()
|
||||
|
||||
self.builder.feed(self.markup)
|
||||
# Close out any unfinished strings and close all the open tags.
|
||||
self.endData()
|
||||
while self.currentTag.name != self.ROOT_TAG_NAME:
|
||||
self.popTag()
|
||||
|
||||
def reset(self):
|
||||
Tag.__init__(self, self, self.builder, self.ROOT_TAG_NAME)
|
||||
self.hidden = 1
|
||||
self.builder.reset()
|
||||
self.current_data = []
|
||||
self.currentTag = None
|
||||
self.tagStack = []
|
||||
self.preserve_whitespace_tag_stack = []
|
||||
self.pushTag(self)
|
||||
|
||||
def new_tag(self, name, namespace=None, nsprefix=None, **attrs):
|
||||
"""Create a new tag associated with this soup."""
|
||||
return Tag(None, self.builder, name, namespace, nsprefix, attrs)
|
||||
|
||||
def new_string(self, s, subclass=NavigableString):
|
||||
"""Create a new NavigableString associated with this soup."""
|
||||
navigable = subclass(s)
|
||||
navigable.setup()
|
||||
return navigable
|
||||
|
||||
def insert_before(self, successor):
|
||||
raise NotImplementedError("BeautifulSoup objects don't support insert_before().")
|
||||
|
||||
def insert_after(self, successor):
|
||||
raise NotImplementedError("BeautifulSoup objects don't support insert_after().")
|
||||
|
||||
def popTag(self):
|
||||
tag = self.tagStack.pop()
|
||||
if self.preserve_whitespace_tag_stack and tag == self.preserve_whitespace_tag_stack[-1]:
|
||||
self.preserve_whitespace_tag_stack.pop()
|
||||
#print "Pop", tag.name
|
||||
if self.tagStack:
|
||||
self.currentTag = self.tagStack[-1]
|
||||
return self.currentTag
|
||||
|
||||
def pushTag(self, tag):
|
||||
#print "Push", tag.name
|
||||
if self.currentTag:
|
||||
self.currentTag.contents.append(tag)
|
||||
self.tagStack.append(tag)
|
||||
self.currentTag = self.tagStack[-1]
|
||||
if tag.name in self.builder.preserve_whitespace_tags:
|
||||
self.preserve_whitespace_tag_stack.append(tag)
|
||||
|
||||
def endData(self, containerClass=NavigableString):
|
||||
if self.current_data:
|
||||
current_data = u''.join(self.current_data)
|
||||
# If whitespace is not preserved, and this string contains
|
||||
# nothing but ASCII spaces, replace it with a single space
|
||||
# or newline.
|
||||
if not self.preserve_whitespace_tag_stack:
|
||||
strippable = True
|
||||
for i in current_data:
|
||||
if i not in self.ASCII_SPACES:
|
||||
strippable = False
|
||||
break
|
||||
if strippable:
|
||||
if '\n' in current_data:
|
||||
current_data = '\n'
|
||||
else:
|
||||
current_data = ' '
|
||||
|
||||
# Reset the data collector.
|
||||
self.current_data = []
|
||||
|
||||
# Should we add this string to the tree at all?
|
||||
if self.parse_only and len(self.tagStack) <= 1 and \
|
||||
(not self.parse_only.text or \
|
||||
not self.parse_only.search(current_data)):
|
||||
return
|
||||
|
||||
o = containerClass(current_data)
|
||||
self.object_was_parsed(o)
|
||||
|
||||
def object_was_parsed(self, o, parent=None, most_recent_element=None):
|
||||
"""Add an object to the parse tree."""
|
||||
parent = parent or self.currentTag
|
||||
most_recent_element = most_recent_element or self._most_recent_element
|
||||
o.setup(parent, most_recent_element)
|
||||
|
||||
if most_recent_element is not None:
|
||||
most_recent_element.next_element = o
|
||||
self._most_recent_element = o
|
||||
parent.contents.append(o)
|
||||
|
||||
def _popToTag(self, name, nsprefix=None, inclusivePop=True):
|
||||
"""Pops the tag stack up to and including the most recent
|
||||
instance of the given tag. If inclusivePop is false, pops the tag
|
||||
stack up to but *not* including the most recent instance of
|
||||
the given tag."""
|
||||
#print "Popping to %s" % name
|
||||
if name == self.ROOT_TAG_NAME:
|
||||
# The BeautifulSoup object itself can never be popped.
|
||||
return
|
||||
|
||||
most_recently_popped = None
|
||||
|
||||
stack_size = len(self.tagStack)
|
||||
for i in range(stack_size - 1, 0, -1):
|
||||
t = self.tagStack[i]
|
||||
if (name == t.name and nsprefix == t.prefix):
|
||||
if inclusivePop:
|
||||
most_recently_popped = self.popTag()
|
||||
break
|
||||
most_recently_popped = self.popTag()
|
||||
|
||||
return most_recently_popped
|
||||
|
||||
def handle_starttag(self, name, namespace, nsprefix, attrs):
|
||||
"""Push a start tag on to the stack.
|
||||
|
||||
If this method returns None, the tag was rejected by the
|
||||
SoupStrainer. You should proceed as if the tag had not occurred
|
||||
in the document. For instance, if this was a self-closing tag,
|
||||
don't call handle_endtag.
|
||||
"""
|
||||
|
||||
# print "Start tag %s: %s" % (name, attrs)
|
||||
self.endData()
|
||||
|
||||
if (self.parse_only and len(self.tagStack) <= 1
|
||||
and (self.parse_only.text
|
||||
or not self.parse_only.search_tag(name, attrs))):
|
||||
return None
|
||||
|
||||
tag = Tag(self, self.builder, name, namespace, nsprefix, attrs,
|
||||
self.currentTag, self._most_recent_element)
|
||||
if tag is None:
|
||||
return tag
|
||||
if self._most_recent_element:
|
||||
self._most_recent_element.next_element = tag
|
||||
self._most_recent_element = tag
|
||||
self.pushTag(tag)
|
||||
return tag
|
||||
|
||||
def handle_endtag(self, name, nsprefix=None):
|
||||
#print "End tag: " + name
|
||||
self.endData()
|
||||
self._popToTag(name, nsprefix)
|
||||
|
||||
def handle_data(self, data):
|
||||
self.current_data.append(data)
|
||||
|
||||
def decode(self, pretty_print=False,
|
||||
eventual_encoding=DEFAULT_OUTPUT_ENCODING,
|
||||
formatter="minimal"):
|
||||
"""Returns a string or Unicode representation of this document.
|
||||
To get Unicode, pass None for encoding."""
|
||||
|
||||
if self.is_xml:
|
||||
# Print the XML declaration
|
||||
encoding_part = ''
|
||||
if eventual_encoding != None:
|
||||
encoding_part = ' encoding="%s"' % eventual_encoding
|
||||
prefix = u'<?xml version="1.0"%s?>\n' % encoding_part
|
||||
else:
|
||||
prefix = u''
|
||||
if not pretty_print:
|
||||
indent_level = None
|
||||
else:
|
||||
indent_level = 0
|
||||
return prefix + super(BeautifulSoup, self).decode(
|
||||
indent_level, eventual_encoding, formatter)
|
||||
|
||||
# Alias to make it easier to type import: 'from bs4 import _soup'
|
||||
_s = BeautifulSoup
|
||||
_soup = BeautifulSoup
|
||||
|
||||
class BeautifulStoneSoup(BeautifulSoup):
|
||||
"""Deprecated interface to an XML parser."""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs['features'] = 'xml'
|
||||
warnings.warn(
|
||||
'The BeautifulStoneSoup class is deprecated. Instead of using '
|
||||
'it, pass features="xml" into the BeautifulSoup constructor.')
|
||||
super(BeautifulStoneSoup, self).__init__(*args, **kwargs)
|
||||
|
||||
|
||||
class StopParsing(Exception):
|
||||
pass
|
||||
|
||||
class FeatureNotFound(ValueError):
|
||||
pass
|
||||
|
||||
|
||||
#By default, act as an HTML pretty-printer.
|
||||
if __name__ == '__main__':
|
||||
import sys
|
||||
soup = BeautifulSoup(sys.stdin)
|
||||
print soup.prettify()
|
||||
@@ -1,321 +0,0 @@
|
||||
from collections import defaultdict
|
||||
import itertools
|
||||
import sys
|
||||
from bs4.element import (
|
||||
CharsetMetaAttributeValue,
|
||||
ContentMetaAttributeValue,
|
||||
whitespace_re
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
'HTMLTreeBuilder',
|
||||
'SAXTreeBuilder',
|
||||
'TreeBuilder',
|
||||
'TreeBuilderRegistry',
|
||||
]
|
||||
|
||||
# Some useful features for a TreeBuilder to have.
|
||||
FAST = 'fast'
|
||||
PERMISSIVE = 'permissive'
|
||||
STRICT = 'strict'
|
||||
XML = 'xml'
|
||||
HTML = 'html'
|
||||
HTML_5 = 'html5'
|
||||
|
||||
|
||||
class TreeBuilderRegistry(object):
|
||||
|
||||
def __init__(self):
|
||||
self.builders_for_feature = defaultdict(list)
|
||||
self.builders = []
|
||||
|
||||
def register(self, treebuilder_class):
|
||||
"""Register a treebuilder based on its advertised features."""
|
||||
for feature in treebuilder_class.features:
|
||||
self.builders_for_feature[feature].insert(0, treebuilder_class)
|
||||
self.builders.insert(0, treebuilder_class)
|
||||
|
||||
def lookup(self, *features):
|
||||
if len(self.builders) == 0:
|
||||
# There are no builders at all.
|
||||
return None
|
||||
|
||||
if len(features) == 0:
|
||||
# They didn't ask for any features. Give them the most
|
||||
# recently registered builder.
|
||||
return self.builders[0]
|
||||
|
||||
# Go down the list of features in order, and eliminate any builders
|
||||
# that don't match every feature.
|
||||
features = list(features)
|
||||
features.reverse()
|
||||
candidates = None
|
||||
candidate_set = None
|
||||
while len(features) > 0:
|
||||
feature = features.pop()
|
||||
we_have_the_feature = self.builders_for_feature.get(feature, [])
|
||||
if len(we_have_the_feature) > 0:
|
||||
if candidates is None:
|
||||
candidates = we_have_the_feature
|
||||
candidate_set = set(candidates)
|
||||
else:
|
||||
# Eliminate any candidates that don't have this feature.
|
||||
candidate_set = candidate_set.intersection(
|
||||
set(we_have_the_feature))
|
||||
|
||||
# The only valid candidates are the ones in candidate_set.
|
||||
# Go through the original list of candidates and pick the first one
|
||||
# that's in candidate_set.
|
||||
if candidate_set is None:
|
||||
return None
|
||||
for candidate in candidates:
|
||||
if candidate in candidate_set:
|
||||
return candidate
|
||||
return None
|
||||
|
||||
# The BeautifulSoup class will take feature lists from developers and use them
|
||||
# to look up builders in this registry.
|
||||
builder_registry = TreeBuilderRegistry()
|
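# Example: once builders register themselves (see the imports at the
# bottom of this module), a lookup returns the most recently registered
# class advertising every requested feature, or None:
#
#   builder_class = builder_registry.lookup('html', 'fast')
#   if builder_class is not None:
#       builder = builder_class()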
||||
|
||||
class TreeBuilder(object):
|
||||
"""Turn a document into a Beautiful Soup object tree."""
|
||||
|
||||
features = []
|
||||
|
||||
is_xml = False
|
||||
preserve_whitespace_tags = set()
|
||||
empty_element_tags = None # A tag will be considered an empty-element
|
||||
# tag when and only when it has no contents.
|
||||
|
||||
# A value for these tag/attribute combinations is a space- or
|
||||
# comma-separated list of CDATA, rather than a single CDATA.
|
||||
cdata_list_attributes = {}
|
||||
|
||||
|
||||
def __init__(self):
|
||||
self.soup = None
|
||||
|
||||
def reset(self):
|
||||
pass
|
||||
|
||||
def can_be_empty_element(self, tag_name):
|
||||
"""Might a tag with this name be an empty-element tag?
|
||||
|
||||
The final markup may or may not actually present this tag as
|
||||
self-closing.
|
||||
|
||||
For instance: an HTMLBuilder does not consider a <p> tag to be
|
||||
an empty-element tag (it's not in
|
||||
HTMLBuilder.empty_element_tags). This means an empty <p> tag
|
||||
will be presented as "<p></p>", not "<p />".
|
||||
|
||||
The default implementation has no opinion about which tags are
|
||||
empty-element tags, so a tag will be presented as an
|
||||
empty-element tag if and only if it has no contents.
|
||||
"<foo></foo>" will become "<foo />", and "<foo>bar</foo>" will
|
||||
be left alone.
|
||||
"""
|
||||
if self.empty_element_tags is None:
|
||||
return True
|
||||
return tag_name in self.empty_element_tags
|
||||
|
||||
def feed(self, markup):
|
||||
raise NotImplementedError()
|
||||
|
||||
def prepare_markup(self, markup, user_specified_encoding=None,
|
||||
document_declared_encoding=None):
|
||||
return markup, None, None, False
|
||||
|
||||
def test_fragment_to_document(self, fragment):
|
||||
"""Wrap an HTML fragment to make it look like a document.
|
||||
|
||||
Different parsers do this differently. For instance, lxml
|
||||
introduces an empty <head> tag, and html5lib
|
||||
doesn't. Abstracting this away lets us write simple tests
|
||||
which run HTML fragments through the parser and compare the
|
||||
results against other HTML fragments.
|
||||
|
||||
This method should not be used outside of tests.
|
||||
"""
|
||||
return fragment
|
||||
|
||||
def set_up_substitutions(self, tag):
|
||||
return False
|
||||
|
||||
def _replace_cdata_list_attribute_values(self, tag_name, attrs):
|
||||
"""Replaces class="foo bar" with class=["foo", "bar"]
|
||||
|
||||
Modifies its input in place.
|
||||
"""
|
||||
if not attrs:
|
||||
return attrs
|
||||
if self.cdata_list_attributes:
|
||||
universal = self.cdata_list_attributes.get('*', [])
|
||||
tag_specific = self.cdata_list_attributes.get(
|
||||
tag_name.lower(), None)
|
||||
for attr in attrs.keys():
|
||||
if attr in universal or (tag_specific and attr in tag_specific):
|
||||
# We have a "class"-type attribute whose string
|
||||
# value is a whitespace-separated list of
|
||||
# values. Split it into a list.
|
||||
value = attrs[attr]
|
||||
if isinstance(value, basestring):
|
||||
values = whitespace_re.split(value)
|
||||
else:
|
||||
# html5lib sometimes calls setAttributes twice
|
||||
# for the same tag when rearranging the parse
|
||||
# tree. On the second call the attribute value
|
||||
# here is already a list. If this happens,
|
||||
# leave the value alone rather than trying to
|
||||
# split it again.
|
||||
values = value
|
||||
attrs[attr] = values
|
||||
return attrs
|
||||
|
||||
class SAXTreeBuilder(TreeBuilder):
|
||||
"""A Beautiful Soup treebuilder that listens for SAX events."""
|
||||
|
||||
def feed(self, markup):
|
||||
raise NotImplementedError()
|
||||
|
||||
def close(self):
|
||||
pass
|
||||
|
||||
def startElement(self, name, attrs):
|
||||
attrs = dict((key[1], value) for key, value in list(attrs.items()))
|
||||
#print "Start %s, %r" % (name, attrs)
|
||||
self.soup.handle_starttag(name, attrs)
|
||||
|
||||
def endElement(self, name):
|
||||
#print "End %s" % name
|
||||
self.soup.handle_endtag(name)
|
||||
|
||||
def startElementNS(self, nsTuple, nodeName, attrs):
|
||||
# Throw away (ns, nodeName) for now.
|
||||
self.startElement(nodeName, attrs)
|
||||
|
||||
def endElementNS(self, nsTuple, nodeName):
|
||||
# Throw away (ns, nodeName) for now.
|
||||
self.endElement(nodeName)
|
||||
#handler.endElementNS((ns, node.nodeName), node.nodeName)
|
||||
|
||||
def startPrefixMapping(self, prefix, nodeValue):
|
||||
# Ignore the prefix for now.
|
||||
pass
|
||||
|
||||
def endPrefixMapping(self, prefix):
|
||||
# Ignore the prefix for now.
|
||||
# handler.endPrefixMapping(prefix)
|
||||
pass
|
||||
|
||||
def characters(self, content):
|
||||
self.soup.handle_data(content)
|
||||
|
||||
def startDocument(self):
|
||||
pass
|
||||
|
||||
def endDocument(self):
|
||||
pass
|
||||
|
||||
|
||||
class HTMLTreeBuilder(TreeBuilder):
|
||||
"""This TreeBuilder knows facts about HTML.
|
||||
|
||||
Such as which tags are empty-element tags.
|
||||
"""
|
||||
|
||||
preserve_whitespace_tags = set(['pre', 'textarea'])
|
||||
empty_element_tags = set(['br' , 'hr', 'input', 'img', 'meta',
|
||||
'spacer', 'link', 'frame', 'base'])
|
||||
|
||||
# The HTML standard defines these attributes as containing a
|
||||
# space-separated list of values, not a single value. That is,
|
||||
# class="foo bar" means that the 'class' attribute has two values,
|
||||
# 'foo' and 'bar', not the single value 'foo bar'. When we
|
||||
# encounter one of these attributes, we will parse its value into
|
||||
# a list of values if possible. Upon output, the list will be
|
||||
# converted back into a string.
|
||||
cdata_list_attributes = {
|
||||
"*" : ['class', 'accesskey', 'dropzone'],
|
||||
"a" : ['rel', 'rev'],
|
||||
"link" : ['rel', 'rev'],
|
||||
"td" : ["headers"],
|
||||
"th" : ["headers"],
|
||||
"form" : ["accept-charset"],
|
||||
"object" : ["archive"],
|
||||
|
||||
# These are HTML5 specific, as are *.accesskey and *.dropzone above.
|
||||
"area" : ["rel"],
|
||||
"icon" : ["sizes"],
|
||||
"iframe" : ["sandbox"],
|
||||
"output" : ["for"],
|
||||
}
|
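# The practical effect, as a quick sketch (assuming any HTML builder is
# installed):
#
#   soup = BeautifulSoup('<p class="foo bar" id="x">text</p>')
#   soup.p['class']  # -> ['foo', 'bar'] ('class' is in the '*' entry)
#   soup.p['id']     # -> 'x' ('id' is not a cdata-list attribute)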
||||
|
||||
def set_up_substitutions(self, tag):
|
||||
# We are only interested in <meta> tags
|
||||
if tag.name != 'meta':
|
||||
return False
|
||||
|
||||
http_equiv = tag.get('http-equiv')
|
||||
content = tag.get('content')
|
||||
charset = tag.get('charset')
|
||||
|
||||
# We are interested in <meta> tags that say what encoding the
|
||||
# document was originally in. This means HTML 5-style <meta>
|
||||
# tags that provide the "charset" attribute. It also means
|
||||
# HTML 4-style <meta> tags that provide the "content"
|
||||
# attribute and have "http-equiv" set to "content-type".
|
||||
#
|
||||
# In both cases we will replace the value of the appropriate
|
||||
# attribute with a standin object that can take on any
|
||||
# encoding.
|
||||
meta_encoding = None
|
||||
if charset is not None:
|
||||
# HTML 5 style:
|
||||
# <meta charset="utf8">
|
||||
meta_encoding = charset
|
||||
tag['charset'] = CharsetMetaAttributeValue(charset)
|
||||
|
||||
elif (content is not None and http_equiv is not None
|
||||
and http_equiv.lower() == 'content-type'):
|
||||
# HTML 4 style:
|
||||
# <meta http-equiv="content-type" content="text/html; charset=utf8">
|
||||
tag['content'] = ContentMetaAttributeValue(content)
|
||||
|
||||
return (meta_encoding is not None)
|
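# A sketch of the visible effect: because the charset value is replaced
# with a CharsetMetaAttributeValue stand-in, re-encoding the tree
# rewrites the <meta> tag to match the output encoding:
#
#   soup = BeautifulSoup('<head><meta charset="utf-8"></head>')
#   soup.encode('latin-1')  # -> ...<meta charset="latin-1">...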
||||
|
||||
def register_treebuilders_from(module):
|
||||
"""Copy TreeBuilders from the given module into this module."""
|
||||
# I'm fairly sure this is not the best way to do this.
|
||||
this_module = sys.modules['bs4.builder']
|
||||
for name in module.__all__:
|
||||
obj = getattr(module, name)
|
||||
|
||||
if issubclass(obj, TreeBuilder):
|
||||
setattr(this_module, name, obj)
|
||||
this_module.__all__.append(name)
|
||||
# Register the builder while we're at it.
|
||||
this_module.builder_registry.register(obj)
|
||||
|
||||
class ParserRejectedMarkup(Exception):
|
||||
pass
|
||||
|
||||
# Builders are registered in reverse order of priority, so that custom
|
||||
# builder registrations will take precedence. In general, we want lxml
|
||||
# to take precedence over html5lib, because it's faster. And we only
|
||||
# want to use HTMLParser as a last result.
|
||||
from . import _htmlparser
|
||||
register_treebuilders_from(_htmlparser)
|
||||
try:
|
||||
from . import _html5lib
|
||||
register_treebuilders_from(_html5lib)
|
||||
except ImportError:
|
||||
# They don't have html5lib installed.
|
||||
pass
|
||||
try:
|
||||
from . import _lxml
|
||||
register_treebuilders_from(_lxml)
|
||||
except ImportError:
|
||||
# They don't have lxml installed.
|
||||
pass
|
||||
@@ -1,285 +0,0 @@
|
||||
__all__ = [
|
||||
'HTML5TreeBuilder',
|
||||
]
|
||||
|
||||
import warnings
|
||||
from bs4.builder import (
|
||||
PERMISSIVE,
|
||||
HTML,
|
||||
HTML_5,
|
||||
HTMLTreeBuilder,
|
||||
)
|
||||
from bs4.element import NamespacedAttribute
|
||||
import html5lib
|
||||
from html5lib.constants import namespaces
|
||||
from bs4.element import (
|
||||
Comment,
|
||||
Doctype,
|
||||
NavigableString,
|
||||
Tag,
|
||||
)
|
||||
|
||||
class HTML5TreeBuilder(HTMLTreeBuilder):
|
||||
"""Use html5lib to build a tree."""
|
||||
|
||||
features = ['html5lib', PERMISSIVE, HTML_5, HTML]
|
||||
|
||||
def prepare_markup(self, markup, user_specified_encoding):
|
||||
# Store the user-specified encoding for use later on.
|
||||
self.user_specified_encoding = user_specified_encoding
|
||||
yield (markup, None, None, False)
|
||||
|
||||
# These methods are defined by Beautiful Soup.
|
||||
def feed(self, markup):
|
||||
if self.soup.parse_only is not None:
|
||||
warnings.warn("You provided a value for parse_only, but the html5lib tree builder doesn't support parse_only. The entire document will be parsed.")
|
||||
parser = html5lib.HTMLParser(tree=self.create_treebuilder)
|
||||
doc = parser.parse(markup, encoding=self.user_specified_encoding)
|
||||
|
||||
# Set the character encoding detected by the tokenizer.
|
||||
if isinstance(markup, unicode):
|
||||
# We need to special-case this because html5lib sets
|
||||
# charEncoding to UTF-8 if it gets Unicode input.
|
||||
doc.original_encoding = None
|
||||
else:
|
||||
doc.original_encoding = parser.tokenizer.stream.charEncoding[0]
|
||||
|
||||
def create_treebuilder(self, namespaceHTMLElements):
|
||||
self.underlying_builder = TreeBuilderForHtml5lib(
|
||||
self.soup, namespaceHTMLElements)
|
||||
return self.underlying_builder
|
||||
|
||||
def test_fragment_to_document(self, fragment):
|
||||
"""See `TreeBuilder`."""
|
||||
return u'<html><head></head><body>%s</body></html>' % fragment
|
||||
|
||||
|
||||
class TreeBuilderForHtml5lib(html5lib.treebuilders._base.TreeBuilder):
|
||||
|
||||
def __init__(self, soup, namespaceHTMLElements):
|
||||
self.soup = soup
|
||||
super(TreeBuilderForHtml5lib, self).__init__(namespaceHTMLElements)
|
||||
|
||||
def documentClass(self):
|
||||
self.soup.reset()
|
||||
return Element(self.soup, self.soup, None)
|
||||
|
||||
def insertDoctype(self, token):
|
||||
name = token["name"]
|
||||
publicId = token["publicId"]
|
||||
systemId = token["systemId"]
|
||||
|
||||
doctype = Doctype.for_name_and_ids(name, publicId, systemId)
|
||||
self.soup.object_was_parsed(doctype)
|
||||
|
||||
def elementClass(self, name, namespace):
|
||||
tag = self.soup.new_tag(name, namespace)
|
||||
return Element(tag, self.soup, namespace)
|
||||
|
||||
def commentClass(self, data):
|
||||
return TextNode(Comment(data), self.soup)
|
||||
|
||||
def fragmentClass(self):
|
||||
self.soup = BeautifulSoup("")
|
||||
self.soup.name = "[document_fragment]"
|
||||
return Element(self.soup, self.soup, None)
|
||||
|
||||
def appendChild(self, node):
|
||||
# XXX This code is not covered by the BS4 tests.
|
||||
self.soup.append(node.element)
|
||||
|
||||
def getDocument(self):
|
||||
return self.soup
|
||||
|
||||
def getFragment(self):
|
||||
return html5lib.treebuilders._base.TreeBuilder.getFragment(self).element
|
||||
|
||||
class AttrList(object):
|
||||
def __init__(self, element):
|
||||
self.element = element
|
||||
self.attrs = dict(self.element.attrs)
|
||||
def __iter__(self):
|
||||
return list(self.attrs.items()).__iter__()
|
||||
def __setitem__(self, name, value):
|
||||
"set attr", name, value
|
||||
self.element[name] = value
|
||||
def items(self):
|
||||
return list(self.attrs.items())
|
||||
def keys(self):
|
||||
return list(self.attrs.keys())
|
||||
def __len__(self):
|
||||
return len(self.attrs)
|
||||
def __getitem__(self, name):
|
||||
return self.attrs[name]
|
||||
def __contains__(self, name):
|
||||
return name in list(self.attrs.keys())
|
||||
|
||||
|
||||
class Element(html5lib.treebuilders._base.Node):
|
||||
def __init__(self, element, soup, namespace):
|
||||
html5lib.treebuilders._base.Node.__init__(self, element.name)
|
||||
self.element = element
|
||||
self.soup = soup
|
||||
self.namespace = namespace
|
||||
|
||||
def appendChild(self, node):
|
||||
string_child = child = None
|
||||
if isinstance(node, basestring):
|
||||
# Some other piece of code decided to pass in a string
|
||||
# instead of creating a TextElement object to contain the
|
||||
# string.
|
||||
string_child = child = node
|
||||
elif isinstance(node, Tag):
|
||||
# Some other piece of code decided to pass in a Tag
|
||||
# instead of creating an Element object to contain the
|
||||
# Tag.
|
||||
child = node
|
||||
elif node.element.__class__ == NavigableString:
|
||||
string_child = child = node.element
|
||||
else:
|
||||
child = node.element
|
||||
|
||||
if not isinstance(child, basestring) and child.parent is not None:
|
||||
node.element.extract()
|
||||
|
||||
if (string_child and self.element.contents
|
||||
and self.element.contents[-1].__class__ == NavigableString):
|
||||
# We are appending a string onto another string.
|
||||
# TODO This has O(n^2) performance, for input like
|
||||
# "a</a>a</a>a</a>..."
|
||||
old_element = self.element.contents[-1]
|
||||
new_element = self.soup.new_string(old_element + string_child)
|
||||
old_element.replace_with(new_element)
|
||||
self.soup._most_recent_element = new_element
|
||||
else:
|
||||
if isinstance(node, basestring):
|
||||
# Create a brand new NavigableString from this string.
|
||||
child = self.soup.new_string(node)
|
||||
|
||||
# Tell Beautiful Soup to act as if it parsed this element
|
||||
# immediately after the parent's last descendant. (Or
|
||||
# immediately after the parent, if it has no children.)
|
||||
if self.element.contents:
|
||||
most_recent_element = self.element._last_descendant(False)
|
||||
else:
|
||||
most_recent_element = self.element
|
||||
|
||||
self.soup.object_was_parsed(
|
||||
child, parent=self.element,
|
||||
most_recent_element=most_recent_element)
|
||||
|
||||
def getAttributes(self):
|
||||
return AttrList(self.element)
|
||||
|
||||
def setAttributes(self, attributes):
|
||||
if attributes is not None and len(attributes) > 0:
|
||||
|
||||
converted_attributes = []
|
||||
for name, value in list(attributes.items()):
|
||||
if isinstance(name, tuple):
|
||||
new_name = NamespacedAttribute(*name)
|
||||
del attributes[name]
|
||||
attributes[new_name] = value
|
||||
|
||||
self.soup.builder._replace_cdata_list_attribute_values(
|
||||
self.name, attributes)
|
||||
for name, value in attributes.items():
|
||||
self.element[name] = value
|
||||
|
||||
# The attributes may contain variables that need substitution.
|
||||
# Call set_up_substitutions manually.
|
||||
#
|
||||
# The Tag constructor called this method when the Tag was created,
|
||||
# but we just set/changed the attributes, so call it again.
|
||||
self.soup.builder.set_up_substitutions(self.element)
|
||||
attributes = property(getAttributes, setAttributes)
|
||||
|
||||
def insertText(self, data, insertBefore=None):
|
||||
if insertBefore:
|
||||
text = TextNode(self.soup.new_string(data), self.soup)
|
||||
self.insertBefore(text, insertBefore)
|
||||
else:
|
||||
self.appendChild(data)
|
||||
|
||||
def insertBefore(self, node, refNode):
|
||||
index = self.element.index(refNode.element)
|
||||
if (node.element.__class__ == NavigableString and self.element.contents
|
||||
and self.element.contents[index-1].__class__ == NavigableString):
|
||||
# (See comments in appendChild)
|
||||
old_node = self.element.contents[index-1]
|
||||
new_str = self.soup.new_string(old_node + node.element)
|
||||
old_node.replace_with(new_str)
|
||||
else:
|
||||
self.element.insert(index, node.element)
|
||||
node.parent = self
|
||||
|
||||
def removeChild(self, node):
|
||||
node.element.extract()
|
||||
|
||||
def reparentChildren(self, new_parent):
|
||||
"""Move all of this tag's children into another tag."""
|
||||
element = self.element
|
||||
new_parent_element = new_parent.element
|
||||
# Determine what this tag's next_element will be once all the children
|
||||
# are removed.
|
||||
final_next_element = element.next_sibling
|
||||
|
||||
new_parents_last_descendant = new_parent_element._last_descendant(False, False)
|
||||
if len(new_parent_element.contents) > 0:
|
||||
# The new parent already contains children. We will be
|
||||
# appending this tag's children to the end.
|
||||
new_parents_last_child = new_parent_element.contents[-1]
|
||||
new_parents_last_descendant_next_element = new_parents_last_descendant.next_element
|
||||
else:
|
||||
# The new parent contains no children.
|
||||
new_parents_last_child = None
|
||||
new_parents_last_descendant_next_element = new_parent_element.next_element
|
||||
|
||||
to_append = element.contents
|
||||
append_after = new_parent.element.contents
|
||||
if len(to_append) > 0:
|
||||
# Set the first child's previous_element and previous_sibling
|
||||
# to elements within the new parent
|
||||
first_child = to_append[0]
|
||||
first_child.previous_element = new_parents_last_descendant
|
||||
first_child.previous_sibling = new_parents_last_child
|
||||
|
||||
# Fix the last child's next_element and next_sibling
|
||||
last_child = to_append[-1]
|
||||
last_child.next_element = new_parents_last_descendant_next_element
|
||||
last_child.next_sibling = None
|
||||
|
||||
for child in to_append:
|
||||
child.parent = new_parent_element
|
||||
new_parent_element.contents.append(child)
|
||||
|
||||
# Now that this element has no children, change its .next_element.
|
||||
element.contents = []
|
||||
element.next_element = final_next_element
|
||||
|
||||
def cloneNode(self):
|
||||
tag = self.soup.new_tag(self.element.name, self.namespace)
|
||||
node = Element(tag, self.soup, self.namespace)
|
||||
for key,value in self.attributes:
|
||||
node.attributes[key] = value
|
||||
return node
|
||||
|
||||
def hasContent(self):
|
||||
return self.element.contents
|
||||
|
||||
def getNameTuple(self):
|
||||
if self.namespace == None:
|
||||
return namespaces["html"], self.name
|
||||
else:
|
||||
return self.namespace, self.name
|
||||
|
||||
nameTuple = property(getNameTuple)
|
||||
|
||||
class TextNode(Element):
|
||||
def __init__(self, element, soup):
|
||||
html5lib.treebuilders._base.Node.__init__(self, None)
|
||||
self.element = element
|
||||
self.soup = soup
|
||||
|
||||
def cloneNode(self):
|
||||
raise NotImplementedError
|
||||
@@ -1,258 +0,0 @@
|
||||
"""Use the HTMLParser library to parse HTML files that aren't too bad."""
|
||||
|
||||
__all__ = [
|
||||
'HTMLParserTreeBuilder',
|
||||
]
|
||||
|
||||
from HTMLParser import (
|
||||
HTMLParser,
|
||||
HTMLParseError,
|
||||
)
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
# Starting in Python 3.2, the HTMLParser constructor takes a 'strict'
|
||||
# argument, which we'd like to set to False. Unfortunately,
|
||||
# http://bugs.python.org/issue13273 makes strict=True a better bet
|
||||
# before Python 3.2.3.
|
||||
#
|
||||
# At the end of this file, we monkeypatch HTMLParser so that
|
||||
# strict=True works well on Python 3.2.2.
|
||||
major, minor, release = sys.version_info[:3]
|
||||
CONSTRUCTOR_TAKES_STRICT = (
|
||||
major > 3
|
||||
or (major == 3 and minor > 2)
|
||||
or (major == 3 and minor == 2 and release >= 3))
|
||||
|
||||
from bs4.element import (
|
||||
CData,
|
||||
Comment,
|
||||
Declaration,
|
||||
Doctype,
|
||||
ProcessingInstruction,
|
||||
)
|
||||
from bs4.dammit import EntitySubstitution, UnicodeDammit
|
||||
|
||||
from bs4.builder import (
|
||||
HTML,
|
||||
HTMLTreeBuilder,
|
||||
STRICT,
|
||||
)
|
||||
|
||||
|
||||
HTMLPARSER = 'html.parser'
|
||||
|
||||
class BeautifulSoupHTMLParser(HTMLParser):
|
||||
def handle_starttag(self, name, attrs):
|
||||
# XXX namespace
|
||||
attr_dict = {}
|
||||
for key, value in attrs:
|
||||
# Change None attribute values to the empty string
|
||||
# for consistency with the other tree builders.
|
||||
if value is None:
|
||||
value = ''
|
||||
attr_dict[key] = value
|
||||
attrvalue = '""'
|
||||
self.soup.handle_starttag(name, None, None, attr_dict)
|
||||
|
||||
def handle_endtag(self, name):
|
||||
self.soup.handle_endtag(name)
|
||||
|
||||
def handle_data(self, data):
|
||||
self.soup.handle_data(data)
|
||||
|
||||
def handle_charref(self, name):
|
||||
# XXX workaround for a bug in HTMLParser. Remove this once
|
||||
# it's fixed.
|
||||
if name.startswith('x'):
|
||||
real_name = int(name.lstrip('x'), 16)
|
||||
elif name.startswith('X'):
|
||||
real_name = int(name.lstrip('X'), 16)
|
||||
else:
|
||||
real_name = int(name)
|
||||
|
||||
try:
|
||||
data = unichr(real_name)
|
||||
except (ValueError, OverflowError), e:
|
||||
data = u"\N{REPLACEMENT CHARACTER}"
|
||||
|
||||
self.handle_data(data)
|
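# A standalone illustration of the conversion above: '&#65;' and
# '&#x41;' both resolve to u'A', and out-of-range codepoints fall back
# to U+FFFD:
#
#   int('65')      # -> 65 -> unichr(65) == u'A'
#   int('41', 16)  # -> 65 -> unichr(65) == u'A'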
||||
|
||||
def handle_entityref(self, name):
|
||||
character = EntitySubstitution.HTML_ENTITY_TO_CHARACTER.get(name)
|
||||
if character is not None:
|
||||
data = character
|
||||
else:
|
||||
data = "&%s;" % name
|
||||
self.handle_data(data)
|
||||
|
||||
def handle_comment(self, data):
|
||||
self.soup.endData()
|
||||
self.soup.handle_data(data)
|
||||
self.soup.endData(Comment)
|
||||
|
||||
def handle_decl(self, data):
|
||||
self.soup.endData()
|
||||
if data.startswith("DOCTYPE "):
|
||||
data = data[len("DOCTYPE "):]
|
||||
elif data == 'DOCTYPE':
|
||||
# i.e. "<!DOCTYPE>"
|
||||
data = ''
|
||||
self.soup.handle_data(data)
|
||||
self.soup.endData(Doctype)
|
||||
|
||||
def unknown_decl(self, data):
|
||||
if data.upper().startswith('CDATA['):
|
||||
cls = CData
|
||||
data = data[len('CDATA['):]
|
||||
else:
|
||||
cls = Declaration
|
||||
self.soup.endData()
|
||||
self.soup.handle_data(data)
|
||||
self.soup.endData(cls)
|
||||
|
||||
def handle_pi(self, data):
|
||||
self.soup.endData()
|
||||
if data.endswith("?") and data.lower().startswith("xml"):
|
||||
# "An XHTML processing instruction using the trailing '?'
|
||||
# will cause the '?' to be included in data." - HTMLParser
|
||||
# docs.
|
||||
#
|
||||
# Strip the question mark so we don't end up with two
|
||||
# question marks.
|
||||
data = data[:-1]
|
||||
self.soup.handle_data(data)
|
||||
self.soup.endData(ProcessingInstruction)
|
||||
|
||||
|
||||
class HTMLParserTreeBuilder(HTMLTreeBuilder):
|
||||
|
||||
is_xml = False
|
||||
features = [HTML, STRICT, HTMLPARSER]
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
if CONSTRUCTOR_TAKES_STRICT:
|
||||
kwargs['strict'] = False
|
||||
self.parser_args = (args, kwargs)
|
||||
|
||||
def prepare_markup(self, markup, user_specified_encoding=None,
|
||||
document_declared_encoding=None):
|
||||
"""
|
||||
:return: A 4-tuple (markup, original encoding, encoding
|
||||
declared within markup, whether any characters had to be
|
||||
replaced with REPLACEMENT CHARACTER).
|
||||
"""
|
||||
if isinstance(markup, unicode):
|
||||
yield (markup, None, None, False)
|
||||
return
|
||||
|
||||
try_encodings = [user_specified_encoding, document_declared_encoding]
|
||||
dammit = UnicodeDammit(markup, try_encodings, is_html=True)
|
||||
yield (dammit.markup, dammit.original_encoding,
|
||||
dammit.declared_html_encoding,
|
||||
dammit.contains_replacement_characters)
|
||||
|
||||
def feed(self, markup):
|
||||
args, kwargs = self.parser_args
|
||||
parser = BeautifulSoupHTMLParser(*args, **kwargs)
|
||||
parser.soup = self.soup
|
||||
try:
|
||||
parser.feed(markup)
|
||||
except HTMLParseError, e:
|
||||
warnings.warn(RuntimeWarning(
|
||||
"Python's built-in HTMLParser cannot parse the given document. This is not a bug in Beautiful Soup. The best solution is to install an external parser (lxml or html5lib), and use Beautiful Soup with that parser. See http://www.crummy.com/software/BeautifulSoup/bs4/doc/#installing-a-parser for help."))
|
||||
raise e
|
||||
|
||||
# Patch 3.2 versions of HTMLParser earlier than 3.2.3 to use some
|
||||
# 3.2.3 code. This ensures they don't treat markup like <p></p> as a
|
||||
# string.
|
||||
#
|
||||
# XXX This code can be removed once most Python 3 users are on 3.2.3.
|
||||
if major == 3 and minor == 2 and not CONSTRUCTOR_TAKES_STRICT:
|
||||
import re
|
||||
attrfind_tolerant = re.compile(
|
||||
r'\s*((?<=[\'"\s])[^\s/>][^\s/=>]*)(\s*=+\s*'
|
||||
r'(\'[^\']*\'|"[^"]*"|(?![\'"])[^>\s]*))?')
|
||||
HTMLParserTreeBuilder.attrfind_tolerant = attrfind_tolerant
|
||||
|
||||
locatestarttagend = re.compile(r"""
|
||||
<[a-zA-Z][-.a-zA-Z0-9:_]* # tag name
|
||||
(?:\s+ # whitespace before attribute name
|
||||
(?:[a-zA-Z_][-.:a-zA-Z0-9_]* # attribute name
|
||||
(?:\s*=\s* # value indicator
|
||||
(?:'[^']*' # LITA-enclosed value
|
||||
|\"[^\"]*\" # LIT-enclosed value
|
||||
|[^'\">\s]+ # bare value
|
||||
)
|
||||
)?
|
||||
)
|
||||
)*
|
||||
\s* # trailing whitespace
|
||||
""", re.VERBOSE)
|
||||
BeautifulSoupHTMLParser.locatestarttagend = locatestarttagend
|
||||
|
||||
from html.parser import tagfind, attrfind
|
||||
|
||||
def parse_starttag(self, i):
|
||||
self.__starttag_text = None
|
||||
endpos = self.check_for_whole_start_tag(i)
|
||||
if endpos < 0:
|
||||
return endpos
|
||||
rawdata = self.rawdata
|
||||
self.__starttag_text = rawdata[i:endpos]
|
||||
|
||||
# Now parse the data between i+1 and j into a tag and attrs
|
||||
attrs = []
|
||||
match = tagfind.match(rawdata, i+1)
|
||||
assert match, 'unexpected call to parse_starttag()'
|
||||
k = match.end()
|
||||
self.lasttag = tag = rawdata[i+1:k].lower()
|
||||
while k < endpos:
|
||||
if self.strict:
|
||||
m = attrfind.match(rawdata, k)
|
||||
else:
|
||||
m = attrfind_tolerant.match(rawdata, k)
|
||||
if not m:
|
||||
break
|
||||
attrname, rest, attrvalue = m.group(1, 2, 3)
|
||||
if not rest:
|
||||
attrvalue = None
|
||||
elif attrvalue[:1] == '\'' == attrvalue[-1:] or \
|
||||
attrvalue[:1] == '"' == attrvalue[-1:]:
|
||||
attrvalue = attrvalue[1:-1]
|
||||
if attrvalue:
|
||||
attrvalue = self.unescape(attrvalue)
|
||||
attrs.append((attrname.lower(), attrvalue))
|
||||
k = m.end()
|
||||
|
||||
end = rawdata[k:endpos].strip()
|
||||
if end not in (">", "/>"):
|
||||
lineno, offset = self.getpos()
|
||||
if "\n" in self.__starttag_text:
|
||||
lineno = lineno + self.__starttag_text.count("\n")
|
||||
offset = len(self.__starttag_text) \
|
||||
- self.__starttag_text.rfind("\n")
|
||||
else:
|
||||
offset = offset + len(self.__starttag_text)
|
||||
if self.strict:
|
||||
self.error("junk characters in start tag: %r"
|
||||
% (rawdata[k:endpos][:20],))
|
||||
self.handle_data(rawdata[i:endpos])
|
||||
return endpos
|
||||
if end.endswith('/>'):
|
||||
# XHTML-style empty tag: <span attr="value" />
|
||||
self.handle_startendtag(tag, attrs)
|
||||
else:
|
||||
self.handle_starttag(tag, attrs)
|
||||
if tag in self.CDATA_CONTENT_ELEMENTS:
|
||||
self.set_cdata_mode(tag)
|
||||
return endpos
|
||||
|
||||
def set_cdata_mode(self, elem):
|
||||
self.cdata_elem = elem.lower()
|
||||
self.interesting = re.compile(r'</\s*%s\s*>' % self.cdata_elem, re.I)
|
||||
|
||||
BeautifulSoupHTMLParser.parse_starttag = parse_starttag
|
||||
BeautifulSoupHTMLParser.set_cdata_mode = set_cdata_mode
|
||||
|
||||
CONSTRUCTOR_TAKES_STRICT = True
|
||||
@@ -1,233 +0,0 @@
|
||||
__all__ = [
|
||||
'LXMLTreeBuilderForXML',
|
||||
'LXMLTreeBuilder',
|
||||
]
|
||||
|
||||
from io import BytesIO
|
||||
from StringIO import StringIO
|
||||
import collections
|
||||
from lxml import etree
|
||||
from bs4.element import Comment, Doctype, NamespacedAttribute
|
||||
from bs4.builder import (
|
||||
FAST,
|
||||
HTML,
|
||||
HTMLTreeBuilder,
|
||||
PERMISSIVE,
|
||||
ParserRejectedMarkup,
|
||||
TreeBuilder,
|
||||
XML)
|
||||
from bs4.dammit import EncodingDetector
|
||||
|
||||
LXML = 'lxml'
|
||||
|
||||
class LXMLTreeBuilderForXML(TreeBuilder):
|
||||
DEFAULT_PARSER_CLASS = etree.XMLParser
|
||||
|
||||
is_xml = True
|
||||
|
||||
# Well, it's permissive by XML parser standards.
|
||||
features = [LXML, XML, FAST, PERMISSIVE]
|
||||
|
||||
CHUNK_SIZE = 512
|
||||
|
||||
# This namespace mapping is specified in the XML Namespace
|
||||
# standard.
|
||||
DEFAULT_NSMAPS = {'http://www.w3.org/XML/1998/namespace' : "xml"}
|
||||
|
||||
def default_parser(self, encoding):
|
||||
# This can either return a parser object or a class, which
|
||||
# will be instantiated with default arguments.
|
||||
if self._default_parser is not None:
|
||||
return self._default_parser
|
||||
return etree.XMLParser(
|
||||
target=self, strip_cdata=False, recover=True, encoding=encoding)
|
||||
|
||||
def parser_for(self, encoding):
|
||||
# Use the default parser.
|
||||
parser = self.default_parser(encoding)
|
||||
|
||||
if isinstance(parser, collections.Callable):
|
||||
# Instantiate the parser with default arguments
|
||||
parser = parser(target=self, strip_cdata=False, encoding=encoding)
|
||||
return parser
|
||||
|
||||
def __init__(self, parser=None, empty_element_tags=None):
|
||||
# TODO: Issue a warning if parser is present but not a
|
||||
# callable, since that means there's no way to create new
|
||||
# parsers for different encodings.
|
||||
self._default_parser = parser
|
||||
if empty_element_tags is not None:
|
||||
self.empty_element_tags = set(empty_element_tags)
|
||||
self.soup = None
|
||||
self.nsmaps = [self.DEFAULT_NSMAPS]
|
||||
|
||||
def _getNsTag(self, tag):
|
||||
# Split the namespace URL out of a fully-qualified lxml tag
|
||||
# name. Copied from lxml's src/lxml/sax.py.
|
||||
if tag[0] == '{':
|
||||
return tuple(tag[1:].split('}', 1))
|
||||
else:
|
||||
return (None, tag)
|
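# e.g. _getNsTag('{http://www.w3.org/1999/xhtml}body')
#        -> ('http://www.w3.org/1999/xhtml', 'body')
#      _getNsTag('body') -> (None, 'body')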
||||
|
||||
def prepare_markup(self, markup, user_specified_encoding=None,
|
||||
document_declared_encoding=None):
|
||||
"""
|
||||
:yield: A series of 4-tuples.
|
||||
(markup, encoding, declared encoding,
|
||||
has undergone character replacement)
|
||||
|
||||
Each 4-tuple represents a strategy for parsing the document.
|
||||
"""
|
||||
if isinstance(markup, unicode):
|
||||
# We were given Unicode. Maybe lxml can parse Unicode on
|
||||
# this system?
|
||||
yield markup, None, document_declared_encoding, False
|
||||
|
||||
if isinstance(markup, unicode):
|
||||
# No, apparently not. Convert the Unicode to UTF-8 and
|
||||
# tell lxml to parse it as UTF-8.
|
||||
yield (markup.encode("utf8"), "utf8",
|
||||
document_declared_encoding, False)
|
||||
|
||||
# Instead of using UnicodeDammit to convert the bytestring to
|
||||
# Unicode using different encodings, use EncodingDetector to
|
||||
# iterate over the encodings, and tell lxml to try to parse
|
||||
# the document as each one in turn.
|
||||
is_html = not self.is_xml
|
||||
try_encodings = [user_specified_encoding, document_declared_encoding]
|
||||
detector = EncodingDetector(markup, try_encodings, is_html)
|
||||
for encoding in detector.encodings:
|
||||
yield (detector.markup, encoding, document_declared_encoding, False)
|
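# Consumption sketch (mirrors the loop in the BeautifulSoup constructor):
# for a bytestring this yields one parse attempt per plausible encoding,
# and the caller stops at the first strategy the parser does not reject:
#
#   builder = LXMLTreeBuilderForXML()
#   for (markup, encoding, declared, replaced) in builder.prepare_markup(
#           '<root>caf\xc3\xa9</root>', user_specified_encoding='utf-8'):
#       ...  # feed the parser; break on success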
||||
|
||||
def feed(self, markup):
|
||||
if isinstance(markup, bytes):
|
||||
markup = BytesIO(markup)
|
||||
elif isinstance(markup, unicode):
|
||||
markup = StringIO(markup)
|
||||
|
||||
# Call feed() at least once, even if the markup is empty,
|
||||
# or the parser won't be initialized.
|
||||
data = markup.read(self.CHUNK_SIZE)
|
||||
try:
|
||||
self.parser = self.parser_for(self.soup.original_encoding)
|
||||
self.parser.feed(data)
|
||||
while len(data) != 0:
|
||||
# Now call feed() on the rest of the data, chunk by chunk.
|
||||
data = markup.read(self.CHUNK_SIZE)
|
||||
if len(data) != 0:
|
||||
self.parser.feed(data)
|
||||
self.parser.close()
|
||||
except (UnicodeDecodeError, LookupError, etree.ParserError), e:
|
||||
raise ParserRejectedMarkup(str(e))
|
||||
|
||||
def close(self):
|
||||
self.nsmaps = [self.DEFAULT_NSMAPS]
|
||||
|
||||
def start(self, name, attrs, nsmap={}):
|
||||
# Make sure attrs is a mutable dict--lxml may send an immutable dictproxy.
|
||||
attrs = dict(attrs)
|
||||
nsprefix = None
|
||||
# Invert each namespace map as it comes in.
|
||||
if len(nsmap) == 0 and len(self.nsmaps) > 1:
|
||||
# There are no new namespaces for this tag, but
|
||||
# non-default namespaces are in play, so we need a
|
||||
# separate tag stack to know when they end.
|
||||
self.nsmaps.append(None)
|
||||
elif len(nsmap) > 0:
|
||||
# A new namespace mapping has come into play.
|
||||
inverted_nsmap = dict((value, key) for key, value in nsmap.items())
|
||||
self.nsmaps.append(inverted_nsmap)
|
||||
# Also treat the namespace mapping as a set of attributes on the
|
||||
# tag, so we can recreate it later.
|
||||
attrs = attrs.copy()
|
||||
for prefix, namespace in nsmap.items():
|
||||
attribute = NamespacedAttribute(
|
||||
"xmlns", prefix, "http://www.w3.org/2000/xmlns/")
|
||||
attrs[attribute] = namespace
|
||||
|
||||
# Namespaces are in play. Find any attributes that came in
|
||||
# from lxml with namespaces attached to their names, and
|
||||
# turn them into NamespacedAttribute objects.
|
||||
new_attrs = {}
|
||||
for attr, value in attrs.items():
|
||||
namespace, attr = self._getNsTag(attr)
|
||||
if namespace is None:
|
||||
new_attrs[attr] = value
|
||||
else:
|
||||
nsprefix = self._prefix_for_namespace(namespace)
|
||||
attr = NamespacedAttribute(nsprefix, attr, namespace)
|
||||
new_attrs[attr] = value
|
||||
attrs = new_attrs
|
||||
|
||||
namespace, name = self._getNsTag(name)
|
||||
nsprefix = self._prefix_for_namespace(namespace)
|
||||
self.soup.handle_starttag(name, namespace, nsprefix, attrs)
|
||||
|
||||
def _prefix_for_namespace(self, namespace):
|
||||
"""Find the currently active prefix for the given namespace."""
|
||||
if namespace is None:
|
||||
return None
|
||||
for inverted_nsmap in reversed(self.nsmaps):
|
||||
if inverted_nsmap is not None and namespace in inverted_nsmap:
|
||||
return inverted_nsmap[namespace]
|
||||
return None
|
||||
|
||||
def end(self, name):
|
||||
self.soup.endData()
|
||||
completed_tag = self.soup.tagStack[-1]
|
||||
namespace, name = self._getNsTag(name)
|
||||
nsprefix = None
|
||||
if namespace is not None:
|
||||
for inverted_nsmap in reversed(self.nsmaps):
|
||||
if inverted_nsmap is not None and namespace in inverted_nsmap:
|
||||
nsprefix = inverted_nsmap[namespace]
|
||||
break
|
||||
self.soup.handle_endtag(name, nsprefix)
|
||||
if len(self.nsmaps) > 1:
|
||||
# This tag, or one of its parents, introduced a namespace
|
||||
# mapping, so pop it off the stack.
|
||||
self.nsmaps.pop()
|
||||
|
||||
def pi(self, target, data):
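        # Processing instructions are ignored; they are dropped from the tree.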
        pass

    def data(self, content):
        self.soup.handle_data(content)

    def doctype(self, name, pubid, system):
        self.soup.endData()
        doctype = Doctype.for_name_and_ids(name, pubid, system)
        self.soup.object_was_parsed(doctype)

    def comment(self, content):
        "Handle comments as Comment objects."
        self.soup.endData()
        self.soup.handle_data(content)
        self.soup.endData(Comment)

    def test_fragment_to_document(self, fragment):
        """See `TreeBuilder`."""
        return u'<?xml version="1.0" encoding="utf-8"?>\n%s' % fragment


class LXMLTreeBuilder(HTMLTreeBuilder, LXMLTreeBuilderForXML):

    features = [LXML, HTML, FAST, PERMISSIVE]
    is_xml = False

    def default_parser(self, encoding):
        return etree.HTMLParser

    def feed(self, markup):
        encoding = self.soup.original_encoding
        try:
            self.parser = self.parser_for(encoding)
            self.parser.feed(markup)
            self.parser.close()
        except (UnicodeDecodeError, LookupError, etree.ParserError), e:
            raise ParserRejectedMarkup(str(e))

    def test_fragment_to_document(self, fragment):
        """See `TreeBuilder`."""
        return u'<html><body>%s</body></html>' % fragment
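

# Usage sketch (illustrative, not part of the original file; assumes the
# standard feature names these builders register):
#
#   from bs4 import BeautifulSoup
#   BeautifulSoup(markup, "lxml")   # HTML parsing via LXMLTreeBuilder
#   BeautifulSoup(markup, "xml")    # XML parsing via LXMLTreeBuilderForXML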
@@ -1,829 +0,0 @@
# -*- coding: utf-8 -*-
"""Beautiful Soup bonus library: Unicode, Dammit

This library converts a bytestream to Unicode through any means
necessary. It is heavily based on code from Mark Pilgrim's Universal
Feed Parser. It works best on XML and HTML, but it does not rewrite the
XML or HTML to reflect a new encoding; that's the tree builder's job.
"""

import codecs
from htmlentitydefs import codepoint2name
import re
import logging
import string

# Import a library to autodetect character encodings.
chardet_type = None
try:
    # First try the fast C implementation.
    # PyPI package: cchardet
    import cchardet
    def chardet_dammit(s):
        return cchardet.detect(s)['encoding']
except ImportError:
    try:
        # Fall back to the pure Python implementation
        # Debian package: python-chardet
        # PyPI package: chardet
        import chardet
        def chardet_dammit(s):
            return chardet.detect(s)['encoding']
        #import chardet.constants
        #chardet.constants._debug = 1
    except ImportError:
        # No chardet available.
        def chardet_dammit(s):
            return None

# Available from http://cjkpython.i18n.org/.
try:
    import iconv_codec
except ImportError:
    pass

xml_encoding_re = re.compile(
    '^<\?.*encoding=[\'"](.*?)[\'"].*\?>'.encode(), re.I)
html_meta_re = re.compile(
    '<\s*meta[^>]+charset\s*=\s*["\']?([^>]*?)[ /;\'">]'.encode(), re.I)

class EntitySubstitution(object):

    """Substitute XML or HTML entities for the corresponding characters."""

    def _populate_class_variables():
        lookup = {}
        reverse_lookup = {}
        characters_for_re = []
        for codepoint, name in list(codepoint2name.items()):
            character = unichr(codepoint)
            if codepoint != 34:
                # There's no point in turning the quotation mark into
                # &quot;, unless it happens within an attribute value, which
                # is handled elsewhere.
                characters_for_re.append(character)
                lookup[character] = name
            # But we do want to turn &quot; into the quotation mark.
            reverse_lookup[name] = character
        re_definition = "[%s]" % "".join(characters_for_re)
        return lookup, reverse_lookup, re.compile(re_definition)
    (CHARACTER_TO_HTML_ENTITY, HTML_ENTITY_TO_CHARACTER,
     CHARACTER_TO_HTML_ENTITY_RE) = _populate_class_variables()

    CHARACTER_TO_XML_ENTITY = {
        "'": "apos",
        '"': "quot",
        "&": "amp",
        "<": "lt",
        ">": "gt",
        }

    BARE_AMPERSAND_OR_BRACKET = re.compile("([<>]|"
                                           "&(?!#\d+;|#x[0-9a-fA-F]+;|\w+;)"
                                           ")")

    AMPERSAND_OR_BRACKET = re.compile("([<>&])")

    @classmethod
    def _substitute_html_entity(cls, matchobj):
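        """Used with a regular expression to substitute the
        appropriate HTML entity for a special character."""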
        entity = cls.CHARACTER_TO_HTML_ENTITY.get(matchobj.group(0))
        return "&%s;" % entity

    @classmethod
    def _substitute_xml_entity(cls, matchobj):
        """Used with a regular expression to substitute the
        appropriate XML entity for an XML special character."""
        entity = cls.CHARACTER_TO_XML_ENTITY[matchobj.group(0)]
        return "&%s;" % entity

    @classmethod
    def quoted_attribute_value(self, value):
        """Make a value into a quoted XML attribute, possibly escaping it.

        Most strings will be quoted using double quotes.

         Bob's Bar -> "Bob's Bar"

        If a string contains double quotes, it will be quoted using
        single quotes.

         Welcome to "my bar" -> 'Welcome to "my bar"'

        If a string contains both single and double quotes, the
        double quotes will be escaped, and the string will be quoted
        using double quotes.

         Welcome to "Bob's Bar" -> "Welcome to &quot;Bob's bar&quot;"
        """
        quote_with = '"'
        if '"' in value:
            if "'" in value:
                # The string contains both single and double
                # quotes. Turn the double quotes into
                # entities. We quote the double quotes rather than
                # the single quotes because the entity name is
                # "&quot;" whether this is HTML or XML. If we
                # quoted the single quotes, we'd have to decide
                # between &apos; and &squot;.
                replace_with = "&quot;"
                value = value.replace('"', replace_with)
            else:
                # There are double quotes but no single quotes.
                # We can use single quotes to quote the attribute.
                quote_with = "'"
        return quote_with + value + quote_with

    @classmethod
    def substitute_xml(cls, value, make_quoted_attribute=False):
        """Substitute XML entities for special XML characters.

        :param value: A string to be substituted. The less-than sign
          will become &lt;, the greater-than sign will become &gt;,
          and any ampersands will become &amp;. If you want ampersands
          that appear to be part of an entity definition to be left
          alone, use substitute_xml_containing_entities() instead.

        :param make_quoted_attribute: If True, then the string will be
          quoted, as befits an attribute value.
        """
        # Escape angle brackets and ampersands.
        value = cls.AMPERSAND_OR_BRACKET.sub(
            cls._substitute_xml_entity, value)

        if make_quoted_attribute:
            value = cls.quoted_attribute_value(value)
        return value

    @classmethod
    def substitute_xml_containing_entities(
        cls, value, make_quoted_attribute=False):
        """Substitute XML entities for special XML characters.

        :param value: A string to be substituted. The less-than sign will
          become &lt;, the greater-than sign will become &gt;, and any
          ampersands that are not part of an entity definition will
          become &amp;.

        :param make_quoted_attribute: If True, then the string will be
          quoted, as befits an attribute value.
        """
        # Escape angle brackets, and ampersands that aren't part of
        # entities.
        value = cls.BARE_AMPERSAND_OR_BRACKET.sub(
            cls._substitute_xml_entity, value)

        if make_quoted_attribute:
            value = cls.quoted_attribute_value(value)
        return value

    @classmethod
    def substitute_html(cls, s):
        """Replace certain Unicode characters with named HTML entities.

        This differs from data.encode(encoding, 'xmlcharrefreplace')
        in that the goal is to make the result more readable (to those
        with ASCII displays) rather than to recover from
        errors. There's absolutely nothing wrong with a UTF-8 string
        containing a LATIN SMALL LETTER E WITH ACUTE, but replacing that
        character with "&eacute;" will make it more readable to some
        people.
        """
        return cls.CHARACTER_TO_HTML_ENTITY_RE.sub(
            cls._substitute_html_entity, s)
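
# Usage sketch for EntitySubstitution (illustrative, not from the original
# file):
#
#   EntitySubstitution.substitute_xml('AT&T', make_quoted_attribute=True)
#   # => '"AT&amp;T"'
#   EntitySubstitution.substitute_html(u'caf\xe9')
#   # => u'caf&eacute;'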


class EncodingDetector:
    """Suggests a number of possible encodings for a bytestring.

    Order of precedence:

    1. Encodings you specifically tell EncodingDetector to try first
    (the override_encodings argument to the constructor).

    2. An encoding declared within the bytestring itself, either in an
    XML declaration (if the bytestring is to be interpreted as an XML
    document), or in a <meta> tag (if the bytestring is to be
    interpreted as an HTML document.)

    3. An encoding detected through textual analysis by chardet,
    cchardet, or a similar external library.

    4. UTF-8.

    5. Windows-1252.
    """
    def __init__(self, markup, override_encodings=None, is_html=False):
        self.override_encodings = override_encodings or []
        self.chardet_encoding = None
        self.is_html = is_html
        self.declared_encoding = None

        # First order of business: strip a byte-order mark.
        self.markup, self.sniffed_encoding = self.strip_byte_order_mark(markup)

    def _usable(self, encoding, tried):
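        """An encoding is worth yielding only if it exists and hasn't
        been tried yet; calling this records it as tried."""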
        if encoding is not None:
            encoding = encoding.lower()
            if encoding not in tried:
                tried.add(encoding)
                return True
        return False

    @property
    def encodings(self):
        """Yield a number of encodings that might work for this markup."""
        tried = set()
        for e in self.override_encodings:
            if self._usable(e, tried):
                yield e

        # Did the document originally start with a byte-order mark
        # that indicated its encoding?
        if self._usable(self.sniffed_encoding, tried):
            yield self.sniffed_encoding

        # Look within the document for an XML or HTML encoding
        # declaration.
        if self.declared_encoding is None:
            self.declared_encoding = self.find_declared_encoding(
                self.markup, self.is_html)
        if self._usable(self.declared_encoding, tried):
            yield self.declared_encoding

        # Use third-party character set detection to guess at the
        # encoding.
        if self.chardet_encoding is None:
            self.chardet_encoding = chardet_dammit(self.markup)
        if self._usable(self.chardet_encoding, tried):
            yield self.chardet_encoding

        # As a last-ditch effort, try utf-8 and windows-1252.
        for e in ('utf-8', 'windows-1252'):
            if self._usable(e, tried):
                yield e

    @classmethod
    def strip_byte_order_mark(cls, data):
        """If a byte-order mark is present, strip it and return the encoding it implies."""
        encoding = None
        if (len(data) >= 4) and (data[:2] == b'\xfe\xff') \
               and (data[2:4] != '\x00\x00'):
            encoding = 'utf-16be'
            data = data[2:]
        elif (len(data) >= 4) and (data[:2] == b'\xff\xfe') \
                 and (data[2:4] != '\x00\x00'):
            encoding = 'utf-16le'
            data = data[2:]
        elif data[:3] == b'\xef\xbb\xbf':
            encoding = 'utf-8'
            data = data[3:]
        elif data[:4] == b'\x00\x00\xfe\xff':
            encoding = 'utf-32be'
            data = data[4:]
        elif data[:4] == b'\xff\xfe\x00\x00':
            encoding = 'utf-32le'
            data = data[4:]
        return data, encoding

    @classmethod
    def find_declared_encoding(cls, markup, is_html=False, search_entire_document=False):
        """Given a document, tries to find its declared encoding.

        An XML encoding is declared at the beginning of the document.

        An HTML encoding is declared in a <meta> tag, hopefully near the
        beginning of the document.
        """
        if search_entire_document:
            xml_endpos = html_endpos = len(markup)
        else:
            xml_endpos = 1024
            html_endpos = max(2048, int(len(markup) * 0.05))

        declared_encoding = None
        declared_encoding_match = xml_encoding_re.search(markup, endpos=xml_endpos)
        if not declared_encoding_match and is_html:
            declared_encoding_match = html_meta_re.search(markup, endpos=html_endpos)
        if declared_encoding_match is not None:
            declared_encoding = declared_encoding_match.groups()[0].decode(
                'ascii')
        if declared_encoding:
            return declared_encoding.lower()
        return None
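
# Usage sketch (illustrative, not from the original file):
#
#   detector = EncodingDetector(data, override_encodings=['utf-16'])
#   for encoding in detector.encodings:
#       ...  # try decoding `data` as `encoding`; first success wins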

class UnicodeDammit:
    """A class for detecting the encoding of a *ML document and
    converting it to a Unicode string. If the source encoding is
    windows-1252, can replace MS smart quotes with their HTML or XML
    equivalents."""

    # This dictionary maps commonly seen values for "charset" in HTML
    # meta tags to the corresponding Python codec names. It only covers
    # values that aren't in Python's aliases and can't be determined
    # by the heuristics in find_codec.
    CHARSET_ALIASES = {"macintosh": "mac-roman",
                       "x-sjis": "shift-jis"}

    ENCODINGS_WITH_SMART_QUOTES = [
        "windows-1252",
        "iso-8859-1",
        "iso-8859-2",
        ]

    def __init__(self, markup, override_encodings=[],
                 smart_quotes_to=None, is_html=False):
        self.smart_quotes_to = smart_quotes_to
        self.tried_encodings = []
        self.contains_replacement_characters = False
        self.is_html = is_html

        self.detector = EncodingDetector(markup, override_encodings, is_html)

        # Short-circuit if the data is in Unicode to begin with.
        if isinstance(markup, unicode) or markup == '':
            self.markup = markup
            self.unicode_markup = unicode(markup)
            self.original_encoding = None
            return

        # The encoding detector may have stripped a byte-order mark.
        # Use the stripped markup from this point on.
        self.markup = self.detector.markup

        u = None
        for encoding in self.detector.encodings:
            markup = self.detector.markup
            u = self._convert_from(encoding)
            if u is not None:
                break

        if not u:
            # None of the encodings worked. As an absolute last resort,
            # try them again with character replacement.

            for encoding in self.detector.encodings:
                if encoding != "ascii":
                    u = self._convert_from(encoding, "replace")
                if u is not None:
                    logging.warning(
                        "Some characters could not be decoded, and were "
                        "replaced with REPLACEMENT CHARACTER.")
                    self.contains_replacement_characters = True
                    break

        # If none of that worked, we could at this point force it to
        # ASCII, but that would destroy so much data that I think
        # giving up is better.
        self.unicode_markup = u
        if not u:
            self.original_encoding = None

    def _sub_ms_char(self, match):
        """Changes a MS smart quote character to an XML or HTML
        entity, or an ASCII character."""
        orig = match.group(1)
        if self.smart_quotes_to == 'ascii':
            sub = self.MS_CHARS_TO_ASCII.get(orig).encode()
        else:
            sub = self.MS_CHARS.get(orig)
            if type(sub) == tuple:
                if self.smart_quotes_to == 'xml':
                    sub = '&#x'.encode() + sub[1].encode() + ';'.encode()
                else:
                    sub = '&'.encode() + sub[0].encode() + ';'.encode()
            else:
                sub = sub.encode()
        return sub

    def _convert_from(self, proposed, errors="strict"):
        proposed = self.find_codec(proposed)
        if not proposed or (proposed, errors) in self.tried_encodings:
            return None
        self.tried_encodings.append((proposed, errors))
        markup = self.markup
        # Convert smart quotes to HTML if coming from an encoding
        # that might have them.
        if (self.smart_quotes_to is not None
            and proposed in self.ENCODINGS_WITH_SMART_QUOTES):
            smart_quotes_re = b"([\x80-\x9f])"
            smart_quotes_compiled = re.compile(smart_quotes_re)
            markup = smart_quotes_compiled.sub(self._sub_ms_char, markup)

        try:
            #print "Trying to convert document to %s (errors=%s)" % (
            #    proposed, errors)
            u = self._to_unicode(markup, proposed, errors)
            self.markup = u
            self.original_encoding = proposed
        except Exception as e:
            #print "That didn't work!"
            #print e
            return None
        #print "Correct encoding: %s" % proposed
        return self.markup

    def _to_unicode(self, data, encoding, errors="strict"):
        '''Given a string and its encoding, decodes the string into Unicode.
        %encoding is a string recognized by encodings.aliases'''
        return unicode(data, encoding, errors)

    @property
    def declared_html_encoding(self):
        if not self.is_html:
            return None
        return self.detector.declared_encoding

    def find_codec(self, charset):
        value = (self._codec(self.CHARSET_ALIASES.get(charset, charset))
                 or (charset and self._codec(charset.replace("-", "")))
                 or (charset and self._codec(charset.replace("-", "_")))
                 or (charset and charset.lower())
                 or charset
                )
        if value:
            return value.lower()
        return None

    def _codec(self, charset):
        if not charset:
            return charset
        codec = None
        try:
            codecs.lookup(charset)
            codec = charset
        except (LookupError, ValueError):
            pass
        return codec

    # A partial mapping of ISO-Latin-1 to HTML entities/XML numeric entities.
    MS_CHARS = {b'\x80': ('euro', '20AC'),
                b'\x81': ' ',
                b'\x82': ('sbquo', '201A'),
                b'\x83': ('fnof', '192'),
                b'\x84': ('bdquo', '201E'),
                b'\x85': ('hellip', '2026'),
                b'\x86': ('dagger', '2020'),
                b'\x87': ('Dagger', '2021'),
                b'\x88': ('circ', '2C6'),
                b'\x89': ('permil', '2030'),
                b'\x8A': ('Scaron', '160'),
                b'\x8B': ('lsaquo', '2039'),
                b'\x8C': ('OElig', '152'),
                b'\x8D': '?',
                b'\x8E': ('#x17D', '17D'),
                b'\x8F': '?',
                b'\x90': '?',
                b'\x91': ('lsquo', '2018'),
                b'\x92': ('rsquo', '2019'),
                b'\x93': ('ldquo', '201C'),
                b'\x94': ('rdquo', '201D'),
                b'\x95': ('bull', '2022'),
                b'\x96': ('ndash', '2013'),
                b'\x97': ('mdash', '2014'),
                b'\x98': ('tilde', '2DC'),
                b'\x99': ('trade', '2122'),
                b'\x9a': ('scaron', '161'),
                b'\x9b': ('rsaquo', '203A'),
                b'\x9c': ('oelig', '153'),
                b'\x9d': '?',
                b'\x9e': ('#x17E', '17E'),
                b'\x9f': ('Yuml', ''),}

    # A parochial partial mapping of ISO-Latin-1 to ASCII. Contains
    # horrors like stripping diacritical marks to turn á into a, but also
    # contains non-horrors like turning “ into ".
    MS_CHARS_TO_ASCII = {
        b'\x80' : 'EUR',
        b'\x81' : ' ',
        b'\x82' : ',',
        b'\x83' : 'f',
        b'\x84' : ',,',
        b'\x85' : '...',
        b'\x86' : '+',
        b'\x87' : '++',
        b'\x88' : '^',
        b'\x89' : '%',
        b'\x8a' : 'S',
        b'\x8b' : '<',
        b'\x8c' : 'OE',
        b'\x8d' : '?',
        b'\x8e' : 'Z',
        b'\x8f' : '?',
        b'\x90' : '?',
        b'\x91' : "'",
        b'\x92' : "'",
        b'\x93' : '"',
        b'\x94' : '"',
        b'\x95' : '*',
        b'\x96' : '-',
        b'\x97' : '--',
        b'\x98' : '~',
        b'\x99' : '(TM)',
        b'\x9a' : 's',
        b'\x9b' : '>',
        b'\x9c' : 'oe',
        b'\x9d' : '?',
        b'\x9e' : 'z',
        b'\x9f' : 'Y',
        b'\xa0' : ' ',
        b'\xa1' : '!',
        b'\xa2' : 'c',
        b'\xa3' : 'GBP',
        b'\xa4' : '$', #This approximation is especially parochial--this is the
                       #generic currency symbol.
        b'\xa5' : 'YEN',
        b'\xa6' : '|',
        b'\xa7' : 'S',
        b'\xa8' : '..',
        b'\xa9' : '',
        b'\xaa' : '(th)',
        b'\xab' : '<<',
        b'\xac' : '!',
        b'\xad' : ' ',
        b'\xae' : '(R)',
        b'\xaf' : '-',
        b'\xb0' : 'o',
        b'\xb1' : '+-',
        b'\xb2' : '2',
        b'\xb3' : '3',
        b'\xb4' : ("'", 'acute'),
        b'\xb5' : 'u',
        b'\xb6' : 'P',
        b'\xb7' : '*',
        b'\xb8' : ',',
        b'\xb9' : '1',
        b'\xba' : '(th)',
        b'\xbb' : '>>',
        b'\xbc' : '1/4',
        b'\xbd' : '1/2',
        b'\xbe' : '3/4',
        b'\xbf' : '?',
        b'\xc0' : 'A',
        b'\xc1' : 'A',
        b'\xc2' : 'A',
        b'\xc3' : 'A',
        b'\xc4' : 'A',
        b'\xc5' : 'A',
        b'\xc6' : 'AE',
        b'\xc7' : 'C',
        b'\xc8' : 'E',
        b'\xc9' : 'E',
        b'\xca' : 'E',
        b'\xcb' : 'E',
        b'\xcc' : 'I',
        b'\xcd' : 'I',
        b'\xce' : 'I',
        b'\xcf' : 'I',
        b'\xd0' : 'D',
        b'\xd1' : 'N',
        b'\xd2' : 'O',
        b'\xd3' : 'O',
        b'\xd4' : 'O',
        b'\xd5' : 'O',
        b'\xd6' : 'O',
        b'\xd7' : '*',
        b'\xd8' : 'O',
        b'\xd9' : 'U',
        b'\xda' : 'U',
        b'\xdb' : 'U',
        b'\xdc' : 'U',
        b'\xdd' : 'Y',
        b'\xde' : 'b',
        b'\xdf' : 'B',
        b'\xe0' : 'a',
        b'\xe1' : 'a',
        b'\xe2' : 'a',
        b'\xe3' : 'a',
        b'\xe4' : 'a',
        b'\xe5' : 'a',
        b'\xe6' : 'ae',
        b'\xe7' : 'c',
        b'\xe8' : 'e',
        b'\xe9' : 'e',
        b'\xea' : 'e',
        b'\xeb' : 'e',
        b'\xec' : 'i',
        b'\xed' : 'i',
        b'\xee' : 'i',
        b'\xef' : 'i',
        b'\xf0' : 'o',
        b'\xf1' : 'n',
        b'\xf2' : 'o',
        b'\xf3' : 'o',
        b'\xf4' : 'o',
        b'\xf5' : 'o',
        b'\xf6' : 'o',
        b'\xf7' : '/',
        b'\xf8' : 'o',
        b'\xf9' : 'u',
        b'\xfa' : 'u',
        b'\xfb' : 'u',
        b'\xfc' : 'u',
        b'\xfd' : 'y',
        b'\xfe' : 'b',
        b'\xff' : 'y',
        }

    # A map used when removing rogue Windows-1252/ISO-8859-1
    # characters in otherwise UTF-8 documents.
    #
    # Note that \x81, \x8d, \x8f, \x90, and \x9d are undefined in
    # Windows-1252.
    WINDOWS_1252_TO_UTF8 = {
        0x80 : b'\xe2\x82\xac', # €
        0x82 : b'\xe2\x80\x9a', # ‚
        0x83 : b'\xc6\x92',     # ƒ
        0x84 : b'\xe2\x80\x9e', # „
        0x85 : b'\xe2\x80\xa6', # …
        0x86 : b'\xe2\x80\xa0', # †
        0x87 : b'\xe2\x80\xa1', # ‡
        0x88 : b'\xcb\x86',     # ˆ
        0x89 : b'\xe2\x80\xb0', # ‰
        0x8a : b'\xc5\xa0',     # Š
        0x8b : b'\xe2\x80\xb9', # ‹
        0x8c : b'\xc5\x92',     # Œ
        0x8e : b'\xc5\xbd',     # Ž
        0x91 : b'\xe2\x80\x98', # ‘
        0x92 : b'\xe2\x80\x99', # ’
        0x93 : b'\xe2\x80\x9c', # “
        0x94 : b'\xe2\x80\x9d', # ”
        0x95 : b'\xe2\x80\xa2', # •
        0x96 : b'\xe2\x80\x93', # –
        0x97 : b'\xe2\x80\x94', # —
        0x98 : b'\xcb\x9c',     # ˜
        0x99 : b'\xe2\x84\xa2', # ™
        0x9a : b'\xc5\xa1',     # š
        0x9b : b'\xe2\x80\xba', # ›
        0x9c : b'\xc5\x93',     # œ
        0x9e : b'\xc5\xbe',     # ž
        0x9f : b'\xc5\xb8',     # Ÿ
        0xa0 : b'\xc2\xa0',     # non-breaking space
        0xa1 : b'\xc2\xa1',     # ¡
        0xa2 : b'\xc2\xa2',     # ¢
        0xa3 : b'\xc2\xa3',     # £
        0xa4 : b'\xc2\xa4',     # ¤
        0xa5 : b'\xc2\xa5',     # ¥
        0xa6 : b'\xc2\xa6',     # ¦
        0xa7 : b'\xc2\xa7',     # §
        0xa8 : b'\xc2\xa8',     # ¨
        0xa9 : b'\xc2\xa9',     # ©
        0xaa : b'\xc2\xaa',     # ª
        0xab : b'\xc2\xab',     # «
        0xac : b'\xc2\xac',     # ¬
        0xad : b'\xc2\xad',     # soft hyphen
        0xae : b'\xc2\xae',     # ®
        0xaf : b'\xc2\xaf',     # ¯
        0xb0 : b'\xc2\xb0',     # °
        0xb1 : b'\xc2\xb1',     # ±
        0xb2 : b'\xc2\xb2',     # ²
        0xb3 : b'\xc2\xb3',     # ³
        0xb4 : b'\xc2\xb4',     # ´
        0xb5 : b'\xc2\xb5',     # µ
        0xb6 : b'\xc2\xb6',     # ¶
        0xb7 : b'\xc2\xb7',     # ·
        0xb8 : b'\xc2\xb8',     # ¸
        0xb9 : b'\xc2\xb9',     # ¹
        0xba : b'\xc2\xba',     # º
        0xbb : b'\xc2\xbb',     # »
        0xbc : b'\xc2\xbc',     # ¼
        0xbd : b'\xc2\xbd',     # ½
        0xbe : b'\xc2\xbe',     # ¾
        0xbf : b'\xc2\xbf',     # ¿
        0xc0 : b'\xc3\x80',     # À
        0xc1 : b'\xc3\x81',     # Á
        0xc2 : b'\xc3\x82',     # Â
        0xc3 : b'\xc3\x83',     # Ã
        0xc4 : b'\xc3\x84',     # Ä
        0xc5 : b'\xc3\x85',     # Å
        0xc6 : b'\xc3\x86',     # Æ
        0xc7 : b'\xc3\x87',     # Ç
        0xc8 : b'\xc3\x88',     # È
        0xc9 : b'\xc3\x89',     # É
        0xca : b'\xc3\x8a',     # Ê
        0xcb : b'\xc3\x8b',     # Ë
        0xcc : b'\xc3\x8c',     # Ì
        0xcd : b'\xc3\x8d',     # Í
        0xce : b'\xc3\x8e',     # Î
        0xcf : b'\xc3\x8f',     # Ï
        0xd0 : b'\xc3\x90',     # Ð
        0xd1 : b'\xc3\x91',     # Ñ
        0xd2 : b'\xc3\x92',     # Ò
        0xd3 : b'\xc3\x93',     # Ó
        0xd4 : b'\xc3\x94',     # Ô
        0xd5 : b'\xc3\x95',     # Õ
        0xd6 : b'\xc3\x96',     # Ö
        0xd7 : b'\xc3\x97',     # ×
        0xd8 : b'\xc3\x98',     # Ø
        0xd9 : b'\xc3\x99',     # Ù
        0xda : b'\xc3\x9a',     # Ú
        0xdb : b'\xc3\x9b',     # Û
        0xdc : b'\xc3\x9c',     # Ü
        0xdd : b'\xc3\x9d',     # Ý
        0xde : b'\xc3\x9e',     # Þ
        0xdf : b'\xc3\x9f',     # ß
        0xe0 : b'\xc3\xa0',     # à
        0xe1 : b'\xc3\xa1',     # á (was b'\xa1', an invalid UTF-8 sequence)
        0xe2 : b'\xc3\xa2',     # â
        0xe3 : b'\xc3\xa3',     # ã
        0xe4 : b'\xc3\xa4',     # ä
        0xe5 : b'\xc3\xa5',     # å
        0xe6 : b'\xc3\xa6',     # æ
        0xe7 : b'\xc3\xa7',     # ç
        0xe8 : b'\xc3\xa8',     # è
        0xe9 : b'\xc3\xa9',     # é
        0xea : b'\xc3\xaa',     # ê
        0xeb : b'\xc3\xab',     # ë
        0xec : b'\xc3\xac',     # ì
        0xed : b'\xc3\xad',     # í
        0xee : b'\xc3\xae',     # î
        0xef : b'\xc3\xaf',     # ï
        0xf0 : b'\xc3\xb0',     # ð
        0xf1 : b'\xc3\xb1',     # ñ
        0xf2 : b'\xc3\xb2',     # ò
        0xf3 : b'\xc3\xb3',     # ó
        0xf4 : b'\xc3\xb4',     # ô
        0xf5 : b'\xc3\xb5',     # õ
        0xf6 : b'\xc3\xb6',     # ö
        0xf7 : b'\xc3\xb7',     # ÷
        0xf8 : b'\xc3\xb8',     # ø
        0xf9 : b'\xc3\xb9',     # ù
        0xfa : b'\xc3\xba',     # ú
        0xfb : b'\xc3\xbb',     # û
        0xfc : b'\xc3\xbc',     # ü
        0xfd : b'\xc3\xbd',     # ý
        0xfe : b'\xc3\xbe',     # þ
        }

    MULTIBYTE_MARKERS_AND_SIZES = [
        (0xc2, 0xdf, 2), # 2-byte characters start with a byte C2-DF
        (0xe0, 0xef, 3), # 3-byte characters start with E0-EF
        (0xf0, 0xf4, 4), # 4-byte characters start with F0-F4
        ]

    FIRST_MULTIBYTE_MARKER = MULTIBYTE_MARKERS_AND_SIZES[0][0]
    LAST_MULTIBYTE_MARKER = MULTIBYTE_MARKERS_AND_SIZES[-1][1]

    @classmethod
    def detwingle(cls, in_bytes, main_encoding="utf8",
                  embedded_encoding="windows-1252"):
        """Fix characters from one encoding embedded in some other encoding.

        Currently the only situation supported is Windows-1252 (or its
        subset ISO-8859-1), embedded in UTF-8.

        The input must be a bytestring. If you've already converted
        the document to Unicode, you're too late.

        The output is a bytestring in which `embedded_encoding`
        characters have been converted to their `main_encoding`
        equivalents.
        """
        if embedded_encoding.replace('_', '-').lower() not in (
            'windows-1252', 'windows_1252'):
            raise NotImplementedError(
                "Windows-1252 and ISO-8859-1 are the only currently supported "
                "embedded encodings.")

        if main_encoding.lower() not in ('utf8', 'utf-8'):
            raise NotImplementedError(
                "UTF-8 is the only currently supported main encoding.")

        byte_chunks = []

        chunk_start = 0
        pos = 0
        while pos < len(in_bytes):
            byte = in_bytes[pos]
            if not isinstance(byte, int):
                # Python 2.x
                byte = ord(byte)
            if (byte >= cls.FIRST_MULTIBYTE_MARKER
                and byte <= cls.LAST_MULTIBYTE_MARKER):
                # This is the start of a UTF-8 multibyte character. Skip
                # to the end.
                for start, end, size in cls.MULTIBYTE_MARKERS_AND_SIZES:
                    if byte >= start and byte <= end:
                        pos += size
                        break
            elif byte >= 0x80 and byte in cls.WINDOWS_1252_TO_UTF8:
                # We found a Windows-1252 character!
                # Save the string up to this point as a chunk.
                byte_chunks.append(in_bytes[chunk_start:pos])

                # Now translate the Windows-1252 character into UTF-8
                # and add it as another, one-byte chunk.
                byte_chunks.append(cls.WINDOWS_1252_TO_UTF8[byte])
                pos += 1
                chunk_start = pos
            else:
                # Go on to the next character.
                pos += 1
        if chunk_start == 0:
            # The string is unchanged.
            return in_bytes
        else:
            # Store the final chunk.
            byte_chunks.append(in_bytes[chunk_start:])
            return b''.join(byte_chunks)
@@ -1,204 +0,0 @@
"""Diagnostic functions, mainly for use when doing tech support."""
import cProfile
from StringIO import StringIO
from HTMLParser import HTMLParser
import bs4
from bs4 import BeautifulSoup, __version__
from bs4.builder import builder_registry

import os
import pstats
import random
import tempfile
import time
import traceback
import sys

def diagnose(data):
    """Diagnostic suite for isolating common problems."""
    print "Diagnostic running on Beautiful Soup %s" % __version__
    print "Python version %s" % sys.version

    basic_parsers = ["html.parser", "html5lib", "lxml"]
    # Iterate over a copy, since missing parsers are removed from the list.
    for name in list(basic_parsers):
        for builder in builder_registry.builders:
            if name in builder.features:
                break
        else:
            basic_parsers.remove(name)
            print (
                "I noticed that %s is not installed. Installing it may help." %
                name)

    if 'lxml' in basic_parsers:
        basic_parsers.append(["lxml", "xml"])
        from lxml import etree
        print "Found lxml version %s" % ".".join(map(str,etree.LXML_VERSION))

    if 'html5lib' in basic_parsers:
        import html5lib
        print "Found html5lib version %s" % html5lib.__version__

    if hasattr(data, 'read'):
        data = data.read()
    elif os.path.exists(data):
        print '"%s" looks like a filename. Reading data from the file.' % data
        data = open(data).read()
    elif data.startswith("http:") or data.startswith("https:"):
        print '"%s" looks like a URL. Beautiful Soup is not an HTTP client.' % data
        print "You need to use some other library to get the document behind the URL, and feed that document to Beautiful Soup."
        return
    print

    for parser in basic_parsers:
        print "Trying to parse your markup with %s" % parser
        success = False
        try:
            soup = BeautifulSoup(data, parser)
            success = True
        except Exception, e:
            print "%s could not parse the markup." % parser
            traceback.print_exc()
        if success:
            print "Here's what %s did with the markup:" % parser
            print soup.prettify()

        print "-" * 80

def lxml_trace(data, html=True, **kwargs):
    """Print out the lxml events that occur during parsing.

    This lets you see how lxml parses a document when no Beautiful
    Soup code is running.
    """
    from lxml import etree
    for event, element in etree.iterparse(StringIO(data), html=html, **kwargs):
        print("%s, %4s, %s" % (event, element.tag, element.text))

class AnnouncingParser(HTMLParser):
    """Announces HTMLParser parse events, without doing anything else."""

    def _p(self, s):
        print(s)

    def handle_starttag(self, name, attrs):
        self._p("%s START" % name)

    def handle_endtag(self, name):
        self._p("%s END" % name)

    def handle_data(self, data):
        self._p("%s DATA" % data)

    def handle_charref(self, name):
        self._p("%s CHARREF" % name)

    def handle_entityref(self, name):
        self._p("%s ENTITYREF" % name)

    def handle_comment(self, data):
        self._p("%s COMMENT" % data)

    def handle_decl(self, data):
        self._p("%s DECL" % data)

    def unknown_decl(self, data):
        self._p("%s UNKNOWN-DECL" % data)

    def handle_pi(self, data):
        self._p("%s PI" % data)

def htmlparser_trace(data):
    """Print out the HTMLParser events that occur during parsing.

    This lets you see how HTMLParser parses a document when no
    Beautiful Soup code is running.
    """
    parser = AnnouncingParser()
    parser.feed(data)

_vowels = "aeiou"
_consonants = "bcdfghjklmnpqrstvwxyz"

def rword(length=5):
    "Generate a random word-like string."
    s = ''
    for i in range(length):
        if i % 2 == 0:
            t = _consonants
        else:
            t = _vowels
        s += random.choice(t)
    return s

def rsentence(length=4):
    "Generate a random sentence-like string."
    return " ".join(rword(random.randint(4,9)) for i in range(length))

def rdoc(num_elements=1000):
    """Randomly generate an invalid HTML document."""
    tag_names = ['p', 'div', 'span', 'i', 'b', 'script', 'table']
    elements = []
    for i in range(num_elements):
        choice = random.randint(0,3)
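        # (A choice of 3 matches no branch below, so nothing is added
        # for that iteration.)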
        if choice == 0:
            # New tag.
            tag_name = random.choice(tag_names)
            elements.append("<%s>" % tag_name)
        elif choice == 1:
            elements.append(rsentence(random.randint(1,4)))
        elif choice == 2:
            # Close a tag.
            tag_name = random.choice(tag_names)
            elements.append("</%s>" % tag_name)
    return "<html>" + "\n".join(elements) + "</html>"

def benchmark_parsers(num_elements=100000):
    """Very basic head-to-head performance benchmark."""
    print "Comparative parser benchmark on Beautiful Soup %s" % __version__
    data = rdoc(num_elements)
    print "Generated a large invalid HTML document (%d bytes)." % len(data)

    for parser in ["lxml", ["lxml", "html"], "html5lib", "html.parser"]:
        success = False
        try:
            a = time.time()
            soup = BeautifulSoup(data, parser)
            b = time.time()
            success = True
        except Exception, e:
            print "%s could not parse the markup." % parser
            traceback.print_exc()
        if success:
            print "BS4+%s parsed the markup in %.2fs." % (parser, b-a)

    from lxml import etree
    a = time.time()
    etree.HTML(data)
    b = time.time()
    print "Raw lxml parsed the markup in %.2fs." % (b-a)

    import html5lib
    parser = html5lib.HTMLParser()
    a = time.time()
    parser.parse(data)
    b = time.time()
    print "Raw html5lib parsed the markup in %.2fs." % (b-a)

def profile(num_elements=100000, parser="lxml"):
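    """Profile the parsing of a large generated document with cProfile,
    printing the most expensive bs4/html5lib calls."""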
    filehandle = tempfile.NamedTemporaryFile()
    filename = filehandle.name

    data = rdoc(num_elements)
    vars = dict(bs4=bs4, data=data, parser=parser)
    cProfile.runctx('bs4.BeautifulSoup(data, parser)', vars, vars, filename)

    stats = pstats.Stats(filename)
    # stats.strip_dirs()
    stats.sort_stats("cumulative")
    stats.print_stats('_html5lib|bs4', 50)

if __name__ == '__main__':
    diagnose(sys.stdin.read())
@@ -1,592 +0,0 @@
"""Helper classes for tests."""

import copy
import functools
import unittest
from unittest import TestCase
from bs4 import BeautifulSoup
from bs4.element import (
    CharsetMetaAttributeValue,
    Comment,
    ContentMetaAttributeValue,
    Doctype,
    SoupStrainer,
)

from bs4.builder import HTMLParserTreeBuilder
default_builder = HTMLParserTreeBuilder


class SoupTest(unittest.TestCase):

    @property
    def default_builder(self):
        return default_builder()

    def soup(self, markup, **kwargs):
        """Build a Beautiful Soup object from markup."""
        builder = kwargs.pop('builder', self.default_builder)
        return BeautifulSoup(markup, builder=builder, **kwargs)

    def document_for(self, markup):
        """Turn an HTML fragment into a document.

        The details depend on the builder.
        """
        return self.default_builder.test_fragment_to_document(markup)

    def assertSoupEquals(self, to_parse, compare_parsed_to=None):
        builder = self.default_builder
        obj = BeautifulSoup(to_parse, builder=builder)
        if compare_parsed_to is None:
            compare_parsed_to = to_parse

        self.assertEqual(obj.decode(), self.document_for(compare_parsed_to))


class HTMLTreeBuilderSmokeTest(object):

    """A basic test of a treebuilder's competence.

    Any HTML treebuilder, present or future, should be able to pass
    these tests. With invalid markup, there's room for interpretation,
    and different parsers can handle it differently. But with the
    markup in these tests, there's not much room for interpretation.
    """

    def assertDoctypeHandled(self, doctype_fragment):
        """Assert that a given doctype string is handled correctly."""
        doctype_str, soup = self._document_with_doctype(doctype_fragment)

        # Make sure a Doctype object was created.
        doctype = soup.contents[0]
        self.assertEqual(doctype.__class__, Doctype)
        self.assertEqual(doctype, doctype_fragment)
        self.assertEqual(str(soup)[:len(doctype_str)], doctype_str)

        # Make sure that the doctype was correctly associated with the
        # parse tree and that the rest of the document parsed.
        self.assertEqual(soup.p.contents[0], 'foo')

    def _document_with_doctype(self, doctype_fragment):
        """Generate and parse a document with the given doctype."""
        doctype = '<!DOCTYPE %s>' % doctype_fragment
        markup = doctype + '\n<p>foo</p>'
        soup = self.soup(markup)
        return doctype, soup

    def test_normal_doctypes(self):
        """Make sure normal, everyday HTML doctypes are handled correctly."""
        self.assertDoctypeHandled("html")
        self.assertDoctypeHandled(
            'html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"')

    def test_empty_doctype(self):
        soup = self.soup("<!DOCTYPE>")
        doctype = soup.contents[0]
        self.assertEqual("", doctype.strip())

    def test_public_doctype_with_url(self):
        doctype = 'html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"'
        self.assertDoctypeHandled(doctype)

    def test_system_doctype(self):
        self.assertDoctypeHandled('foo SYSTEM "http://www.example.com/"')

    def test_namespaced_system_doctype(self):
        # We can handle a namespaced doctype with a system ID.
        self.assertDoctypeHandled('xsl:stylesheet SYSTEM "htmlent.dtd"')

    def test_namespaced_public_doctype(self):
        # Test a namespaced doctype with a public id.
        self.assertDoctypeHandled('xsl:stylesheet PUBLIC "htmlent.dtd"')

    def test_real_xhtml_document(self):
        """A real XHTML document should come out more or less the same as it went in."""
        markup = b"""<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN">
<html xmlns="http://www.w3.org/1999/xhtml">
<head><title>Hello.</title></head>
<body>Goodbye.</body>
</html>"""
        soup = self.soup(markup)
        self.assertEqual(
            soup.encode("utf-8").replace(b"\n", b""),
            markup.replace(b"\n", b""))

    def test_deepcopy(self):
        """Make sure you can copy the tree builder.

        This is important because the builder is part of a
        BeautifulSoup object, and we want to be able to copy that.
        """
        copy.deepcopy(self.default_builder)

    def test_p_tag_is_never_empty_element(self):
        """A <p> tag is never designated as an empty-element tag.

        Even if the markup shows it as an empty-element tag, it
        shouldn't be presented that way.
        """
        soup = self.soup("<p/>")
        self.assertFalse(soup.p.is_empty_element)
        self.assertEqual(str(soup.p), "<p></p>")

    def test_unclosed_tags_get_closed(self):
        """A tag that's not closed by the end of the document should be closed.

        This applies to all tags except empty-element tags.
        """
        self.assertSoupEquals("<p>", "<p></p>")
        self.assertSoupEquals("<b>", "<b></b>")

        self.assertSoupEquals("<br>", "<br/>")

    def test_br_is_always_empty_element_tag(self):
        """A <br> tag is designated as an empty-element tag.

        Some parsers treat <br></br> as one <br/> tag, some parsers as
        two tags, but it should always be an empty-element tag.
        """
        soup = self.soup("<br></br>")
        self.assertTrue(soup.br.is_empty_element)
        self.assertEqual(str(soup.br), "<br/>")

    def test_nested_formatting_elements(self):
        self.assertSoupEquals("<em><em></em></em>")

    def test_comment(self):
        # Comments are represented as Comment objects.
        markup = "<p>foo<!--foobar-->baz</p>"
        self.assertSoupEquals(markup)

        soup = self.soup(markup)
        comment = soup.find(text="foobar")
        self.assertEqual(comment.__class__, Comment)

        # The comment is properly integrated into the tree.
        foo = soup.find(text="foo")
        self.assertEqual(comment, foo.next_element)
        baz = soup.find(text="baz")
        self.assertEqual(comment, baz.previous_element)

    def test_preserved_whitespace_in_pre_and_textarea(self):
        """Whitespace must be preserved in <pre> and <textarea> tags."""
        self.assertSoupEquals("<pre> </pre>")
        self.assertSoupEquals("<textarea> woo </textarea>")

    def test_nested_inline_elements(self):
        """Inline elements can be nested indefinitely."""
        b_tag = "<b>Inside a B tag</b>"
        self.assertSoupEquals(b_tag)

        nested_b_tag = "<p>A <i>nested <b>tag</b></i></p>"
        self.assertSoupEquals(nested_b_tag)

        double_nested_b_tag = "<p>A <a>doubly <i>nested <b>tag</b></i></a></p>"
        self.assertSoupEquals(double_nested_b_tag)

    def test_nested_block_level_elements(self):
        """Block elements can be nested."""
        soup = self.soup('<blockquote><p><b>Foo</b></p></blockquote>')
        blockquote = soup.blockquote
        self.assertEqual(blockquote.p.b.string, 'Foo')
        self.assertEqual(blockquote.b.string, 'Foo')

    def test_correctly_nested_tables(self):
        """One table can go inside another one."""
        markup = ('<table id="1">'
                  '<tr>'
                  "<td>Here's another table:"
                  '<table id="2">'
                  '<tr><td>foo</td></tr>'
                  '</table></td>')

        self.assertSoupEquals(
            markup,
            '<table id="1"><tr><td>Here\'s another table:'
            '<table id="2"><tr><td>foo</td></tr></table>'
            '</td></tr></table>')

        self.assertSoupEquals(
            "<table><thead><tr><td>Foo</td></tr></thead>"
            "<tbody><tr><td>Bar</td></tr></tbody>"
            "<tfoot><tr><td>Baz</td></tr></tfoot></table>")

    def test_deeply_nested_multivalued_attribute(self):
        # html5lib can set the attributes of the same tag many times
        # as it rearranges the tree. This has caused problems with
        # multivalued attributes.
        markup = '<table><div><div class="css"></div></div></table>'
        soup = self.soup(markup)
        self.assertEqual(["css"], soup.div.div['class'])

    def test_angle_brackets_in_attribute_values_are_escaped(self):
        self.assertSoupEquals('<a b="<a>"></a>', '<a b="&lt;a&gt;"></a>')

    def test_entities_in_attributes_converted_to_unicode(self):
        expect = u'<p id="pi\N{LATIN SMALL LETTER N WITH TILDE}ata"></p>'
        self.assertSoupEquals('<p id="pi&#241;ata"></p>', expect)
        self.assertSoupEquals('<p id="pi&#xf1;ata"></p>', expect)
        self.assertSoupEquals('<p id="pi&#Xf1;ata"></p>', expect)
        self.assertSoupEquals('<p id="pi&ntilde;ata"></p>', expect)

    def test_entities_in_text_converted_to_unicode(self):
        expect = u'<p>pi\N{LATIN SMALL LETTER N WITH TILDE}ata</p>'
        self.assertSoupEquals("<p>pi&#241;ata</p>", expect)
        self.assertSoupEquals("<p>pi&#xf1;ata</p>", expect)
        self.assertSoupEquals("<p>pi&#Xf1;ata</p>", expect)
        self.assertSoupEquals("<p>pi&ntilde;ata</p>", expect)

    def test_quot_entity_converted_to_quotation_mark(self):
        self.assertSoupEquals("<p>I said &quot;good day!&quot;</p>",
                              '<p>I said "good day!"</p>')

    def test_out_of_range_entity(self):
        expect = u"\N{REPLACEMENT CHARACTER}"
        self.assertSoupEquals("&#10000000000000;", expect)
        self.assertSoupEquals("&#x10000000000000;", expect)
        self.assertSoupEquals("&#1000000000;", expect)

    def test_multipart_strings(self):
        "Mostly to prevent a recurrence of a bug in the html5lib treebuilder."
        soup = self.soup("<html><h2>\nfoo</h2><p></p></html>")
        self.assertEqual("p", soup.h2.string.next_element.name)
        self.assertEqual("p", soup.p.name)

    def test_basic_namespaces(self):
        """Parsers don't need to *understand* namespaces, but at the
        very least they should not choke on namespaces or lose
        data."""

        markup = b'<html xmlns="http://www.w3.org/1999/xhtml" xmlns:mathml="http://www.w3.org/1998/Math/MathML" xmlns:svg="http://www.w3.org/2000/svg"><head></head><body><mathml:msqrt>4</mathml:msqrt><b svg:fill="red"></b></body></html>'
        soup = self.soup(markup)
        self.assertEqual(markup, soup.encode())
        html = soup.html
        self.assertEqual('http://www.w3.org/1999/xhtml', soup.html['xmlns'])
        self.assertEqual(
            'http://www.w3.org/1998/Math/MathML', soup.html['xmlns:mathml'])
        self.assertEqual(
            'http://www.w3.org/2000/svg', soup.html['xmlns:svg'])

    def test_multivalued_attribute_value_becomes_list(self):
        markup = b'<a class="foo bar">'
        soup = self.soup(markup)
        self.assertEqual(['foo', 'bar'], soup.a['class'])

    #
    # Generally speaking, tests below this point are more tests of
    # Beautiful Soup than tests of the tree builders. But parsers are
    # weird, so we run these tests separately for every tree builder
    # to detect any differences between them.
    #

    def test_can_parse_unicode_document(self):
        # A seemingly innocuous document... but it's in Unicode! And
        # it contains characters that can't be represented in the
        # encoding found in the declaration! The horror!
        markup = u'<html><head><meta encoding="euc-jp"></head><body>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</body>'
        soup = self.soup(markup)
        self.assertEqual(u'Sacr\xe9 bleu!', soup.body.string)

    def test_soupstrainer(self):
        """Parsers should be able to work with SoupStrainers."""
        strainer = SoupStrainer("b")
        soup = self.soup("A <b>bold</b> <meta/> <i>statement</i>",
                         parse_only=strainer)
        self.assertEqual(soup.decode(), "<b>bold</b>")

    def test_single_quote_attribute_values_become_double_quotes(self):
        self.assertSoupEquals("<foo attr='bar'></foo>",
                              '<foo attr="bar"></foo>')

    def test_attribute_values_with_nested_quotes_are_left_alone(self):
        text = """<foo attr='bar "brawls" happen'>a</foo>"""
        self.assertSoupEquals(text)

    def test_attribute_values_with_double_nested_quotes_get_quoted(self):
        text = """<foo attr='bar "brawls" happen'>a</foo>"""
        soup = self.soup(text)
        soup.foo['attr'] = 'Brawls happen at "Bob\'s Bar"'
        self.assertSoupEquals(
            soup.foo.decode(),
            """<foo attr="Brawls happen at &quot;Bob\'s Bar&quot;">a</foo>""")

    def test_ampersand_in_attribute_value_gets_escaped(self):
        self.assertSoupEquals('<this is="really messed up & stuff"></this>',
                              '<this is="really messed up &amp; stuff"></this>')

        self.assertSoupEquals(
            '<a href="http://example.org?a=1&b=2;3">foo</a>',
            '<a href="http://example.org?a=1&amp;b=2;3">foo</a>')

    def test_escaped_ampersand_in_attribute_value_is_left_alone(self):
        self.assertSoupEquals('<a href="http://example.org?a=1&amp;b=2;3"></a>')

    def test_entities_in_strings_converted_during_parsing(self):
        # Both XML and HTML entities are converted to Unicode characters
        # during parsing.
        text = "<p>&lt;&lt;sacr&eacute; bleu!&gt;&gt;</p>"
        expected = u"<p><<sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>></p>"
        self.assertSoupEquals(text, expected)

    def test_smart_quotes_converted_on_the_way_in(self):
        # Microsoft smart quotes are converted to Unicode characters during
        # parsing.
        quote = b"<p>\x91Foo\x92</p>"
        soup = self.soup(quote)
        self.assertEqual(
            soup.p.string,
            u"\N{LEFT SINGLE QUOTATION MARK}Foo\N{RIGHT SINGLE QUOTATION MARK}")

    def test_non_breaking_spaces_converted_on_the_way_in(self):
        soup = self.soup("<a>&nbsp;&nbsp;</a>")
        self.assertEqual(soup.a.string, u"\N{NO-BREAK SPACE}" * 2)

    def test_entities_converted_on_the_way_out(self):
        text = "<p>&lt;&lt;sacr&eacute; bleu!&gt;&gt;</p>"
        expected = u"<p>&lt;&lt;sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</p>".encode("utf-8")
        soup = self.soup(text)
        self.assertEqual(soup.p.encode("utf-8"), expected)

    def test_real_iso_latin_document(self):
        # Smoke test of interrelated functionality, using an
        # easy-to-understand document.

        # Here it is in Unicode. Note that it claims to be in ISO-Latin-1.
        unicode_html = u'<html><head><meta content="text/html; charset=ISO-Latin-1" http-equiv="Content-type"/></head><body><p>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</p></body></html>'

        # That's because we're going to encode it into ISO-Latin-1, and use
        # that to test.
        iso_latin_html = unicode_html.encode("iso-8859-1")

        # Parse the ISO-Latin-1 HTML.
        soup = self.soup(iso_latin_html)
        # Encode it to UTF-8.
        result = soup.encode("utf-8")

        # What do we expect the result to look like? Well, it would
        # look like unicode_html, except that the META tag would say
        # UTF-8 instead of ISO-Latin-1.
        expected = unicode_html.replace("ISO-Latin-1", "utf-8")

        # And, of course, it would be in UTF-8, not Unicode.
        expected = expected.encode("utf-8")

        # Ta-da!
        self.assertEqual(result, expected)

    def test_real_shift_jis_document(self):
        # Smoke test to make sure the parser can handle a document in
        # Shift-JIS encoding, without choking.
        shift_jis_html = (
            b'<html><head></head><body><pre>'
            b'\x82\xb1\x82\xea\x82\xcdShift-JIS\x82\xc5\x83R\x81[\x83f'
            b'\x83B\x83\x93\x83O\x82\xb3\x82\xea\x82\xbd\x93\xfa\x96{\x8c'
            b'\xea\x82\xcc\x83t\x83@\x83C\x83\x8b\x82\xc5\x82\xb7\x81B'
            b'</pre></body></html>')
        unicode_html = shift_jis_html.decode("shift-jis")
        soup = self.soup(unicode_html)

        # Make sure the parse tree is correctly encoded to various
        # encodings.
        self.assertEqual(soup.encode("utf-8"), unicode_html.encode("utf-8"))
        self.assertEqual(soup.encode("euc_jp"), unicode_html.encode("euc_jp"))

    def test_real_hebrew_document(self):
        # A real-world test to make sure we can convert ISO-8859-8 (a
        # Hebrew encoding) to UTF-8.
        hebrew_document = b'<html><head><title>Hebrew (ISO 8859-8) in Visual Directionality</title></head><body><h1>Hebrew (ISO 8859-8) in Visual Directionality</h1>\xed\xe5\xec\xf9</body></html>'
        soup = self.soup(
            hebrew_document, from_encoding="iso8859-8")
        self.assertEqual(soup.original_encoding, 'iso8859-8')
        self.assertEqual(
            soup.encode('utf-8'),
            hebrew_document.decode("iso8859-8").encode("utf-8"))

    def test_meta_tag_reflects_current_encoding(self):
        # Here's the <meta> tag saying that a document is
        # encoded in Shift-JIS.
        meta_tag = ('<meta content="text/html; charset=x-sjis" '
                    'http-equiv="Content-type"/>')

        # Here's a document incorporating that meta tag.
        shift_jis_html = (
            '<html><head>\n%s\n'
            '<meta http-equiv="Content-language" content="ja"/>'
            '</head><body>Shift-JIS markup goes here.') % meta_tag
        soup = self.soup(shift_jis_html)

        # Parse the document, and the charset is seemingly unaffected.
        parsed_meta = soup.find('meta', {'http-equiv': 'Content-type'})
        content = parsed_meta['content']
        self.assertEqual('text/html; charset=x-sjis', content)

        # But that value is actually a ContentMetaAttributeValue object.
        self.assertTrue(isinstance(content, ContentMetaAttributeValue))

        # And it will take on a value that reflects its current
        # encoding.
        self.assertEqual('text/html; charset=utf8', content.encode("utf8"))

        # For the rest of the story, see TestSubstitutions in
        # test_tree.py.

    def test_html5_style_meta_tag_reflects_current_encoding(self):
        # Here's the <meta> tag saying that a document is
        # encoded in Shift-JIS.
        meta_tag = ('<meta id="encoding" charset="x-sjis" />')

        # Here's a document incorporating that meta tag.
        shift_jis_html = (
            '<html><head>\n%s\n'
            '<meta http-equiv="Content-language" content="ja"/>'
            '</head><body>Shift-JIS markup goes here.') % meta_tag
        soup = self.soup(shift_jis_html)

        # Parse the document, and the charset is seemingly unaffected.
        parsed_meta = soup.find('meta', id="encoding")
        charset = parsed_meta['charset']
        self.assertEqual('x-sjis', charset)

        # But that value is actually a CharsetMetaAttributeValue object.
        self.assertTrue(isinstance(charset, CharsetMetaAttributeValue))

        # And it will take on a value that reflects its current
        # encoding.
        self.assertEqual('utf8', charset.encode("utf8"))

    def test_tag_with_no_attributes_can_have_attributes_added(self):
        data = self.soup("<a>text</a>")
        data.a['foo'] = 'bar'
        self.assertEqual('<a foo="bar">text</a>', data.a.decode())


class XMLTreeBuilderSmokeTest(object):

    def test_docstring_generated(self):
        soup = self.soup("<root/>")
        self.assertEqual(
            soup.encode(), b'<?xml version="1.0" encoding="utf-8"?>\n<root/>')

    def test_real_xhtml_document(self):
        """A real XHTML document should come out *exactly* the same as it went in."""
        markup = b"""<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN">
<html xmlns="http://www.w3.org/1999/xhtml">
<head><title>Hello.</title></head>
<body>Goodbye.</body>
</html>"""
        soup = self.soup(markup)
        self.assertEqual(
            soup.encode("utf-8"), markup)

    def test_formatter_processes_script_tag_for_xml_documents(self):
        doc = """
<script type="text/javascript">
</script>
"""
        soup = BeautifulSoup(doc, "xml")
        # lxml would have stripped this while parsing, but we can add
        # it later.
        soup.script.string = 'console.log("< < hey > > ");'
        encoded = soup.encode()
        self.assertTrue(b"&lt; &lt; hey &gt; &gt;" in encoded)

    def test_can_parse_unicode_document(self):
        markup = u'<?xml version="1.0" encoding="euc-jp"><root>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</root>'
        soup = self.soup(markup)
        self.assertEqual(u'Sacr\xe9 bleu!', soup.root.string)

    def test_popping_namespaced_tag(self):
        markup = '<rss xmlns:dc="foo"><dc:creator>b</dc:creator><dc:date>2012-07-02T20:33:42Z</dc:date><dc:rights>c</dc:rights><image>d</image></rss>'
        soup = self.soup(markup)
        self.assertEqual(
            unicode(soup.rss), markup)

    def test_docstring_includes_correct_encoding(self):
        soup = self.soup("<root/>")
        self.assertEqual(
            soup.encode("latin1"),
            b'<?xml version="1.0" encoding="latin1"?>\n<root/>')

    def test_large_xml_document(self):
        """A large XML document should come out the same as it went in."""
        markup = (b'<?xml version="1.0" encoding="utf-8"?>\n<root>'
                  + b'0' * (2**12)
                  + b'</root>')
        soup = self.soup(markup)
        self.assertEqual(soup.encode("utf-8"), markup)

    def test_tags_are_empty_element_if_and_only_if_they_are_empty(self):
        self.assertSoupEquals("<p>", "<p/>")
        self.assertSoupEquals("<p>foo</p>")

    def test_namespaces_are_preserved(self):
        markup = '<root xmlns:a="http://example.com/" xmlns:b="http://example.net/"><a:foo>This tag is in the a namespace</a:foo><b:foo>This tag is in the b namespace</b:foo></root>'
        soup = self.soup(markup)
        root = soup.root
        self.assertEqual("http://example.com/", root['xmlns:a'])
        self.assertEqual("http://example.net/", root['xmlns:b'])

    def test_closing_namespaced_tag(self):
        markup = '<p xmlns:dc="http://purl.org/dc/elements/1.1/"><dc:date>20010504</dc:date></p>'
        soup = self.soup(markup)
        self.assertEqual(unicode(soup.p), markup)

    def test_namespaced_attributes(self):
        markup = '<foo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><bar xsi:schemaLocation="http://www.example.com"/></foo>'
        soup = self.soup(markup)
        self.assertEqual(unicode(soup.foo), markup)

    def test_namespaced_attributes_xml_namespace(self):
        markup = '<foo xml:lang="fr">bar</foo>'
        soup = self.soup(markup)
        self.assertEqual(unicode(soup.foo), markup)

class HTML5TreeBuilderSmokeTest(HTMLTreeBuilderSmokeTest):
|
||||
"""Smoke test for a tree builder that supports HTML5."""
|
||||
|
||||
def test_real_xhtml_document(self):
|
||||
# Since XHTML is not HTML5, HTML5 parsers are not tested to handle
|
||||
# XHTML documents in any particular way.
|
||||
pass
|
||||
|
||||
def test_html_tags_have_namespace(self):
|
||||
markup = "<a>"
|
||||
soup = self.soup(markup)
|
||||
self.assertEqual("http://www.w3.org/1999/xhtml", soup.a.namespace)
|
||||
|
||||
def test_svg_tags_have_namespace(self):
|
||||
markup = '<svg><circle/></svg>'
|
||||
soup = self.soup(markup)
|
||||
namespace = "http://www.w3.org/2000/svg"
|
||||
self.assertEqual(namespace, soup.svg.namespace)
|
||||
self.assertEqual(namespace, soup.circle.namespace)
|
||||
|
||||
|
||||
def test_mathml_tags_have_namespace(self):
|
||||
markup = '<math><msqrt>5</msqrt></math>'
|
||||
soup = self.soup(markup)
|
||||
namespace = 'http://www.w3.org/1998/Math/MathML'
|
||||
self.assertEqual(namespace, soup.math.namespace)
|
||||
self.assertEqual(namespace, soup.msqrt.namespace)
|
||||
|
||||
def test_xml_declaration_becomes_comment(self):
|
||||
markup = '<?xml version="1.0" encoding="utf-8"?><html></html>'
|
||||
soup = self.soup(markup)
|
||||
self.assertTrue(isinstance(soup.contents[0], Comment))
|
||||
self.assertEqual(soup.contents[0], '?xml version="1.0" encoding="utf-8"?')
|
||||
self.assertEqual("html", soup.contents[0].next_element.name)
|
||||
|
||||
def skipIf(condition, reason):
|
||||
def nothing(test, *args, **kwargs):
|
||||
return None
|
||||
|
||||
def decorator(test_item):
|
||||
if condition:
|
||||
return nothing
|
||||
else:
|
||||
return test_item
|
||||
|
||||
return decorator
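This hand-rolled skipIf stands in for unittest.skipIf on Python versions that lack it. Note that it does not report a skip: when the condition holds, the test method is silently replaced by the no-op `nothing`, which returns None and therefore passes. A hedged usage sketch (the test class and name are invented for illustration):

    class SomeBuilderTests(SoupTest):

        @skipIf(True, "dependency seems not to be present")
        def test_never_runs(self):
            # Replaced by `nothing` above, so this body never executes.
            raise AssertionError("should not run")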
@@ -1 +0,0 @@
"The beautifulsoup tests."
@@ -1,141 +0,0 @@
"""Tests of the builder registry."""

import unittest

from bs4 import BeautifulSoup
from bs4.builder import (
    builder_registry as registry,
    HTMLParserTreeBuilder,
    TreeBuilderRegistry,
)

try:
    from bs4.builder import HTML5TreeBuilder
    HTML5LIB_PRESENT = True
except ImportError:
    HTML5LIB_PRESENT = False

try:
    from bs4.builder import (
        LXMLTreeBuilderForXML,
        LXMLTreeBuilder,
    )
    LXML_PRESENT = True
except ImportError:
    LXML_PRESENT = False


class BuiltInRegistryTest(unittest.TestCase):
    """Test the built-in registry with the default builders registered."""

    def test_combination(self):
        if LXML_PRESENT:
            self.assertEqual(registry.lookup('fast', 'html'),
                             LXMLTreeBuilder)

        if LXML_PRESENT:
            self.assertEqual(registry.lookup('permissive', 'xml'),
                             LXMLTreeBuilderForXML)
        self.assertEqual(registry.lookup('strict', 'html'),
                         HTMLParserTreeBuilder)
        if HTML5LIB_PRESENT:
            self.assertEqual(registry.lookup('html5lib', 'html'),
                             HTML5TreeBuilder)

    def test_lookup_by_markup_type(self):
        if LXML_PRESENT:
            self.assertEqual(registry.lookup('html'), LXMLTreeBuilder)
            self.assertEqual(registry.lookup('xml'), LXMLTreeBuilderForXML)
        else:
            self.assertEqual(registry.lookup('xml'), None)
            if HTML5LIB_PRESENT:
                self.assertEqual(registry.lookup('html'), HTML5TreeBuilder)
            else:
                self.assertEqual(registry.lookup('html'), HTMLParserTreeBuilder)

    def test_named_library(self):
        if LXML_PRESENT:
            self.assertEqual(registry.lookup('lxml', 'xml'),
                             LXMLTreeBuilderForXML)
            self.assertEqual(registry.lookup('lxml', 'html'),
                             LXMLTreeBuilder)
        if HTML5LIB_PRESENT:
            self.assertEqual(registry.lookup('html5lib'),
                             HTML5TreeBuilder)

        self.assertEqual(registry.lookup('html.parser'),
                         HTMLParserTreeBuilder)

    def test_beautifulsoup_constructor_does_lookup(self):
        # You can pass in a string.
        BeautifulSoup("", features="html")
        # Or a list of strings.
        BeautifulSoup("", features=["html", "fast"])

        # You'll get an exception if BS can't find an appropriate
        # builder.
        self.assertRaises(ValueError, BeautifulSoup,
                          "", features="no-such-feature")

class RegistryTest(unittest.TestCase):
    """Test the TreeBuilderRegistry class in general."""

    def setUp(self):
        self.registry = TreeBuilderRegistry()

    def builder_for_features(self, *feature_list):
        cls = type('Builder_' + '_'.join(feature_list),
                   (object,), {'features' : feature_list})

        self.registry.register(cls)
        return cls

    def test_register_with_no_features(self):
        builder = self.builder_for_features()

        # Since the builder advertises no features, you can't find it
        # by looking up features.
        self.assertEqual(self.registry.lookup('foo'), None)

        # But you can find it by doing a lookup with no features, if
        # this happens to be the only registered builder.
        self.assertEqual(self.registry.lookup(), builder)

    def test_register_with_features_makes_lookup_succeed(self):
        builder = self.builder_for_features('foo', 'bar')
        self.assertEqual(self.registry.lookup('foo'), builder)
        self.assertEqual(self.registry.lookup('bar'), builder)

    def test_lookup_fails_when_no_builder_implements_feature(self):
        builder = self.builder_for_features('foo', 'bar')
        self.assertEqual(self.registry.lookup('baz'), None)

    def test_lookup_gets_most_recent_registration_when_no_feature_specified(self):
        builder1 = self.builder_for_features('foo')
        builder2 = self.builder_for_features('bar')
        self.assertEqual(self.registry.lookup(), builder2)

    def test_lookup_fails_when_no_tree_builders_registered(self):
        self.assertEqual(self.registry.lookup(), None)

    def test_lookup_gets_most_recent_builder_supporting_all_features(self):
        has_one = self.builder_for_features('foo')
        has_the_other = self.builder_for_features('bar')
        has_both_early = self.builder_for_features('foo', 'bar', 'baz')
        has_both_late = self.builder_for_features('foo', 'bar', 'quux')
        lacks_one = self.builder_for_features('bar')
        has_the_other = self.builder_for_features('foo')

        # There are two builders featuring 'foo' and 'bar', but
        # the one that also features 'quux' was registered later.
        self.assertEqual(self.registry.lookup('foo', 'bar'),
                         has_both_late)

        # There is only one builder featuring 'foo', 'bar', and 'baz'.
        self.assertEqual(self.registry.lookup('foo', 'bar', 'baz'),
                         has_both_early)

    def test_lookup_fails_when_cannot_reconcile_requested_features(self):
        builder1 = self.builder_for_features('foo', 'bar')
        builder2 = self.builder_for_features('foo', 'baz')
        self.assertEqual(self.registry.lookup('bar', 'baz'), None)
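The registry tests above pin down two lookup rules: a builder is a candidate only if it advertises every requested feature, and among the candidates the most recently registered one wins. A small sketch of the same API outside the test harness (the builder class names are invented):

    from bs4.builder import TreeBuilderRegistry

    registry = TreeBuilderRegistry()

    class SlowBuilder(object):
        features = ['html']

    class FastBuilder(object):
        features = ['html', 'fast']

    registry.register(SlowBuilder)
    registry.register(FastBuilder)

    print(registry.lookup('html') is FastBuilder)          # True: registered later
    print(registry.lookup('html', 'fast') is FastBuilder)  # True: full feature match
    print(registry.lookup('xml'))                          # None: no builder has it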
@@ -1,36 +0,0 @@
"Test harness for doctests."

# pylint: disable-msg=E0611,W0142

__metaclass__ = type
__all__ = [
    'additional_tests',
    ]

import atexit
import doctest
import os
#from pkg_resources import (
#    resource_filename, resource_exists, resource_listdir, cleanup_resources)
import unittest

DOCTEST_FLAGS = (
    doctest.ELLIPSIS |
    doctest.NORMALIZE_WHITESPACE |
    doctest.REPORT_NDIFF)


# def additional_tests():
#     "Run the doc tests (README.txt and docs/*, if any exist)"
#     doctest_files = [
#         os.path.abspath(resource_filename('bs4', 'README.txt'))]
#     if resource_exists('bs4', 'docs'):
#         for name in resource_listdir('bs4', 'docs'):
#             if name.endswith('.txt'):
#                 doctest_files.append(
#                     os.path.abspath(
#                         resource_filename('bs4', 'docs/%s' % name)))
#     kwargs = dict(module_relative=False, optionflags=DOCTEST_FLAGS)
#     atexit.register(cleanup_resources)
#     return unittest.TestSuite((
#         doctest.DocFileSuite(*doctest_files, **kwargs)))
@@ -1,85 +0,0 @@
"""Tests to ensure that the html5lib tree builder generates good trees."""

import warnings

try:
    from bs4.builder import HTML5TreeBuilder
    HTML5LIB_PRESENT = True
except ImportError, e:
    HTML5LIB_PRESENT = False
from bs4.element import SoupStrainer
from bs4.testing import (
    HTML5TreeBuilderSmokeTest,
    SoupTest,
    skipIf,
)

@skipIf(
    not HTML5LIB_PRESENT,
    "html5lib seems not to be present, not testing its tree builder.")
class HTML5LibBuilderSmokeTest(SoupTest, HTML5TreeBuilderSmokeTest):
    """See ``HTML5TreeBuilderSmokeTest``."""

    @property
    def default_builder(self):
        return HTML5TreeBuilder()

    def test_soupstrainer(self):
        # The html5lib tree builder does not support SoupStrainers.
        strainer = SoupStrainer("b")
        markup = "<p>A <b>bold</b> statement.</p>"
        with warnings.catch_warnings(record=True) as w:
            soup = self.soup(markup, parse_only=strainer)
        self.assertEqual(
            soup.decode(), self.document_for(markup))

        self.assertTrue(
            "the html5lib tree builder doesn't support parse_only" in
            str(w[0].message))

    def test_correctly_nested_tables(self):
        """html5lib inserts <tbody> tags where other parsers don't."""
        markup = ('<table id="1">'
                  '<tr>'
                  "<td>Here's another table:"
                  '<table id="2">'
                  '<tr><td>foo</td></tr>'
                  '</table></td>')

        self.assertSoupEquals(
            markup,
            '<table id="1"><tbody><tr><td>Here\'s another table:'
            '<table id="2"><tbody><tr><td>foo</td></tr></tbody></table>'
            '</td></tr></tbody></table>')

        self.assertSoupEquals(
            "<table><thead><tr><td>Foo</td></tr></thead>"
            "<tbody><tr><td>Bar</td></tr></tbody>"
            "<tfoot><tr><td>Baz</td></tr></tfoot></table>")

    def test_xml_declaration_followed_by_doctype(self):
        markup = '''<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE html>
<html>
  <head>
  </head>
  <body>
   <p>foo</p>
  </body>
</html>'''
        soup = self.soup(markup)
        # Verify that we can reach the <p> tag; this means the tree is connected.
        self.assertEqual(b"<p>foo</p>", soup.p.encode())

    def test_reparented_markup(self):
        markup = '<p><em>foo</p>\n<p>bar<a></a></em></p>'
        soup = self.soup(markup)
        self.assertEqual(u"<body><p><em>foo</em></p><em>\n</em><p><em>bar<a></a></em></p></body>", soup.body.decode())
        self.assertEqual(2, len(soup.find_all('p')))


    def test_reparented_markup_ends_with_whitespace(self):
        markup = '<p><em>foo</p>\n<p>bar<a></a></em></p>\n'
        soup = self.soup(markup)
        self.assertEqual(u"<body><p><em>foo</em></p><em>\n</em><p><em>bar<a></a></em></p>\n</body>", soup.body.decode())
        self.assertEqual(2, len(soup.find_all('p')))
@@ -1,19 +0,0 @@
"""Tests to ensure that the html.parser tree builder generates good
trees."""

from bs4.testing import SoupTest, HTMLTreeBuilderSmokeTest
from bs4.builder import HTMLParserTreeBuilder

class HTMLParserTreeBuilderSmokeTest(SoupTest, HTMLTreeBuilderSmokeTest):

    @property
    def default_builder(self):
        return HTMLParserTreeBuilder()

    def test_namespaced_system_doctype(self):
        # html.parser can't handle namespaced doctypes, so skip this one.
        pass

    def test_namespaced_public_doctype(self):
        # html.parser can't handle namespaced doctypes, so skip this one.
        pass
@@ -1,91 +0,0 @@
"""Tests to ensure that the lxml tree builder generates good trees."""

import re
import warnings

try:
    import lxml.etree
    LXML_PRESENT = True
    LXML_VERSION = lxml.etree.LXML_VERSION
except ImportError, e:
    LXML_PRESENT = False
    LXML_VERSION = (0,)

if LXML_PRESENT:
    from bs4.builder import LXMLTreeBuilder, LXMLTreeBuilderForXML

from bs4 import (
    BeautifulSoup,
    BeautifulStoneSoup,
)
from bs4.element import Comment, Doctype, SoupStrainer
from bs4.testing import skipIf
from bs4.tests import test_htmlparser
from bs4.testing import (
    HTMLTreeBuilderSmokeTest,
    XMLTreeBuilderSmokeTest,
    SoupTest,
    skipIf,
)

@skipIf(
    not LXML_PRESENT,
    "lxml seems not to be present, not testing its tree builder.")
class LXMLTreeBuilderSmokeTest(SoupTest, HTMLTreeBuilderSmokeTest):
    """See ``HTMLTreeBuilderSmokeTest``."""

    @property
    def default_builder(self):
        return LXMLTreeBuilder()

    def test_out_of_range_entity(self):
        self.assertSoupEquals(
            "<p>foo&#10000000000000;bar</p>", "<p>foobar</p>")
        self.assertSoupEquals(
            "<p>foo&#x10000000000000;bar</p>", "<p>foobar</p>")
        self.assertSoupEquals(
            "<p>foo&#1000000000;bar</p>", "<p>foobar</p>")

    # In lxml < 2.3.5, an empty doctype causes a segfault. Skip this
    # test if an old version of lxml is installed.

    @skipIf(
        not LXML_PRESENT or LXML_VERSION < (2,3,5,0),
        "Skipping doctype test for old version of lxml to avoid segfault.")
    def test_empty_doctype(self):
        soup = self.soup("<!DOCTYPE>")
        doctype = soup.contents[0]
        self.assertEqual("", doctype.strip())

    def test_beautifulstonesoup_is_xml_parser(self):
        # Make sure that the deprecated BSS class uses an xml builder
        # if one is installed.
        with warnings.catch_warnings(record=True) as w:
            soup = BeautifulStoneSoup("<b />")
        self.assertEqual(u"<b/>", unicode(soup.b))
        self.assertTrue("BeautifulStoneSoup class is deprecated" in str(w[0].message))

    def test_real_xhtml_document(self):
        """lxml strips the XML definition from an XHTML doc, which is fine."""
        markup = b"""<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN">
<html xmlns="http://www.w3.org/1999/xhtml">
<head><title>Hello.</title></head>
<body>Goodbye.</body>
</html>"""
        soup = self.soup(markup)
        self.assertEqual(
            soup.encode("utf-8").replace(b"\n", b''),
            markup.replace(b'\n', b'').replace(
                b'<?xml version="1.0" encoding="utf-8"?>', b''))


@skipIf(
    not LXML_PRESENT,
    "lxml seems not to be present, not testing its XML tree builder.")
class LXMLXMLTreeBuilderSmokeTest(SoupTest, XMLTreeBuilderSmokeTest):
    """See ``HTMLTreeBuilderSmokeTest``."""

    @property
    def default_builder(self):
        return LXMLTreeBuilderForXML()
@@ -1,434 +0,0 @@
# -*- coding: utf-8 -*-
"""Tests of Beautiful Soup as a whole."""

import logging
import unittest
import sys
import tempfile

from bs4 import (
    BeautifulSoup,
    BeautifulStoneSoup,
)
from bs4.element import (
    CharsetMetaAttributeValue,
    ContentMetaAttributeValue,
    SoupStrainer,
    NamespacedAttribute,
    )
import bs4.dammit
from bs4.dammit import (
    EntitySubstitution,
    UnicodeDammit,
)
from bs4.testing import (
    SoupTest,
    skipIf,
)
import warnings

try:
    from bs4.builder import LXMLTreeBuilder, LXMLTreeBuilderForXML
    LXML_PRESENT = True
except ImportError, e:
    LXML_PRESENT = False

PYTHON_2_PRE_2_7 = (sys.version_info < (2,7))
PYTHON_3_PRE_3_2 = (sys.version_info[0] == 3 and sys.version_info < (3,2))

class TestConstructor(SoupTest):

    def test_short_unicode_input(self):
        data = u"<h1>éé</h1>"
        soup = self.soup(data)
        self.assertEqual(u"éé", soup.h1.string)

    def test_embedded_null(self):
        data = u"<h1>foo\0bar</h1>"
        soup = self.soup(data)
        self.assertEqual(u"foo\0bar", soup.h1.string)


class TestDeprecatedConstructorArguments(SoupTest):

    def test_parseOnlyThese_renamed_to_parse_only(self):
        with warnings.catch_warnings(record=True) as w:
            soup = self.soup("<a><b></b></a>", parseOnlyThese=SoupStrainer("b"))
        msg = str(w[0].message)
        self.assertTrue("parseOnlyThese" in msg)
        self.assertTrue("parse_only" in msg)
        self.assertEqual(b"<b></b>", soup.encode())

    def test_fromEncoding_renamed_to_from_encoding(self):
        with warnings.catch_warnings(record=True) as w:
            utf8 = b"\xc3\xa9"
            soup = self.soup(utf8, fromEncoding="utf8")
        msg = str(w[0].message)
        self.assertTrue("fromEncoding" in msg)
        self.assertTrue("from_encoding" in msg)
        self.assertEqual("utf8", soup.original_encoding)

    def test_unrecognized_keyword_argument(self):
        self.assertRaises(
            TypeError, self.soup, "<a>", no_such_argument=True)

class TestWarnings(SoupTest):

    def test_disk_file_warning(self):
        filehandle = tempfile.NamedTemporaryFile()
        filename = filehandle.name
        try:
            with warnings.catch_warnings(record=True) as w:
                soup = self.soup(filename)
            msg = str(w[0].message)
            self.assertTrue("looks like a filename" in msg)
        finally:
            filehandle.close()

        # The file no longer exists, so Beautiful Soup will no longer issue the warning.
        with warnings.catch_warnings(record=True) as w:
            soup = self.soup(filename)
        self.assertEqual(0, len(w))

    def test_url_warning(self):
        with warnings.catch_warnings(record=True) as w:
            soup = self.soup("http://www.crummy.com/")
        msg = str(w[0].message)
        self.assertTrue("looks like a URL" in msg)

        with warnings.catch_warnings(record=True) as w:
            soup = self.soup("http://www.crummy.com/ is great")
        self.assertEqual(0, len(w))

class TestSelectiveParsing(SoupTest):

    def test_parse_with_soupstrainer(self):
        markup = "No<b>Yes</b><a>No<b>Yes <c>Yes</c></b>"
        strainer = SoupStrainer("b")
        soup = self.soup(markup, parse_only=strainer)
        self.assertEqual(soup.encode(), b"<b>Yes</b><b>Yes <c>Yes</c></b>")


class TestEntitySubstitution(unittest.TestCase):
    """Standalone tests of the EntitySubstitution class."""
    def setUp(self):
        self.sub = EntitySubstitution

    def test_simple_html_substitution(self):
        # Unicode characters corresponding to named HTML entities
        # are substituted, and no others.
        s = u"foo\u2200\N{SNOWMAN}\u00f5bar"
        self.assertEqual(self.sub.substitute_html(s),
                         u"foo&forall;\N{SNOWMAN}&otilde;bar")

    def test_smart_quote_substitution(self):
        # MS smart quotes are a common source of frustration, so we
        # give them a special test.
        quotes = b"\x91\x92foo\x93\x94"
        dammit = UnicodeDammit(quotes)
        self.assertEqual(self.sub.substitute_html(dammit.markup),
                         "&lsquo;&rsquo;foo&ldquo;&rdquo;")

    def test_xml_conversion_includes_no_quotes_if_make_quoted_attribute_is_false(self):
        s = 'Welcome to "my bar"'
        self.assertEqual(self.sub.substitute_xml(s, False), s)

    def test_xml_attribute_quoting_normally_uses_double_quotes(self):
        self.assertEqual(self.sub.substitute_xml("Welcome", True),
                         '"Welcome"')
        self.assertEqual(self.sub.substitute_xml("Bob's Bar", True),
                         '"Bob\'s Bar"')

    def test_xml_attribute_quoting_uses_single_quotes_when_value_contains_double_quotes(self):
        s = 'Welcome to "my bar"'
        self.assertEqual(self.sub.substitute_xml(s, True),
                         "'Welcome to \"my bar\"'")

    def test_xml_attribute_quoting_escapes_single_quotes_when_value_contains_both_single_and_double_quotes(self):
        s = 'Welcome to "Bob\'s Bar"'
        self.assertEqual(
            self.sub.substitute_xml(s, True),
            '"Welcome to &quot;Bob\'s Bar&quot;"')

    def test_xml_quotes_arent_escaped_when_value_is_not_being_quoted(self):
        quoted = 'Welcome to "Bob\'s Bar"'
        self.assertEqual(self.sub.substitute_xml(quoted), quoted)

    def test_xml_quoting_handles_angle_brackets(self):
        self.assertEqual(
            self.sub.substitute_xml("foo<bar>"),
            "foo&lt;bar&gt;")

    def test_xml_quoting_handles_ampersands(self):
        self.assertEqual(self.sub.substitute_xml("AT&T"), "AT&amp;T")

    def test_xml_quoting_including_ampersands_when_they_are_part_of_an_entity(self):
        self.assertEqual(
            self.sub.substitute_xml("&Aacute;T&T"),
            "&amp;Aacute;T&amp;T")

    def test_xml_quoting_ignoring_ampersands_when_they_are_part_of_an_entity(self):
        self.assertEqual(
            self.sub.substitute_xml_containing_entities("&Aacute;T&T"),
            "&Aacute;T&amp;T")

    def test_quotes_not_html_substituted(self):
        """There's no need to do this except inside attribute values."""
        text = 'Bob\'s "bar"'
        self.assertEqual(self.sub.substitute_html(text), text)


class TestEncodingConversion(SoupTest):
    # Test Beautiful Soup's ability to decode and encode from various
    # encodings.

    def setUp(self):
        super(TestEncodingConversion, self).setUp()
        self.unicode_data = u'<html><head><meta charset="utf-8"/></head><body><foo>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</foo></body></html>'
        self.utf8_data = self.unicode_data.encode("utf-8")
        # Just so you know what it looks like.
        self.assertEqual(
            self.utf8_data,
            b'<html><head><meta charset="utf-8"/></head><body><foo>Sacr\xc3\xa9 bleu!</foo></body></html>')

    def test_ascii_in_unicode_out(self):
        # ASCII input is converted to Unicode. The original_encoding
        # attribute is set to 'utf-8', a superset of ASCII.
        chardet = bs4.dammit.chardet_dammit
        logging.disable(logging.WARNING)
        try:
            def noop(str):
                return None
            # Disable chardet, which will realize that the ASCII is ASCII.
            bs4.dammit.chardet_dammit = noop
            ascii = b"<foo>a</foo>"
            soup_from_ascii = self.soup(ascii)
            unicode_output = soup_from_ascii.decode()
            self.assertTrue(isinstance(unicode_output, unicode))
            self.assertEqual(unicode_output, self.document_for(ascii.decode()))
            self.assertEqual(soup_from_ascii.original_encoding.lower(), "utf-8")
        finally:
            logging.disable(logging.NOTSET)
            bs4.dammit.chardet_dammit = chardet

    def test_unicode_in_unicode_out(self):
        # Unicode input is left alone. The original_encoding attribute
        # is not set.
        soup_from_unicode = self.soup(self.unicode_data)
        self.assertEqual(soup_from_unicode.decode(), self.unicode_data)
        self.assertEqual(soup_from_unicode.foo.string, u'Sacr\xe9 bleu!')
        self.assertEqual(soup_from_unicode.original_encoding, None)

    def test_utf8_in_unicode_out(self):
        # UTF-8 input is converted to Unicode. The original_encoding
        # attribute is set.
        soup_from_utf8 = self.soup(self.utf8_data)
        self.assertEqual(soup_from_utf8.decode(), self.unicode_data)
        self.assertEqual(soup_from_utf8.foo.string, u'Sacr\xe9 bleu!')

    def test_utf8_out(self):
        # The internal data structures can be encoded as UTF-8.
        soup_from_unicode = self.soup(self.unicode_data)
        self.assertEqual(soup_from_unicode.encode('utf-8'), self.utf8_data)

    @skipIf(
        PYTHON_2_PRE_2_7 or PYTHON_3_PRE_3_2,
        "Bad HTMLParser detected; skipping test of non-ASCII characters in attribute name.")
    def test_attribute_name_containing_unicode_characters(self):
        markup = u'<div><a \N{SNOWMAN}="snowman"></a></div>'
        self.assertEqual(self.soup(markup).div.encode("utf8"), markup.encode("utf8"))

class TestUnicodeDammit(unittest.TestCase):
    """Standalone tests of UnicodeDammit."""

    def test_unicode_input(self):
        markup = u"I'm already Unicode! \N{SNOWMAN}"
        dammit = UnicodeDammit(markup)
        self.assertEqual(dammit.unicode_markup, markup)

    def test_smart_quotes_to_unicode(self):
        markup = b"<foo>\x91\x92\x93\x94</foo>"
        dammit = UnicodeDammit(markup)
        self.assertEqual(
            dammit.unicode_markup, u"<foo>\u2018\u2019\u201c\u201d</foo>")

    def test_smart_quotes_to_xml_entities(self):
        markup = b"<foo>\x91\x92\x93\x94</foo>"
        dammit = UnicodeDammit(markup, smart_quotes_to="xml")
        self.assertEqual(
            dammit.unicode_markup, "<foo>&#x2018;&#x2019;&#x201C;&#x201D;</foo>")

    def test_smart_quotes_to_html_entities(self):
        markup = b"<foo>\x91\x92\x93\x94</foo>"
        dammit = UnicodeDammit(markup, smart_quotes_to="html")
        self.assertEqual(
            dammit.unicode_markup, "<foo>&lsquo;&rsquo;&ldquo;&rdquo;</foo>")

    def test_smart_quotes_to_ascii(self):
        markup = b"<foo>\x91\x92\x93\x94</foo>"
        dammit = UnicodeDammit(markup, smart_quotes_to="ascii")
        self.assertEqual(
            dammit.unicode_markup, """<foo>''""</foo>""")

    def test_detect_utf8(self):
        utf8 = b"\xc3\xa9"
        dammit = UnicodeDammit(utf8)
        self.assertEqual(dammit.unicode_markup, u'\xe9')
        self.assertEqual(dammit.original_encoding.lower(), 'utf-8')

    def test_convert_hebrew(self):
        hebrew = b"\xed\xe5\xec\xf9"
        dammit = UnicodeDammit(hebrew, ["iso-8859-8"])
        self.assertEqual(dammit.original_encoding.lower(), 'iso-8859-8')
        self.assertEqual(dammit.unicode_markup, u'\u05dd\u05d5\u05dc\u05e9')

    def test_dont_see_smart_quotes_where_there_are_none(self):
        utf_8 = b"\343\202\261\343\203\274\343\202\277\343\202\244 Watch"
        dammit = UnicodeDammit(utf_8)
        self.assertEqual(dammit.original_encoding.lower(), 'utf-8')
        self.assertEqual(dammit.unicode_markup.encode("utf-8"), utf_8)

    def test_ignore_inappropriate_codecs(self):
        utf8_data = u"Räksmörgås".encode("utf-8")
        dammit = UnicodeDammit(utf8_data, ["iso-8859-8"])
        self.assertEqual(dammit.original_encoding.lower(), 'utf-8')

    def test_ignore_invalid_codecs(self):
        utf8_data = u"Räksmörgås".encode("utf-8")
        for bad_encoding in ['.utf8', '...', 'utF---16.!']:
            dammit = UnicodeDammit(utf8_data, [bad_encoding])
            self.assertEqual(dammit.original_encoding.lower(), 'utf-8')

    def test_detect_html5_style_meta_tag(self):

        for data in (
            b'<html><meta charset="euc-jp" /></html>',
            b"<html><meta charset='euc-jp' /></html>",
            b"<html><meta charset=euc-jp /></html>",
            b"<html><meta charset=euc-jp/></html>"):
            dammit = UnicodeDammit(data, is_html=True)
            self.assertEqual(
                "euc-jp", dammit.original_encoding)

    def test_last_ditch_entity_replacement(self):
        # This is a UTF-8 document that contains bytestrings
        # completely incompatible with UTF-8 (ie. encoded with some other
        # encoding).
        #
        # Since there is no consistent encoding for the document,
        # Unicode, Dammit will eventually encode the document as UTF-8
        # and encode the incompatible characters as REPLACEMENT
        # CHARACTER.
        #
        # If chardet is installed, it will detect that the document
        # can be converted into ISO-8859-1 without errors. This happens
        # to be the wrong encoding, but it is a consistent encoding, so the
        # code we're testing here won't run.
        #
        # So we temporarily disable chardet if it's present.
        doc = b"""\357\273\277<?xml version="1.0" encoding="UTF-8"?>
<html><b>\330\250\330\252\330\261</b>
<i>\310\322\321\220\312\321\355\344</i></html>"""
        chardet = bs4.dammit.chardet_dammit
        logging.disable(logging.WARNING)
        try:
            def noop(str):
                return None
            bs4.dammit.chardet_dammit = noop
            dammit = UnicodeDammit(doc)
            self.assertEqual(True, dammit.contains_replacement_characters)
            self.assertTrue(u"\ufffd" in dammit.unicode_markup)

            soup = BeautifulSoup(doc, "html.parser")
            self.assertTrue(soup.contains_replacement_characters)
        finally:
            logging.disable(logging.NOTSET)
            bs4.dammit.chardet_dammit = chardet

    def test_byte_order_mark_removed(self):
        # A document written in UTF-16LE will have its byte order marker stripped.
        data = b'\xff\xfe<\x00a\x00>\x00\xe1\x00\xe9\x00<\x00/\x00a\x00>\x00'
        dammit = UnicodeDammit(data)
        self.assertEqual(u"<a>áé</a>", dammit.unicode_markup)
        self.assertEqual("utf-16le", dammit.original_encoding)

    def test_detwingle(self):
        # Here's a UTF8 document.
        utf8 = (u"\N{SNOWMAN}" * 3).encode("utf8")

        # Here's a Windows-1252 document.
        windows_1252 = (
            u"\N{LEFT DOUBLE QUOTATION MARK}Hi, I like Windows!"
            u"\N{RIGHT DOUBLE QUOTATION MARK}").encode("windows_1252")

        # Through some unholy alchemy, they've been stuck together.
        doc = utf8 + windows_1252 + utf8

        # The document can't be turned into UTF-8:
        self.assertRaises(UnicodeDecodeError, doc.decode, "utf8")

        # Unicode, Dammit thinks the whole document is Windows-1252,
        # and decodes it into "☃☃☃“Hi, I like Windows!”☃☃☃"

        # But if we run it through fix_embedded_windows_1252, it's fixed:
        fixed = UnicodeDammit.detwingle(doc)
        self.assertEqual(
            u"☃☃☃“Hi, I like Windows!”☃☃☃", fixed.decode("utf8"))

    def test_detwingle_ignores_multibyte_characters(self):
        # Each of these characters has a UTF-8 representation ending
        # in \x93. \x93 is a smart quote if interpreted as
        # Windows-1252. But our code knows to skip over multibyte
        # UTF-8 characters, so they'll survive the process unscathed.
        for tricky_unicode_char in (
            u"\N{LATIN SMALL LIGATURE OE}", # 2-byte char '\xc5\x93'
            u"\N{LATIN SUBSCRIPT SMALL LETTER X}", # 3-byte char '\xe2\x82\x93'
            u"\xf0\x90\x90\x93", # This is a CJK character, not sure which one.
            ):
            input = tricky_unicode_char.encode("utf8")
            self.assertTrue(input.endswith(b'\x93'))
            output = UnicodeDammit.detwingle(input)
            self.assertEqual(output, input)

class TestNamespacedAttribute(SoupTest):

    def test_name_may_be_none(self):
        a = NamespacedAttribute("xmlns", None)
        self.assertEqual(a, "xmlns")

    def test_attribute_is_equivalent_to_colon_separated_string(self):
        a = NamespacedAttribute("a", "b")
        self.assertEqual("a:b", a)

    def test_attributes_are_equivalent_if_prefix_and_name_identical(self):
        a = NamespacedAttribute("a", "b", "c")
        b = NamespacedAttribute("a", "b", "c")
        self.assertEqual(a, b)

        # The actual namespace is not considered.
        c = NamespacedAttribute("a", "b", None)
        self.assertEqual(a, c)

        # But name and prefix are important.
        d = NamespacedAttribute("a", "z", "c")
        self.assertNotEqual(a, d)

        e = NamespacedAttribute("z", "b", "c")
        self.assertNotEqual(a, e)


class TestAttributeValueWithCharsetSubstitution(unittest.TestCase):

    def test_charset_meta_attribute_value(self):
        value = CharsetMetaAttributeValue("euc-jp")
        self.assertEqual("euc-jp", value)
        self.assertEqual("euc-jp", value.original_value)
        self.assertEqual("utf8", value.encode("utf8"))


    def test_content_meta_attribute_value(self):
        value = ContentMetaAttributeValue("text/html; charset=euc-jp")
        self.assertEqual("text/html; charset=euc-jp", value)
        self.assertEqual("text/html; charset=euc-jp", value.original_value)
        self.assertEqual("text/html; charset=utf8", value.encode("utf8"))
@@ -81,6 +81,14 @@ def getBuildEnvironmentController(**kwargs):
    raise Exception("FIXME: Implement BEC for type %s" % str(be.betype))


def _getgitcheckoutdirectoryname(url):
    """ Utility that returns the last component of a git path as directory
    """
    import re
    components = re.split(r'[:\.\/]', url)
    return components[-2] if components[-1] == "git" else components[-1]
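A quick illustration of what this helper returns (the URLs are examples only): splitting on ':', '.' and '/' makes the last path component the directory name, except that a trailing '.git' suffix is skipped in favor of the component before it:

    _getgitcheckoutdirectoryname("git://git.yoctoproject.org/poky")
    # -> 'poky'
    _getgitcheckoutdirectoryname("git://git.example.com/bitbake.git")
    # -> 'bitbake' (the final 'git' component is dropped)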


class BuildEnvironmentController(object):
    """ BuildEnvironmentController (BEC) is the abstract class that defines the operations that MUST
        or SHOULD be supported by a Build Environment. It is used to establish the framework, and must
@@ -109,43 +117,19 @@ class BuildEnvironmentController(object):
        self.be = be
        self.connection = None

    @staticmethod
    def _updateBBLayers(bblayerconf, layerlist):
        conflines = open(bblayerconf, "r").readlines()

        bblayerconffile = open(bblayerconf, "w")
        skip = 0
        for i in xrange(len(conflines)):
            if skip > 0:
                skip -= 1
                continue
            if conflines[i].startswith("# line added by toaster"):
                skip = 1
            else:
                bblayerconffile.write(conflines[i])

        bblayerconffile.write("# line added by toaster build control\nBBLAYERS = \"" + " ".join(layerlist) + "\"")
        bblayerconffile.close()
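To make the rewrite logic concrete: each pass drops any previously appended toaster marker plus the BBLAYERS line that follows it, then appends a fresh marker and assignment, so repeated calls do not accumulate stale entries. A hedged sketch of the resulting file tail (the layer paths are invented):

    # after _updateBBLayers("conf/bblayers.conf",
    #                       ["/src/poky/meta", "/src/poky/meta-yocto"])
    # the file ends with:
    #
    #   # line added by toaster build control
    #   BBLAYERS = "/src/poky/meta /src/poky/meta-yocto"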


    def writeConfFile(self, variable_list = None, raw = None):
        """ Writes a configuration file in the build directory. Override with buildenv-specific implementation. """
        raise Exception("FIXME: Must override to actually write a configuration file")


    def startBBServer(self):
        """ Starts a BB server with Toaster toasterui set up to record the builds, and no controlling UI.
            After this method executes, self.be bbaddress/bbport MUST point to a running and free server,
            and the bbstate MUST be updated to "started".
        """
        raise Exception("FIXME: Must override in order to actually start the BB server")
        raise Exception("Must override in order to actually start the BB server")

    def stopBBServer(self):
        """ Stops the currently running BB server.
            The bbstate MUST be updated to "stopped".
            self.connection must be none.
        """
        raise Exception("FIXME: Must override stopBBServer")

    def setLayers(self, bbs, ls):
        """ Checks-out bitbake executor and layers from git repositories.
@@ -155,7 +139,7 @@ class BuildEnvironmentController(object):

            a word of attention: by convention, the first layer for any build will be poky!
        """
        raise Exception("FIXME: Must override setLayers")
        raise Exception("Must override setLayers")


    def getBBController(self):

@@ -30,12 +30,7 @@ import subprocess

from toastermain import settings

from bbcontroller import BuildEnvironmentController, ShellCmdException, BuildSetupException

import logging
logger = logging.getLogger("toaster")

from pprint import pprint, pformat
from bbcontroller import BuildEnvironmentController, ShellCmdException, BuildSetupException, _getgitcheckoutdirectoryname

class LocalhostBEController(BuildEnvironmentController):
    """ Implementation of the BuildEnvironmentController for the localhost;
@@ -54,19 +49,15 @@ class LocalhostBEController(BuildEnvironmentController):
        if cwd is None:
            cwd = self.be.sourcedir

        #logger.debug("lbc_shellcmmd: (%s) %s" % (cwd, command))
        p = subprocess.Popen(command, cwd = cwd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        (out,err) = p.communicate()
        p.wait()
        if p.returncode:
            if len(err) == 0:
                err = "command: %s \n%s" % (command, out)
            else:
                err = "command: %s \n%s" % (command, err)
            #logger.warn("localhostbecontroller: shellcmd error %s" % err)
            raise ShellCmdException(err)
        else:
            #logger.debug("localhostbecontroller: shellcmd success")
            return out

    def _createdirpath(self, path):
@@ -82,118 +73,29 @@ class LocalhostBEController(BuildEnvironmentController):
        assert self.pokydirname and os.path.exists(self.pokydirname)
        self._createdirpath(self.be.builddir)
        self._shellcmd("bash -c \"source %s/oe-init-build-env %s\"" % (self.pokydirname, self.be.builddir))
        # delete the templateconf.cfg; it may come from an unsupported layer configuration
        os.remove(os.path.join(self.be.builddir, "conf/templateconf.cfg"))


    def writeConfFile(self, file_name, variable_list = None, raw = None):
        filepath = os.path.join(self.be.builddir, file_name)
        with open(filepath, "w") as conffile:
            if variable_list is not None:
                for i in variable_list:
                    conffile.write("%s=\"%s\"\n" % (i.name, i.value))
            if raw is not None:
                conffile.write(raw)


    def startBBServer(self):
        assert self.pokydirname and os.path.exists(self.pokydirname)
        assert self.islayerset

        # find our own toasterui listener/bitbake
        from toaster.bldcontrol.management.commands.loadconf import _reduce_canon_path

        own_bitbake = _reduce_canon_path(os.path.join(os.path.dirname(os.path.abspath(__file__)), "../../../bin/bitbake"))

        assert os.path.exists(own_bitbake) and os.path.isfile(own_bitbake)

        logger.debug("localhostbecontroller: running the listener at %s" % own_bitbake)

        toaster_ui_log_filepath = os.path.join(self.be.builddir, "toaster_ui.log")
        # get the file length; we need to detect the _last_ start of the toaster UI, not the first
        toaster_ui_log_filelength = 0
        if os.path.exists(toaster_ui_log_filepath):
            with open(toaster_ui_log_filepath, "r") as f:
                f.seek(0, 2)    # jump to the end
                toaster_ui_log_filelength = f.tell()

        cmd = "bash -c \"source %s/oe-init-build-env %s 2>&1 >toaster_server.log && bitbake --read conf/toaster-pre.conf --postread conf/toaster.conf --server-only -t xmlrpc -B 0.0.0.0:0 2>&1 >toaster_server.log && DATABASE_URL=%s BBSERVER=0.0.0.0:-1 daemon -d -i -D %s -o toaster_ui.log -- %s --observe-only -u toasterui &\"" % (self.pokydirname, self.be.builddir,
            self.dburl, self.be.builddir, own_bitbake)
        port = "-1"
        logger.debug("localhostbecontroller: starting builder \n%s\n" % cmd)
        cmdoutput = self._shellcmd(cmd)
        for i in cmdoutput.split("\n"):
            if i.startswith("Bitbake server address"):
                port = i.split(" ")[-1]
                logger.debug("localhostbecontroller: Found bitbake server port %s" % port)

        def _toaster_ui_started(filepath, filepos = 0):
            if not os.path.exists(filepath):
                return False
            with open(filepath, "r") as f:
                f.seek(filepos)
                for line in f:
                    if line.startswith("NOTE: ToasterUI waiting for events"):
                        return True
            return False

        retries = 0
        started = False
        while not started and retries < 10:
            started = _toaster_ui_started(toaster_ui_log_filepath, toaster_ui_log_filelength)
            import time
            logger.debug("localhostbecontroller: Waiting bitbake server to start")
            time.sleep(0.5)
            retries += 1

        if not started:
            toaster_server_log = open(os.path.join(self.be.builddir, "toaster_server.log"), "r").read()
            raise BuildSetupException("localhostbecontroller: Bitbake server did not start in 5 seconds, aborting (Error: '%s' '%s')" % (cmdoutput, toaster_server_log))

        logger.debug("localhostbecontroller: Started bitbake server")

        while port == "-1":
            # the port specification is "autodetect"; read the bitbake.lock file
            with open("%s/bitbake.lock" % self.be.builddir, "r") as f:
                for line in f.readlines():
                    if ":" in line:
                        port = line.split(":")[1].strip()
                        logger.debug("localhostbecontroller: Autodetected bitbake port %s", port)
                        break

        print("DEBUG: executing ", "bash -c \"source %s/oe-init-build-env %s && DATABASE_URL=%s source toaster start noweb && sleep 1\"" % (self.pokydirname, self.be.builddir, self.dburl))
        print self._shellcmd("bash -c \"source %s/oe-init-build-env %s && DATABASE_URL=%s source toaster start noweb && sleep 1\"" % (self.pokydirname, self.be.builddir, self.dburl))
        # FIXME unfortunate sleep 1 - we need to make sure that bbserver is started and the toaster ui is connected
        # but since they start async without any return, we just wait a bit
        print "Started server"
        assert self.be.sourcedir and os.path.exists(self.be.builddir)
        self.be.bbaddress = "localhost"
        self.be.bbport = port
        self.be.bbport = "8200"
        self.be.bbstate = BuildEnvironment.SERVER_STARTED
        self.be.save()

    def stopBBServer(self):
        assert self.pokydirname and os.path.exists(self.pokydirname)
        assert self.islayerset
        self._shellcmd("bash -c \"source %s/oe-init-build-env %s && %s source toaster stop\"" %
        print self._shellcmd("bash -c \"source %s/oe-init-build-env %s && %s source toaster stop\"" %
                       (self.pokydirname, self.be.builddir, (lambda: "" if self.be.bbtoken is None else "BBTOKEN=%s" % self.be.bbtoken)()))
        self.be.bbstate = BuildEnvironment.SERVER_STOPPED
        self.be.save()
        logger.debug("localhostbecontroller: Stopped bitbake server")

    def getGitCloneDirectory(self, url, branch):
        """ Utility that returns the last component of a git path as directory
        """
        import re
        components = re.split(r'[:\.\/]', url)
        base = components[-2] if components[-1] == "git" else components[-1]

        if branch != "HEAD":
            return "_%s_%s.toaster_cloned" % (base, branch)


        # word of attention; this is a localhost-specific issue; only on the localhost we expect to have "HEAD" releases
        # which _ALWAYS_ means the current poky checkout
        from os.path import dirname as DN
        local_checkout_path = DN(DN(DN(DN(DN(os.path.abspath(__file__))))))
        #logger.debug("localhostbecontroller: using HEAD checkout in %s" % local_checkout_path)
        return local_checkout_path

        print "Stopped server"

    def setLayers(self, bitbakes, layers):
        """ a word of attention: by convention, the first layer for any build will be poky! """
@@ -202,109 +104,86 @@ class LocalhostBEController(BuildEnvironmentController):
        assert len(bitbakes) == 1
        # set layers in the layersource

        # 1. get a list of repos with branches, and map dirpaths for each layer
        # 1. get a list of repos, and map dirpaths for each layer
        gitrepos = {}

        gitrepos[(bitbakes[0].giturl, bitbakes[0].commit)] = []
        gitrepos[(bitbakes[0].giturl, bitbakes[0].commit)].append( ("bitbake", bitbakes[0].dirpath) )

        gitrepos[bitbakes[0].giturl] = []
        gitrepos[bitbakes[0].giturl].append( ("bitbake", bitbakes[0].dirpath, bitbakes[0].commit) )

        for layer in layers:
            # we don't process local URLs
            if layer.giturl.startswith("file://"):
                continue
            if not (layer.giturl, layer.commit) in gitrepos:
                gitrepos[(layer.giturl, layer.commit)] = []
            gitrepos[(layer.giturl, layer.commit)].append( (layer.name, layer.dirpath) )
            if not layer.giturl in gitrepos:
                gitrepos[layer.giturl] = []
            gitrepos[layer.giturl].append( (layer.name, layer.dirpath, layer.commit))
        for giturl in gitrepos.keys():
            commitid = gitrepos[giturl][0][2]
            for e in gitrepos[giturl]:
                if commitid != e[2]:
                    raise BuildSetupException("More than one commit per git url, unsupported configuration")


        logger.debug("localhostbecontroller, our git repos are %s" % pformat(gitrepos))


        # 2. find checked-out git repos in the sourcedir directory that may help faster cloning

        cached_layers = {}
        for ldir in os.listdir(self.be.sourcedir):
            fldir = os.path.join(self.be.sourcedir, ldir)
            if os.path.isdir(fldir):
                try:
                    for line in self._shellcmd("git remote -v", fldir).split("\n"):
                        try:
                            remote = line.split("\t")[1].split(" ")[0]
                            if remote not in cached_layers:
                                cached_layers[remote] = fldir
                        except IndexError:
                            pass
                except ShellCmdException:
                    # ignore any errors in collecting git remotes
                    pass

        layerlist = []


        # 3. checkout the repositories
        for giturl, commit in gitrepos.keys():
            localdirname = os.path.join(self.be.sourcedir, self.getGitCloneDirectory(giturl, commit))
            logger.debug("localhostbecontroller: giturl %s:%s checking out in current directory %s" % (giturl, commit, localdirname))
        # 2. checkout the repositories
        for giturl in gitrepos.keys():
            localdirname = os.path.join(self.be.sourcedir, _getgitcheckoutdirectoryname(giturl))
            print "DEBUG: giturl ", giturl, "checking out in current directory", localdirname

            # make sure our directory is a git repository
            if os.path.exists(localdirname):
                localremotes = self._shellcmd("git remote -v", localdirname)
                if not giturl in localremotes:
                    raise BuildSetupException("Existing git repository at %s, but with different remotes ('%s', expected '%s'). Toaster will not continue out of fear of damaging something." % (localdirname, ", ".join(localremotes.split("\n")), giturl))
                if not giturl in self._shellcmd("git remote -v", localdirname):
                    raise BuildSetupException("Existing git repository at %s, but with different remotes (not '%s'). Aborting." % (localdirname, giturl))
            else:
                if giturl in cached_layers:
                    logger.debug("localhostbecontroller git-copying %s to %s" % (cached_layers[giturl], localdirname))
                    self._shellcmd("git clone \"%s\" \"%s\"" % (cached_layers[giturl], localdirname))
                    self._shellcmd("git remote remove origin", localdirname)
                    self._shellcmd("git remote add origin \"%s\"" % giturl, localdirname)
                else:
                    logger.debug("localhostbecontroller: cloning %s:%s in %s" % (giturl, commit, localdirname))
                    self._shellcmd("git clone \"%s\" --single-branch --branch \"%s\" \"%s\"" % (giturl, commit, localdirname))
                self._shellcmd("git clone \"%s\" \"%s\"" % (giturl, localdirname))
            # checkout the needed commit
            commit = gitrepos[giturl][0][2]

            # branch magic name "HEAD" will inhibit checkout
            if commit != "HEAD":
                logger.debug("localhostbecontroller: checking out commit %s to %s " % (commit, localdirname))
                self._shellcmd("git fetch --all && git checkout \"%s\" && git rebase \"origin/%s\"" % (commit, commit), localdirname)
                print "DEBUG: checking out commit ", commit, "to", localdirname
                self._shellcmd("git fetch --all && git checkout \"%s\"" % commit, localdirname)

            # take the localdirname as poky dir if we can find the oe-init-build-env
            if self.pokydirname is None and os.path.exists(os.path.join(localdirname, "oe-init-build-env")):
                logger.debug("localhostbecontroller: selected poky dir name %s" % localdirname)
                print "DEBUG: selected poky dir name", localdirname
                self.pokydirname = localdirname

                # make sure we have a working bitbake
                if not os.path.exists(os.path.join(self.pokydirname, 'bitbake')):
                    logger.debug("localhostbecontroller: checking bitbake into the poky dirname %s " % self.pokydirname)
                    self._shellcmd("git clone -b \"%s\" \"%s\" \"%s\" " % (bitbakes[0].commit, bitbakes[0].giturl, os.path.join(self.pokydirname, 'bitbake')))

            # verify our repositories
            for name, dirpath in gitrepos[(giturl, commit)]:
            for name, dirpath, commit in gitrepos[giturl]:
                localdirpath = os.path.join(localdirname, dirpath)
                logger.debug("localhostbecontroller: localdirpath expected '%s'" % localdirpath)
                if not os.path.exists(localdirpath):
                    raise BuildSetupException("Cannot find layer git path '%s' in checked out repository '%s:%s'. Aborting." % (localdirpath, giturl, commit))

                if name != "bitbake":
                    layerlist.append(localdirpath.rstrip("/"))
                    layerlist.append(localdirpath)

        logger.debug("localhostbecontroller: current layer list %s " % pformat(layerlist))
        print "DEBUG: current layer list ", layerlist

        # 4. configure the build environment, so we have a conf/bblayers.conf
        # 3. configure the build environment, so we have a conf/bblayers.conf
        assert self.pokydirname is not None
        self._setupBE()

        # 5. update the bblayers.conf
        # 4. update the bblayers.conf
        bblayerconf = os.path.join(self.be.builddir, "conf/bblayers.conf")
        if not os.path.exists(bblayerconf):
            raise BuildSetupException("BE is not consistent: bblayers.conf file missing at %s" % bblayerconf)

        BuildEnvironmentController._updateBBLayers(bblayerconf, layerlist)
        conflines = open(bblayerconf, "r").readlines()

        bblayerconffile = open(bblayerconf, "w")
        for i in xrange(len(conflines)):
            if conflines[i].startswith("# line added by toaster"):
                i += 2
            else:
                bblayerconffile.write(conflines[i])

        bblayerconffile.write("\n# line added by toaster build control\nBBLAYERS = \"" + " ".join(layerlist) + "\"")
        bblayerconffile.close()

        self.islayerset = True
        return True

    def readServerLogFile(self):
        return open(os.path.join(self.be.builddir, "toaster_server.log"), "r").read()

    def release(self):
        assert self.be.sourcedir and os.path.exists(self.be.builddir)
        import shutil

@@ -1,8 +1,9 @@
from django.core.management.base import NoArgsCommand, CommandError
from django.db import transaction
from orm.models import LayerSource, ToasterSetting, Branch, Layer, Layer_Version
from orm.models import BitbakeVersion, Release, ReleaseDefaultLayer
from bldcontrol.bbcontroller import getBuildEnvironmentController, ShellCmdException
from bldcontrol.models import BuildRequest, BuildEnvironment, BRError
from orm.models import ToasterSetting
from bldcontrol.models import BuildRequest, BuildEnvironment
import os

def DN(path):
@@ -16,6 +17,16 @@ class Command(NoArgsCommand):
    args = ""
    help = "Verifies that the configured settings are valid and usable, or prompts the user to fix the settings."

    def _reduce_canon_path(self, path):
        components = []
        for c in path.split("/"):
            if c == "..":
                del components[-1]
            elif c == ".":
                pass
            else:
                components.append(c)
        return "/".join(components)
|
||||
|
||||
def _find_first_path_for_file(self, startdirectory, filename, level = 0):
|
||||
if level < 0:
|
||||
@@ -34,22 +45,6 @@ class Command(NoArgsCommand):
|
||||
return ret
|
||||
return None
|
||||
|
||||
def _recursive_list_directories(self, startdirectory, level = 0):
|
||||
if level < 0:
|
||||
return []
|
||||
dirs = []
|
||||
try:
|
||||
for i in os.listdir(startdirectory):
|
||||
j = os.path.join(startdirectory, i)
|
||||
if os.path.isdir(j):
|
||||
dirs.append(j)
|
||||
except OSError:
|
||||
pass
|
||||
for j in dirs:
|
||||
dirs = dirs + self._recursive_list_directories(j, level - 1)
|
||||
return dirs
|
||||
|
||||
|
||||
def _get_suggested_sourcedir(self, be):
|
||||
if be.betype != BuildEnvironment.TYPE_LOCAL:
|
||||
return ""
|
||||
@@ -58,140 +53,151 @@ class Command(NoArgsCommand):
|
||||
def _get_suggested_builddir(self, be):
|
||||
if be.betype != BuildEnvironment.TYPE_LOCAL:
|
||||
return ""
|
||||
return DN(self._find_first_path_for_file(DN(self.guesspath), "bblayers.conf", 4))
|
||||
return DN(self._find_first_path_for_file(self.guesspath, "bblayers.conf", 3))
|
||||
|
||||
    def _import_layer_config(self, baselayerdir):
        filepath = os.path.join(baselayerdir, "meta/conf/toasterconf.json")
        if not os.path.exists(filepath) or not os.path.isfile(filepath):
            raise Exception("Failed to find toaster config file %s." % filepath)

        import json, pprint
        data = json.loads(open(filepath, "r").read())

        # verify config file validity before updating settings
        for i in ['bitbake', 'releases', 'defaultrelease', 'config', 'layersources']:
            assert i in data

        # import bitbake data
        for bvi in data['bitbake']:
            bvo, created = BitbakeVersion.objects.get_or_create(name=bvi['name'])
            bvo.giturl = bvi['giturl']
            bvo.branch = bvi['branch']
            bvo.dirpath = bvi['dirpath']
            bvo.save()

        # set the layer sources
        for lsi in data['layersources']:
            assert 'sourcetype' in lsi
            assert 'apiurl' in lsi
            assert 'name' in lsi
            assert 'branches' in lsi

            if lsi['sourcetype'] == LayerSource.TYPE_LAYERINDEX or lsi['apiurl'].startswith("/"):
                apiurl = lsi['apiurl']
            else:
                apiurl = self._reduce_canon_path(os.path.join(DN(filepath), lsi['apiurl']))

            try:
                ls = LayerSource.objects.get(sourcetype = lsi['sourcetype'], apiurl = apiurl)
            except LayerSource.DoesNotExist:
                ls = LayerSource.objects.create(
                        name = lsi['name'],
                        sourcetype = lsi['sourcetype'],
                        apiurl = apiurl
                    )

            layerbranches = []
            for branchname in lsi['branches']:
                bo, created = Branch.objects.get_or_create(layer_source = ls, name = branchname)
                layerbranches.append(bo)

            if 'layers' in lsi:
                for layerinfo in lsi['layers']:
                    lo, created = Layer.objects.get_or_create(layer_source = ls, name = layerinfo['name'])
                    if layerinfo['local_path'].startswith("/"):
                        lo.local_path = layerinfo['local_path']
                    else:
                        lo.local_path = self._reduce_canon_path(os.path.join(DN(filepath), layerinfo['local_path']))
                    lo.layer_index_url = layerinfo['layer_index_url']
                    if 'vcs_url' in layerinfo:
                        lo.vcs_url = layerinfo['vcs_url']
                    lo.save()

                    for branch in layerbranches:
                        lvo, created = Layer_Version.objects.get_or_create(layer_source = ls,
                                            up_branch = branch,
                                            commit = branch.name,
                                            layer = lo)
                        lvo.dirpath = layerinfo['dirpath']
                        lvo.save()
        # set releases
        for ri in data['releases']:
            bvo = BitbakeVersion.objects.get(name = ri['bitbake'])
            assert bvo is not None

            ro, created = Release.objects.get_or_create(name = ri['name'], bitbake_version = bvo)
            ro.description = ri['description']
            ro.branch = ri['branch']
            ro.save()

            for dli in ri['defaultlayers']:
                lsi, layername = dli.split(":")
                layer, created = Layer.objects.get_or_create(
                        layer_source = LayerSource.objects.get(name = lsi),
                        name = layername
                    )
                ReleaseDefaultLayer.objects.get_or_create( release = ro, layer = layer)

        # set default release
        if ToasterSetting.objects.filter(name = "DEFAULT_RELEASE").count() > 0:
            ToasterSetting.objects.filter(name = "DEFAULT_RELEASE").update(value = data['defaultrelease'])
        else:
            ToasterSetting.objects.create(name = "DEFAULT_RELEASE", value = data['defaultrelease'])

        # set default config variables
        for configname in data['config']:
            if ToasterSetting.objects.filter(name = "DEFCONF_" + configname).count() > 0:
                ToasterSetting.objects.filter(name = "DEFCONF_" + configname).update(value = data['config'][configname])
            else:
                ToasterSetting.objects.create(name = "DEFCONF_" + configname, value = data['config'][configname])

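A minimal configuration skeleton that would pass the asserts above, shown as a Python literal; all names and values here are illustrative, not taken from a real Yocto release:

    # hypothetical toasterconf.json content; every top-level key checked
    # by the asserts in _import_layer_config must be present
    minimal_conf = {
        "config": {"MACHINE": "qemux86"},
        "bitbake": [{"name": "HEAD", "giturl": "git://git.example.com/bitbake",
                     "branch": "HEAD", "dirpath": ""}],
        "defaultrelease": "HEAD",
        "releases": [{"name": "HEAD", "bitbake": "HEAD", "branch": "HEAD",
                      "description": "Local HEAD",
                      "defaultlayers": ["Local Yocto Project:openembedded-core"]}],
        "layersources": [{"name": "Local Yocto Project", "sourcetype": 0,
                          "apiurl": "../../", "branches": ["HEAD"]}],
    }
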
    def handle(self, **options):
        # verify that we have a setting for downloading artifacts
        while ToasterSetting.objects.filter(name="ARTIFACTS_STORAGE_DIR").count() == 0:
            guessedpath = os.getcwd() + "/toaster_build_artifacts/"
            print("Toaster needs to know in which directory it can download build log files and other artifacts.\n Toaster suggests \"%s\"." % guessedpath)
            artifacts_storage_dir = raw_input(" Press Enter to select \"%s\" or type the full path to a different directory: " % guessedpath)
            if len(artifacts_storage_dir) == 0:
                artifacts_storage_dir = guessedpath
            if len(artifacts_storage_dir) > 0 and artifacts_storage_dir.startswith("/"):
                try:
                    os.makedirs(artifacts_storage_dir)
                except OSError as ose:
                    if "File exists" in str(ose):
                        pass
                    else:
                        raise ose
                ToasterSetting.objects.create(name="ARTIFACTS_STORAGE_DIR", value=artifacts_storage_dir)

        self.guesspath = DN(DN(DN(DN(DN(DN(DN(__file__)))))))
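DN() strips one trailing path component, so the seven nested calls climb from this file to the top of the checkout. Illustratively:

    # __file__ is .../bitbake/lib/toaster/bldcontrol/management/commands/checksettings.py;
    # each DN() drops one component:
    #   commands -> management -> bldcontrol -> toaster -> lib -> bitbake -> checkout root
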
        # refuse to start if we have no build environments
        while BuildEnvironment.objects.count() == 0:
            print(" !! No build environments found. Toaster needs at least one build environment in order to be able to run builds.\n" +
                "You can manually define build environments in the database table bldcontrol_buildenvironment.\n" +
                "Or Toaster can define a simple localhost-based build environment for you.")

            i = raw_input(" -- Do you want to create a basic localhost build environment ? (Y/n) ")
            if not len(i) or i.startswith("y") or i.startswith("Y"):
                BuildEnvironment.objects.create(pk = 1, betype = 0)
            else:
                raise Exception("Toaster cannot start without build environments. Aborting.")


        # we make sure we have builddir and sourcedir for all defined build environments
        for be in BuildEnvironment.objects.all():
            be.needs_import = False
            def _verify_be():
                is_changed = False
                print("Verifying the Build Environment. If the local Build Environment is not properly configured, you will be asked to configure it.")

                def _update_sourcedir():
                    print("Verifying the Build Environment type %s id %d." % (be.get_betype_display(), be.pk))
                    if len(be.sourcedir) == 0:
                        suggesteddir = self._get_suggested_sourcedir(be)
                        if len(suggesteddir) > 0:
                            be.sourcedir = raw_input("Toaster needs to know in which directory it should check out the layers that will be needed for your builds.\n Toaster suggests \"%s\". If you select this directory, a layer like \"meta-intel\" will end up in \"%s/meta-intel\".\n Press Enter to select \"%s\" or type the full path to a different directory (must be a parent of the current checkout directory): " % (suggesteddir, suggesteddir, suggesteddir))
                        else:
                            be.sourcedir = raw_input("Toaster needs to know in which directory it should check out the layers that will be needed for your builds. Type the full path to the directory (for example: \"%s\"): " % os.environ.get('HOME', '/tmp/'))
                    be.sourcedir = raw_input(" -- Layer sources checkout directory may not be empty [guessed \"%s\"]:" % suggesteddir)
                    if len(be.sourcedir) == 0 and len(suggesteddir) > 0:
                        be.sourcedir = suggesteddir
                    return True

                if len(be.sourcedir) == 0:
                    print "\n -- Validation: The checkout directory must be set."
                    is_changed = _update_sourcedir()
                    is_changed = True

                if not be.sourcedir.startswith("/"):
                    print "\n -- Validation: The checkout directory must be set to an absolute path."
                    is_changed = _update_sourcedir()

                if be.sourcedir not in DN(__file__):
                    print "\n -- Validation: The checkout directory must be a parent of the current checkout."
                    is_changed = _update_sourcedir()

                if is_changed:
                    if be.betype == BuildEnvironment.TYPE_LOCAL:
                        be.needs_import = True
                    return True

                def _update_builddir():
                    suggesteddir = self._get_suggested_builddir(be)
                    if len(suggesteddir) > 0:
                        be.builddir = raw_input("Toaster needs to know where your build directory is located.\n The build directory is where all the artifacts created by your builds will be stored. Toaster suggests \"%s\".\n Press Enter to select \"%s\" or type the full path to a different directory: " % (suggesteddir, suggesteddir))
                    else:
                        be.builddir = raw_input("Toaster needs to know where your build directory is.\n The build directory is where all the artifacts created by your builds will be stored. Type the full path to the directory (for example: \"%s/build\"): " % os.environ.get('HOME','/tmp/'))
                    if len(be.builddir) == 0 and len(suggesteddir) > 0:
                        be.builddir = suggesteddir
                    return True
                    be.sourcedir = raw_input(" -- Layer sources checkout directory must be an absolute path:")
                    is_changed = True

                if len(be.builddir) == 0:
                    print "\n -- Validation: The build directory must be set."
                    is_changed = _update_builddir()
                    suggesteddir = self._get_suggested_builddir(be)
                    be.builddir = raw_input(" -- Build directory may not be empty [guessed \"%s\"]:" % suggesteddir)
                    if len(be.builddir) == 0 and len(suggesteddir) > 0:
                        be.builddir = suggesteddir
                    is_changed = True

                if not be.builddir.startswith("/"):
                    print "\n -- Validation: The build directory must be set to an absolute path."
                    is_changed = _update_builddir()
                    be.builddir = raw_input(" -- Build directory must be an absolute path:")
                    is_changed = True



                if is_changed:
                    print "Build configuration saved"
                    be.save()
                    return True

                if be.needs_import:
                    print "\nToaster can use a SINGLE predefined configuration file to set up default project settings and layer information sources.\n"

                    # find configuration files
                    config_files = []
                    for dirname in self._recursive_list_directories(be.sourcedir, 2):
                        if os.path.exists(os.path.join(dirname, ".templateconf")):
                            import subprocess
                            conffilepath, error = subprocess.Popen('bash -c ". ' + os.path.join(dirname, ".templateconf") + '; echo \"\$TEMPLATECONF\""', shell=True, stdout=subprocess.PIPE).communicate()
                            conffilepath = os.path.join(conffilepath.strip(), "toasterconf.json")
                            candidatefilepath = os.path.join(dirname, conffilepath)
                            if os.path.exists(candidatefilepath):
                                config_files.append(candidatefilepath)
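The .templateconf file sourced here is a small shell fragment; in a poky checkout it is expected to set TEMPLATECONF to the directory holding the template configuration files, along the lines of (illustrative):

    # .templateconf, sourced by the subprocess above
    # TEMPLATECONF=${TEMPLATECONF:-meta-yocto/conf}
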

                    if len(config_files) > 0:
                        print " Toaster will now list the configuration files that it found. Select the number to use the desired configuration file."
                        for cf in config_files:
                            print " [%d] - %s" % (config_files.index(cf) + 1, cf)
                        print "\n [0] - Exit without importing any file"
                        try:
                            i = raw_input("\n Enter your option: ")
                            if len(i) and (int(i) - 1 >= 0 and int(i) - 1 < len(config_files)):
                                print "Importing file: %s" % config_files[int(i)-1]
                                from loadconf import Command as LoadConfigCommand

                                LoadConfigCommand()._import_layer_config(config_files[int(i)-1])
                                # we run lsupdates after config update
                                print "Layer configuration imported. Updating information from the layer sources, please wait.\n You can re-update any time later by running bitbake/lib/toaster/manage.py lsupdates"
                                from django.core.management import call_command
                                call_command("lsupdates")

                                # we don't look for any other config files
                                return is_changed
                        except Exception as e:
                            print "Failure while trying to import the toaster config file: %s" % e
                    else:
                        print "\n Toaster could not find a configuration file. You need to configure Toaster manually using the web interface, or create a configuration file and use the\n bitbake/lib/toaster/manage.py loadconf [filename]\n command to load it. You can use https://wiki.yoctoproject.org/wiki/File:Toasterconf.json.txt.patch as a starting point."



                if is_changed and be.betype == BuildEnvironment.TYPE_LOCAL:
                    baselayerdir = DN(DN(self._find_first_path_for_file(be.sourcedir, "toasterconf.json", 3)))
                    if baselayerdir:
                        i = raw_input(" -- Do you want to import basic layer configuration from \"%s\" ? (y/N):" % baselayerdir)
                        if len(i) and i.upper()[0] == 'Y':
                            self._import_layer_config(baselayerdir)
                            # we run lsupdates after config update
                            print "Updating information from the layer source, please wait."
                            from django.core.management import call_command
                            call_command("lsupdates")
                        pass

                return is_changed

@@ -203,12 +209,4 @@ class Command(NoArgsCommand):
        ToasterSetting.objects.filter(name = 'DEFAULT_RELEASE').delete()
        ToasterSetting.objects.get_or_create(name = 'DEFAULT_RELEASE', value = '')

        # we are just starting up; we must not have any builds in progress, or build environments taken
        for b in BuildRequest.objects.filter(state = BuildRequest.REQ_INPROGRESS):
            BRError.objects.create(req = b, errtype = "toaster", errmsg = "Toaster found this build IN PROGRESS while Toaster started up. This is an inconsistent state, and the build was marked as failed")

        BuildRequest.objects.filter(state = BuildRequest.REQ_INPROGRESS).update(state = BuildRequest.REQ_FAILED)

        BuildEnvironment.objects.update(lock = BuildEnvironment.LOCK_FREE)

        return 0

@@ -1,174 +0,0 @@
from django.core.management.base import BaseCommand, CommandError
from orm.models import LayerSource, ToasterSetting, Branch, Layer, Layer_Version
from orm.models import BitbakeVersion, Release, ReleaseDefaultLayer, ReleaseLayerSourcePriority
import os

from checksettings import DN

def _reduce_canon_path(path):
    components = []
    for c in path.split("/"):
        if c == "..":
            del components[-1]
        elif c == ".":
            pass
        else:
            components.append(c)
    if len(components) < 2:
        components.append('')
    return "/".join(components)

def _get_id_for_sourcetype(s):
    for i in LayerSource.SOURCE_TYPE:
        if s == i[1]:
            return i[0]
    raise Exception("Could not find definition for sourcetype '%s'. Valid source types are %s" % (str(s), ', '.join(map(lambda x: "'%s'" % x[1], LayerSource.SOURCE_TYPE))))
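A sketch of the lookup, assuming orm.models defines the source types as (id, name) pairs; the mappings below are the ones the UtilityTests at the end of this compare assert:

    # LayerSource.SOURCE_TYPE ~ ((0, "local"), (1, "layerindex"), (2, "imported"))
    #   _get_id_for_sourcetype("local")      == 0
    #   _get_id_for_sourcetype("layerindex") == 1
    #   _get_id_for_sourcetype("imported")   == 2
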

class Command(BaseCommand):
    help = "Loads a toasterconf.json file in the database"
    args = "filepath"



    def _import_layer_config(self, filepath):
        if not os.path.exists(filepath) or not os.path.isfile(filepath):
            raise Exception("Failed to find toaster config file %s." % filepath)

        import json, pprint
        data = json.loads(open(filepath, "r").read())

        # verify config file validity before updating settings
        for i in ['bitbake', 'releases', 'defaultrelease', 'config', 'layersources']:
            assert i in data

        def _read_git_url_from_local_repository(address):
            url = None
            # we detect the remote name at runtime
            import subprocess
            (remote, remote_name) = address.split(":", 1)
            cmd = subprocess.Popen("git remote -v", shell=True, cwd = os.path.dirname(filepath), stdout=subprocess.PIPE, stderr = subprocess.PIPE)
            (out, err) = cmd.communicate()
            if cmd.returncode != 0:
                raise Exception("Error while importing layer vcs_url: git error: %s" % err)
            for line in out.split("\n"):
                try:
                    (name, path) = line.split("\t", 1)
                    if name == remote_name:
                        url = path.split(" ")[0]
                        break
                except ValueError:
                    pass
            if url is None:
                raise Exception("Error while looking for remote \"%s\" in \"%s\"" % (remote_name, out))
            return url
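A usage sketch (the remote name and URL are hypothetical): a config value such as "remote:origin" is resolved against the `git remote -v` output of the repository containing the config file:

    # given output containing:
    #   origin  git://git.example.com/poky (fetch)
    # the helper returns:
    #   _read_git_url_from_local_repository("remote:origin")
    #     -> "git://git.example.com/poky"
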


        # import bitbake data
        for bvi in data['bitbake']:
            bvo, created = BitbakeVersion.objects.get_or_create(name=bvi['name'])
            bvo.giturl = bvi['giturl']
            if bvi['giturl'].startswith("remote:"):
                bvo.giturl = _read_git_url_from_local_repository(bvi['giturl'])
            bvo.branch = bvi['branch']
            bvo.dirpath = bvi['dirpath']
            bvo.save()

        # set the layer sources
        for lsi in data['layersources']:
            assert 'sourcetype' in lsi
            assert 'apiurl' in lsi
            assert 'name' in lsi
            assert 'branches' in lsi


            if _get_id_for_sourcetype(lsi['sourcetype']) == LayerSource.TYPE_LAYERINDEX or lsi['apiurl'].startswith("/"):
                apiurl = lsi['apiurl']
            else:
                apiurl = _reduce_canon_path(os.path.join(DN(os.path.abspath(filepath)), lsi['apiurl']))

            assert ((_get_id_for_sourcetype(lsi['sourcetype']) == LayerSource.TYPE_LAYERINDEX) or apiurl.startswith("/")), (lsi['sourcetype'], apiurl)

            try:
                ls = LayerSource.objects.get(sourcetype = _get_id_for_sourcetype(lsi['sourcetype']), apiurl = apiurl)
            except LayerSource.DoesNotExist:
                ls = LayerSource.objects.create(
                        name = lsi['name'],
                        sourcetype = _get_id_for_sourcetype(lsi['sourcetype']),
                        apiurl = apiurl
                    )

            layerbranches = []
            for branchname in lsi['branches']:
                bo, created = Branch.objects.get_or_create(layer_source = ls, name = branchname)
                layerbranches.append(bo)

            if 'layers' in lsi:
                for layerinfo in lsi['layers']:
                    lo, created = Layer.objects.get_or_create(layer_source = ls, name = layerinfo['name'])
                    if layerinfo['local_path'].startswith("/"):
                        lo.local_path = layerinfo['local_path']
                    else:
                        lo.local_path = _reduce_canon_path(os.path.join(ls.apiurl, layerinfo['local_path']))

                    if not os.path.exists(lo.local_path):
                        raise Exception("Local layer path %s must exist." % lo.local_path)

                    lo.vcs_url = layerinfo['vcs_url']
                    if layerinfo['vcs_url'].startswith("remote:"):
                        lo.vcs_url = _read_git_url_from_local_repository(layerinfo['vcs_url'])
                    else:
                        lo.vcs_url = layerinfo['vcs_url']

                    if 'layer_index_url' in layerinfo:
                        lo.layer_index_url = layerinfo['layer_index_url']
                    lo.save()

                    for branch in layerbranches:
                        lvo, created = Layer_Version.objects.get_or_create(layer_source = ls,
                                            up_branch = branch,
                                            commit = branch.name,
                                            layer = lo)
                        lvo.dirpath = layerinfo['dirpath']
                        lvo.save()
        # set releases
        for ri in data['releases']:
            bvo = BitbakeVersion.objects.get(name = ri['bitbake'])
            assert bvo is not None

            ro, created = Release.objects.get_or_create(name = ri['name'], bitbake_version = bvo, branch_name = ri['branch'])
            ro.description = ri['description']
            ro.helptext = ri['helptext']
            ro.save()

            # save layer source priority for release
            for ls_name in ri['layersourcepriority'].keys():
                rlspo, created = ReleaseLayerSourcePriority.objects.get_or_create(release = ro, layer_source = LayerSource.objects.get(name=ls_name))
                rlspo.priority = ri['layersourcepriority'][ls_name]
                rlspo.save()

            for dli in ri['defaultlayers']:
                # find layers with the same name
                ReleaseDefaultLayer.objects.get_or_create( release = ro, layer_name = dli)

        # set default release
        if ToasterSetting.objects.filter(name = "DEFAULT_RELEASE").count() > 0:
            ToasterSetting.objects.filter(name = "DEFAULT_RELEASE").update(value = data['defaultrelease'])
        else:
            ToasterSetting.objects.create(name = "DEFAULT_RELEASE", value = data['defaultrelease'])

        # set default config variables
        for configname in data['config']:
            if ToasterSetting.objects.filter(name = "DEFCONF_" + configname).count() > 0:
                ToasterSetting.objects.filter(name = "DEFCONF_" + configname).update(value = data['config'][configname])
            else:
                ToasterSetting.objects.create(name = "DEFCONF_" + configname, value = data['config'][configname])


    def handle(self, *args, **options):
        if len(args) == 0:
            raise CommandError("Need a path to the toasterconf.json file")
        filepath = args[0]
        self._import_layer_config(filepath)


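A usage sketch for the command (the path is illustrative):

    # from the toaster directory
    $ ./manage.py loadconf /path/to/meta/conf/toasterconf.json
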
@@ -1,12 +1,9 @@
from django.core.management.base import NoArgsCommand, CommandError
from django.db import transaction
from orm.models import Build, ToasterSetting
from orm.models import Build
from bldcontrol.bbcontroller import getBuildEnvironmentController, ShellCmdException, BuildSetupException
from bldcontrol.models import BuildRequest, BuildEnvironment, BRError, BRVariable
from bldcontrol.models import BuildRequest, BuildEnvironment, BRError
import os
import logging

logger = logging.getLogger("toaster")

class Command(NoArgsCommand):
    args = ""
@@ -35,7 +32,6 @@ class Command(NoArgsCommand):
            # select the build environment and the request to build
            br = self._selectBuildRequest()
        except IndexError as e:
            # logger.debug("runbuilds: No build request")
            return
        try:
            bec = self._selectBuildEnvironment()
@@ -43,32 +39,26 @@ class Command(NoArgsCommand):
            # we could not find a BEC; postpone the BR
            br.state = BuildRequest.REQ_QUEUED
            br.save()
            logger.debug("runbuilds: No build env")
            return

        logger.debug("runbuilds: starting build %s, environment %s" % (br, bec.be))

        # write the build identification variable
        BRVariable.objects.create(req = br, name="TOASTER_BRBE", value="%d:%d" % (br.pk, bec.be.pk))
        # let the build request know where it is being executed
        br.environment = bec.be
        br.save()

        # set up the build environment with the needed layers
        print "Build %s, Environment %s" % (br, bec.be)
        bec.setLayers(br.brbitbake_set.all(), br.brlayer_set.all())
        bec.writeConfFile("conf/toaster-pre.conf", br.brvariable_set.all())
        bec.writeConfFile("conf/toaster.conf", raw = "INHERIT+=\"toaster buildhistory\"")

        # get the bb server running with the build req id and build env id
        # get the bb server running
        bbctrl = bec.getBBController()

        # trigger the build command
        task = reduce(lambda x, y: x if len(y) == 0 else y, map(lambda y: y.task, br.brtarget_set.all()))
        if len(task) == 0:
            task = None
        bbctrl.build(list(map(lambda x: x.target, br.brtarget_set.all())), task)
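The reduce above keeps the last non-empty task string across all targets; if every target has an empty task, the follow-up check turns it into None so bitbake falls back to its default task. A minimal sketch (reduce is a builtin in the Python 2 this code targets):

    tasks = ["", "build", ""]
    task = reduce(lambda x, y: x if len(y) == 0 else y, tasks)
    assert task == "build"
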
        # let toasterui know that this is a managed build
        bbctrl.setVariable("TOASTER_BRBE", "%d:%d" % (br.pk, bec.be.pk))

        logger.debug("runbuilds: Build launched, exiting. Follow build logs at %s/toaster_ui.log" % bec.be.builddir)
        # set the build configuration
        for variable in br.brvariable_set.all():
            bbctrl.setVariable(variable.name, variable.value)

        # trigger the build command
        bbctrl.build(list(map(lambda x: x.target, br.brtarget_set.all())))

        print "Build launched, exiting"
        # disconnect from the server
        bbctrl.disconnect()

@@ -76,50 +66,17 @@ class Command(NoArgsCommand):


        except Exception as e:
            logger.error("runbuilds: Error executing shell command %s" % e)
            print " EE Error executing shell command\n", e
            traceback.print_exc(e)
            if "[Errno 111] Connection refused" in str(e):
                # Connection refused; read toaster_server.out
                errmsg = bec.readServerLogFile()
            else:
                errmsg = str(e)

            BRError.objects.create(req = br,
                    errtype = str(type(e)),
                    errmsg = errmsg,
                    traceback = traceback.format_exc(e))
                    errtype = str(type(e)),
                    errmsg = str(e),
                    traceback = traceback.format_exc(e))
            br.state = BuildRequest.REQ_FAILED
            br.save()
            bec.be.lock = BuildEnvironment.LOCK_FREE
            bec.be.save()

    def archive(self):
        ''' archives data from the builds '''
        artifact_storage_dir = ToasterSetting.objects.get(name="ARTIFACTS_STORAGE_DIR").value
        for br in BuildRequest.objects.filter(state = BuildRequest.REQ_ARCHIVE):
            # save cooker log
            if br.build is None:
                br.state = BuildRequest.REQ_FAILED
                br.save()
                continue
            build_artifact_storage_dir = os.path.join(artifact_storage_dir, "%d" % br.build.pk)
            try:
                os.makedirs(build_artifact_storage_dir)
            except OSError as ose:
                if "File exists" in str(ose):
                    pass
                else:
                    raise ose

            file_name = os.path.join(build_artifact_storage_dir, "cooker_log.txt")
            try:
                with open(file_name, "w") as f:
                    f.write(br.environment.get_artifact(br.build.cooker_log_path).read())
            except IOError:
                os.unlink(file_name)

            br.state = BuildRequest.REQ_COMPLETED
            br.save()

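A sketch of the state flow archive() implements: requests parked in REQ_ARCHIVE either fail fast or complete once their cooker log has been copied:

    #   REQ_ARCHIVE -> REQ_FAILED      when br.build is None
    #   REQ_ARCHIVE -> REQ_COMPLETED   after cooker_log.txt lands in
    #                                  ARTIFACTS_STORAGE_DIR/<build pk>/
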
    def cleanup(self):
        from django.utils import timezone
@@ -130,5 +87,4 @@ class Command(NoArgsCommand):

    def handle_noargs(self, **options):
        self.cleanup()
        self.archive()
        self.schedule()

@@ -1,145 +0,0 @@
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):
        # Adding field 'BuildRequest.environment'
        db.add_column(u'bldcontrol_buildrequest', 'environment',
                      self.gf('django.db.models.fields.related.ForeignKey')(to=orm['bldcontrol.BuildEnvironment'], null=True),
                      keep_default=False)


        # Changing field 'BuildRequest.build'
        db.alter_column(u'bldcontrol_buildrequest', 'build_id', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['orm.Build'], unique=True, null=True))
        # Adding unique constraint on 'BuildRequest', fields ['build']
        db.create_unique(u'bldcontrol_buildrequest', ['build_id'])


    def backwards(self, orm):
        # Removing unique constraint on 'BuildRequest', fields ['build']
        db.delete_unique(u'bldcontrol_buildrequest', ['build_id'])

        # Deleting field 'BuildRequest.environment'
        db.delete_column(u'bldcontrol_buildrequest', 'environment_id')


        # Changing field 'BuildRequest.build'
        db.alter_column(u'bldcontrol_buildrequest', 'build_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['orm.Build'], null=True))

    models = {
        u'bldcontrol.brbitbake': {
            'Meta': {'object_name': 'BRBitbake'},
            'commit': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'giturl': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']", 'unique': 'True'})
        },
        u'bldcontrol.brerror': {
            'Meta': {'object_name': 'BRError'},
            'errmsg': ('django.db.models.fields.TextField', [], {}),
            'errtype': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
            'traceback': ('django.db.models.fields.TextField', [], {})
        },
        u'bldcontrol.brlayer': {
            'Meta': {'object_name': 'BRLayer'},
            'commit': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'giturl': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"})
        },
        u'bldcontrol.brtarget': {
            'Meta': {'object_name': 'BRTarget'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
            'target': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'task': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
        },
        u'bldcontrol.brvariable': {
            'Meta': {'object_name': 'BRVariable'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
            'value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
        },
        u'bldcontrol.buildenvironment': {
            'Meta': {'object_name': 'BuildEnvironment'},
            'address': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'bbaddress': ('django.db.models.fields.CharField', [], {'max_length': '254', 'blank': 'True'}),
            'bbport': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
            'bbstate': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'bbtoken': ('django.db.models.fields.CharField', [], {'max_length': '126', 'blank': 'True'}),
            'betype': ('django.db.models.fields.IntegerField', [], {}),
            'builddir': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'lock': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'sourcedir': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'bldcontrol.buildrequest': {
            'Meta': {'object_name': 'BuildRequest'},
            'build': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['orm.Build']", 'unique': 'True', 'null': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'environment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildEnvironment']", 'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
            'state': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'orm.bitbakeversion': {
            'Meta': {'object_name': 'BitbakeVersion'},
            'branch': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
            'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'giturl': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
        },
        u'orm.build': {
            'Meta': {'object_name': 'Build'},
            'bitbake_version': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'build_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'completed_on': ('django.db.models.fields.DateTimeField', [], {}),
            'cooker_log_path': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
            'distro': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'distro_version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'errors_no': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'machine': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'outcome': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']", 'null': 'True'}),
            'started_on': ('django.db.models.fields.DateTimeField', [], {}),
            'timespent': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'warnings_no': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        },
        u'orm.project': {
            'Meta': {'object_name': 'Project'},
            'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"}),
            'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'user_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
        },
        u'orm.release': {
            'Meta': {'object_name': 'Release'},
            'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
            'branch': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
        }
    }

    complete_apps = ['bldcontrol']
@@ -1,138 +0,0 @@
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models

class Migration(DataMigration):
    # ids that cannot be imported from BuildRequest

    def forwards(self, orm):
        REQ_COMPLETED = 3
        REQ_ARCHIVE = 6
        "Write your forwards methods here."
        # Note: Don't use "from appname.models import ModelName".
        # Use orm.ModelName to refer to models in this application,
        # and orm['appname.ModelName'] for models in other applications.
        orm.BuildRequest.objects.filter(state=REQ_COMPLETED).update(state=REQ_ARCHIVE)

    def backwards(self, orm):
        REQ_COMPLETED = 3
        REQ_ARCHIVE = 6
        "Write your backwards methods here."
        orm.BuildRequest.objects.filter(state=REQ_ARCHIVE).update(state=REQ_COMPLETED)
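As with any South data migration, this one is applied through the migrate command; a usage sketch:

    $ ./manage.py migrate bldcontrol   # forwards: REQ_COMPLETED -> REQ_ARCHIVE
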

    models = {
        u'bldcontrol.brbitbake': {
            'Meta': {'object_name': 'BRBitbake'},
            'commit': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'giturl': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']", 'unique': 'True'})
        },
        u'bldcontrol.brerror': {
            'Meta': {'object_name': 'BRError'},
            'errmsg': ('django.db.models.fields.TextField', [], {}),
            'errtype': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
            'traceback': ('django.db.models.fields.TextField', [], {})
        },
        u'bldcontrol.brlayer': {
            'Meta': {'object_name': 'BRLayer'},
            'commit': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'giturl': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"})
        },
        u'bldcontrol.brtarget': {
            'Meta': {'object_name': 'BRTarget'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
            'target': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'task': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
        },
        u'bldcontrol.brvariable': {
            'Meta': {'object_name': 'BRVariable'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
            'value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
        },
        u'bldcontrol.buildenvironment': {
            'Meta': {'object_name': 'BuildEnvironment'},
            'address': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'bbaddress': ('django.db.models.fields.CharField', [], {'max_length': '254', 'blank': 'True'}),
            'bbport': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
            'bbstate': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'bbtoken': ('django.db.models.fields.CharField', [], {'max_length': '126', 'blank': 'True'}),
            'betype': ('django.db.models.fields.IntegerField', [], {}),
            'builddir': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'lock': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'sourcedir': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'bldcontrol.buildrequest': {
            'Meta': {'object_name': 'BuildRequest'},
            'build': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['orm.Build']", 'unique': 'True', 'null': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'environment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildEnvironment']", 'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
            'state': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'orm.bitbakeversion': {
            'Meta': {'object_name': 'BitbakeVersion'},
            'branch': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
            'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'giturl': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
        },
        u'orm.build': {
            'Meta': {'object_name': 'Build'},
            'bitbake_version': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'build_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'completed_on': ('django.db.models.fields.DateTimeField', [], {}),
            'cooker_log_path': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
            'distro': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'distro_version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'errors_no': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'machine': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'outcome': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']", 'null': 'True'}),
            'started_on': ('django.db.models.fields.DateTimeField', [], {}),
            'timespent': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'warnings_no': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        },
        u'orm.project': {
            'Meta': {'object_name': 'Project'},
            'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"}),
            'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'user_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
        },
        u'orm.release': {
            'Meta': {'object_name': 'Release'},
            'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
            'branch_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50'}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'helptext': ('django.db.models.fields.TextField', [], {'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
        }
    }

    complete_apps = ['bldcontrol']
    symmetrical = True
@@ -40,50 +40,6 @@ class BuildEnvironment(models.Model):
    updated = models.DateTimeField(auto_now = True)


    def get_artifact_type(self, path):
        if self.betype == BuildEnvironment.TYPE_LOCAL:
            try:
                import magic

                # fair warning: this is a mess; there are multiple competing and incompatible
                # magic modules floating around, so we try some of the most common combinations

                try: # we try ubuntu's python-magic 5.4
                    m = magic.open(magic.MAGIC_MIME_TYPE)
                    m.load()
                    return m.file(path)
                except AttributeError:
                    pass

                try: # we try python-magic 0.4.6
                    m = magic.Magic(magic.MAGIC_MIME)
                    return m.from_file(path)
                except AttributeError:
                    pass

                try: # we try pip filemagic 1.6
                    m = magic.Magic(flags=magic.MAGIC_MIME_TYPE)
                    return m.id_filename(path)
                except AttributeError:
                    pass

                return "binary/octet-stream"
            except ImportError:
                return "binary/octet-stream"
        raise Exception("FIXME: artifact type not implemented for build environment type %s" % self.get_betype_display())

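A usage sketch (the path is hypothetical): on a local build environment the method returns a MIME type when one of the supported magic bindings is importable, and the octet-stream fallback otherwise:

    #   be.get_artifact_type("/srv/artifacts/42/cooker_log.txt")
    #   -> e.g. "text/plain", or "binary/octet-stream" on fallback
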
    def get_artifact(self, path):
        if self.betype == BuildEnvironment.TYPE_LOCAL:
            return open(path, "r")
        raise Exception("FIXME: artifact download not implemented for build environment type %s" % self.get_betype_display())

    def has_artifact(self, path):
        import os
        if self.betype == BuildEnvironment.TYPE_LOCAL:
            return os.path.exists(path)
        raise Exception("FIXME: has artifact not implemented for build environment type %s" % self.get_betype_display())

# a BuildRequest is a request that the scheduler will build using a BuildEnvironment
# the build request queue is the table itself, ordered by state

@@ -93,8 +49,6 @@ class BuildRequest(models.Model):
    REQ_INPROGRESS = 2
    REQ_COMPLETED = 3
    REQ_FAILED = 4
    REQ_DELETED = 5
    REQ_ARCHIVE = 6

    REQUEST_STATE = (
        (REQ_CREATED, "created"),
@@ -102,28 +56,14 @@ class BuildRequest(models.Model):
        (REQ_INPROGRESS, "in progress"),
        (REQ_COMPLETED, "completed"),
        (REQ_FAILED, "failed"),
        (REQ_DELETED, "deleted"),
        (REQ_ARCHIVE, "archive"),
    )

    search_allowed_fields = ("brtarget__target", "build__project__name")

    project = models.ForeignKey(Project)
    build = models.OneToOneField(Build, null = True) # TODO: toasterui should set this when Build is created
    environment = models.ForeignKey(BuildEnvironment, null = True)
    build = models.ForeignKey(Build, null = True) # TODO: toasterui should set this when Build is created
    state = models.IntegerField(choices = REQUEST_STATE, default = REQ_CREATED)
    created = models.DateTimeField(auto_now_add = True)
    updated = models.DateTimeField(auto_now = True)

    def get_duration(self):
        return (self.updated - self.created).total_seconds()

    def get_sorted_target_list(self):
        return self.brtarget_set.order_by('target')

    def get_machine(self):
        return self.brvariable_set.get(name="MACHINE").value

# These tables specify the settings for running an actual build.
# They MUST be kept in sync with the tables in orm.models.Project*

@@ -29,7 +29,7 @@ import subprocess

from toastermain import settings

from bbcontroller import BuildEnvironmentController, ShellCmdException, BuildSetupException
from bbcontroller import BuildEnvironmentController, ShellCmdException, BuildSetupException, _getgitcheckoutdirectoryname

def DN(path):
    return "/".join(path.split("/")[0:-1])
@@ -77,21 +77,16 @@ class SSHBEController(BuildEnvironmentController):
        self._pathcreate(self.be.builddir)
        self._shellcmd("bash -c \"source %s/oe-init-build-env %s\"" % (self.pokydirname, self.be.builddir))

    def startBBServer(self, brbe):
    def startBBServer(self):
        assert self.pokydirname and self._pathexists(self.pokydirname)
        assert self.islayerset
        cmd = self._shellcmd("bash -c \"source %s/oe-init-build-env %s && DATABASE_URL=%s source toaster start noweb brbe=%s\"" % (self.pokydirname, self.be.builddir, self.dburl, brbe))

        port = "-1"
        for i in cmd.split("\n"):
            if i.startswith("Bitbake server address"):
                port = i.split(" ")[-1]
                print "Found bitbake server port ", port


        print self._shellcmd("bash -c \"source %s/oe-init-build-env %s && DATABASE_URL=%s source toaster start noweb && sleep 1\"" % (self.pokydirname, self.be.builddir, self.dburl))
        # FIXME unfortunate sleep 1 - we need to make sure that bbserver is started and the toaster ui is connected,
        # but since they start async without any return, we just wait a bit
        print "Started server"
        assert self.be.sourcedir and self._pathexists(self.be.builddir)
        self.be.bbaddress = self.be.address.split("@")[-1]
        self.be.bbport = port
        self.be.bbport = "8200"
        self.be.bbstate = BuildEnvironment.SERVER_STARTED
        self.be.save()

@@ -104,19 +99,6 @@ class SSHBEController(BuildEnvironmentController):
        self.be.save()
        print "Stopped server"


    def _copyFile(self, filepath1, filepath2):
        p = subprocess.Popen("scp '%s' '%s'" % (filepath1, filepath2), stdout=subprocess.PIPE, stderr = subprocess.PIPE, shell=True)
        (out, err) = p.communicate()
        if p.returncode:
            raise ShellCmdException(err)

    def pullFile(self, local_filename, remote_filename):
        self._copyFile(local_filename, "%s:%s" % (self.be.address, remote_filename))

    def pushFile(self, local_filename, remote_filename):
        self._copyFile("%s:%s" % (self.be.address, remote_filename), local_filename)

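Both helpers wrap scp through _copyFile, so self.be.address must be an ssh-reachable host; a usage sketch (paths are hypothetical):

    # copy the remote bblayers.conf to a local scratch file
    self.pullFile("/tmp/bblayers.conf", "/home/builder/build/conf/bblayers.conf")
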
    def setLayers(self, bitbakes, layers):
        """ a word of attention: by convention, the first layer for any build will be poky! """

@@ -124,8 +106,62 @@ class SSHBEController(BuildEnvironmentController):
        assert len(bitbakes) == 1
        # set layers in the layersource

        # 1. get a list of repos, and map dirpaths for each layer
        gitrepos = {}
        gitrepos[bitbakes[0].giturl] = []
        gitrepos[bitbakes[0].giturl].append( ("bitbake", bitbakes[0].dirpath, bitbakes[0].commit) )

        for layer in layers:
            # we don't process local URLs
            if layer.giturl.startswith("file://"):
                continue
            if layer.giturl not in gitrepos:
                gitrepos[layer.giturl] = []
            gitrepos[layer.giturl].append( (layer.name, layer.dirpath, layer.commit) )
        for giturl in gitrepos.keys():
            commitid = gitrepos[giturl][0][2]
            for e in gitrepos[giturl]:
                if commitid != e[2]:
                    raise BuildSetupException("More than one commit per git url, unsupported configuration")

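The mapping built above groups (name, dirpath, commit) triples per git URL, with the single-commit-per-URL rule enforced by the loop that follows; a shape sketch with hypothetical values:

    # gitrepos = {
    #     "git://git.example.com/poky": [
    #         ("bitbake",    "bitbake",    "abc123"),
    #         ("meta-yocto", "meta-yocto", "abc123"),  # same commit required
    #     ],
    # }
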
        layerlist = []

        # 2. checkout the repositories
        for giturl in gitrepos.keys():
            import os
            localdirname = os.path.join(self.be.sourcedir, _getgitcheckoutdirectoryname(giturl))
            print "DEBUG: giturl ", giturl, "checking out in current directory", localdirname

            # make sure our directory is a git repository
            if self._pathexists(localdirname):
                if not giturl in self._shellcmd("git remote -v", localdirname):
                    raise BuildSetupException("Existing git repository at %s, but with different remotes (not '%s'). Aborting." % (localdirname, giturl))
            else:
                self._shellcmd("git clone \"%s\" \"%s\"" % (giturl, localdirname))
            # checkout the needed commit
            commit = gitrepos[giturl][0][2]

            # branch magic name "HEAD" will inhibit checkout
            if commit != "HEAD":
                print "DEBUG: checking out commit ", commit, "to", localdirname
                self._shellcmd("git fetch --all && git checkout \"%s\"" % commit, localdirname)

            # take the localdirname as poky dir if we can find the oe-init-build-env
            if self.pokydirname is None and self._pathexists(os.path.join(localdirname, "oe-init-build-env")):
                print "DEBUG: selected poky dir name", localdirname
                self.pokydirname = localdirname

            # verify our repositories
            for name, dirpath, commit in gitrepos[giturl]:
                localdirpath = os.path.join(localdirname, dirpath)
                if not self._pathexists(localdirpath):
                    raise BuildSetupException("Cannot find layer git path '%s' in checked out repository '%s:%s'. Aborting." % (localdirpath, giturl, commit))

                if name != "bitbake":
                    layerlist.append(localdirpath)

        print "DEBUG: current layer list ", layerlist

        raise Exception("Not implemented: SSH setLayers")
        # 3. configure the build environment, so we have a conf/bblayers.conf
        assert self.pokydirname is not None
        self._setupBE()
@@ -135,15 +171,17 @@ class SSHBEController(BuildEnvironmentController):
        if not self._pathexists(bblayerconf):
            raise BuildSetupException("BE is not consistent: bblayers.conf file missing at %s" % bblayerconf)

        import uuid
        local_bblayerconf = "/tmp/" + str(uuid.uuid4()) + "-bblayer.conf"
        conflines = open(bblayerconf, "r").readlines()

        self.pullFile(bblayerconf, local_bblayerconf)
        bblayerconffile = open(bblayerconf, "w")
        for i in xrange(len(conflines)):
            if conflines[i].startswith("# line added by toaster"):
                i += 2
            else:
                bblayerconffile.write(conflines[i])

        BuildEnvironmentController._updateBBLayers(local_bblayerconf, layerlist)
        self.pushFile(local_bblayerconf, bblayerconf)

        os.unlink(local_bblayerconf)
        bblayerconffile.write("\n# line added by toaster build control\nBBLAYERS = \"" + " ".join(layerlist) + "\"")
        bblayerconffile.close()

        self.islayerset = True
        return True

@@ -44,7 +44,7 @@ class BEControllerTests(object):

        # test start server and stop
        self.assertTrue(socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect_ex((hostname, 8200)), "Port already occupied")
        bc.startBBServer("0:0")
        bc.startBBServer()
        self.assertFalse(socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect_ex((hostname, 8200)), "Server not answering")

        bc.stopBBServer()
@@ -57,8 +57,14 @@ class BEControllerTests(object):
        bc = self._getBEController(obe)
        bc.setLayers(BITBAKE_LAYERS, POKY_LAYERS) # setting layers, skip any layer info

        bbc = bc.getBBController("%d:%d" % (-1, obe.pk))
        bbc = bc.getBBController()
        self.assertTrue(isinstance(bbc, BitbakeController))
        # test set variable; use the no-build marker -1 for the BR value
        try:
            bbc.setVariable("TOASTER_BRBE", "%d:%d" % (-1, obe.pk))
        except Exception as e:
            self.fail("setVariable raised %s" % e)

        bc.stopBBServer()

        self._serverForceStop(bc)
@@ -130,7 +136,7 @@ class RunBuildsCommandTests(TestCase):

    def test_br_select(self):
        from orm.models import Project, Release, BitbakeVersion
        p = Project.objects.create_project("test", Release.objects.get_or_create(name = "HEAD", bitbake_version = BitbakeVersion.objects.get_or_create(name="HEAD", branch=Branch.objects.get_or_create(name="HEAD"))[0])[0])
        p = Project.objects.create_project("test", Release.objects.get_or_create(name = "HEAD", bitbake_version = BitbakeVersion.objects.get_or_create(name="HEAD", branch="HEAD")[0])[0])
        obr = BuildRequest.objects.create(state = BuildRequest.REQ_QUEUED, project = p)
        command = Command()
        br = command._selectBuildRequest()
@@ -141,22 +147,3 @@ class RunBuildsCommandTests(TestCase):
        self.assertTrue(br.state == BuildRequest.REQ_INPROGRESS, "Request is not updated")
        # no more selections possible here
        self.assertRaises(IndexError, command._selectBuildRequest)


class UtilityTests(TestCase):
    def test_reduce_path(self):
        from bldcontrol.management.commands.loadconf import _reduce_canon_path, _get_id_for_sourcetype

        self.assertTrue( _reduce_canon_path("/") == "/")
        self.assertTrue( _reduce_canon_path("/home/..") == "/")
        self.assertTrue( _reduce_canon_path("/home/../ana") == "/ana")
        self.assertTrue( _reduce_canon_path("/home/../ana/..") == "/")
        self.assertTrue( _reduce_canon_path("/home/ana/mihai/../maria") == "/home/ana/maria")

    def test_get_id_for_sourcetype(self):
        from bldcontrol.management.commands.loadconf import _reduce_canon_path, _get_id_for_sourcetype
        self.assertTrue( _get_id_for_sourcetype("layerindex") == 1)
        self.assertTrue( _get_id_for_sourcetype("local") == 0)
        self.assertTrue( _get_id_for_sourcetype("imported") == 2)
        with self.assertRaises(Exception):
            _get_id_for_sourcetype("unknown")

@@ -13,10 +13,7 @@ import json, os, re, urllib, shlex


class Tests(TestCase):
    # fixtures = ['orm_views_testdata.json']

    def setUp(self):
        raise Exception("The %s test data is no longer valid, tests disabled" % __name__)
    fixtures = ['orm_views_testdata.json']

    def test_builds(self):
        client = Client()

@@ -1,34 +1,17 @@
from django.contrib import admin
from django.contrib.admin.filters import RelatedFieldListFilter
from .models import BitbakeVersion, Release, LayerSource, ToasterSetting
from django.forms.widgets import Textarea
from django import forms
import django.db.models as models

from django.contrib.admin import widgets, helpers
from .models import Branch, LayerSource, ToasterSetting

class LayerSourceAdmin(admin.ModelAdmin):
    pass

class BitbakeVersionAdmin(admin.ModelAdmin):

    # we override the formfield for the db URLField because of broken URL validation

    def formfield_for_dbfield(self, db_field, **kwargs):
        if isinstance(db_field, models.fields.URLField):
            return forms.fields.CharField()
        return super(BitbakeVersionAdmin, self).formfield_for_dbfield(db_field, **kwargs)



class ReleaseAdmin(admin.ModelAdmin):
class BranchAdmin(admin.ModelAdmin):
    pass

class ToasterSettingAdmin(admin.ModelAdmin):
    pass

admin.site.register(LayerSource, LayerSourceAdmin)
admin.site.register(BitbakeVersion, BitbakeVersionAdmin)
admin.site.register(Release, ReleaseAdmin)
admin.site.register(Branch, BranchAdmin)
admin.site.register(ToasterSetting, ToasterSettingAdmin)

140835 bitbake/lib/toaster/orm/fixtures/orm_views_testdata.json (Normal file)
@@ -1,336 +0,0 @@
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):

        # Changing field 'Package.summary'
        db.alter_column(u'orm_package', 'summary', self.gf('django.db.models.fields.TextField')())

        # Changing field 'Layer.summary'
        db.alter_column(u'orm_layer', 'summary', self.gf('django.db.models.fields.TextField')(null=True))

        # Changing field 'Recipe.summary'
        db.alter_column(u'orm_recipe', 'summary', self.gf('django.db.models.fields.TextField')())

    def backwards(self, orm):

        # Changing field 'Package.summary'
        db.alter_column(u'orm_package', 'summary', self.gf('django.db.models.fields.CharField')(max_length=200))

        # Changing field 'Layer.summary'
        db.alter_column(u'orm_layer', 'summary', self.gf('django.db.models.fields.CharField')(max_length=200, null=True))

        # Changing field 'Recipe.summary'
        db.alter_column(u'orm_recipe', 'summary', self.gf('django.db.models.fields.CharField')(max_length=100))
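
Throughout these South migrations, self.gf(...) resolves a dotted path to a field class, so the frozen migration never has to import the app's models directly. Roughly (a simplification of South's actual helper, shown for orientation only):

# Simplified view of what South's gf() helper does; the real one lives in
# South and handles more edge cases than this.
def gf(self, field_path):
    module_path, class_name = field_path.rsplit('.', 1)
    module = __import__(module_path, {}, {}, [class_name])
    return getattr(module, class_name)

# so self.gf('django.db.models.fields.TextField')() instantiates a TextField
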

    models = {
        u'orm.bitbakeversion': {
            'Meta': {'object_name': 'BitbakeVersion'},
            'branch': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
            'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'giturl': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
        },
        u'orm.branch': {
            'Meta': {'unique_together': "(('layer_source', 'name'), ('layer_source', 'up_id'))", 'object_name': 'Branch'},
            'bitbake_branch': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'True', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
        },
        u'orm.build': {
            'Meta': {'object_name': 'Build'},
            'bitbake_version': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'build_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'completed_on': ('django.db.models.fields.DateTimeField', [], {}),
            'cooker_log_path': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
            'distro': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'distro_version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'errors_no': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'machine': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'outcome': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']", 'null': 'True'}),
            'started_on': ('django.db.models.fields.DateTimeField', [], {}),
            'timespent': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'warnings_no': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        },
        u'orm.helptext': {
            'Meta': {'object_name': 'HelpText'},
            'area': ('django.db.models.fields.IntegerField', [], {}),
            'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'helptext_build'", 'to': u"orm['orm.Build']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'key': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'text': ('django.db.models.fields.TextField', [], {})
        },
        u'orm.layer': {
            'Meta': {'unique_together': "(('layer_source', 'up_id'), ('layer_source', 'name'))", 'object_name': 'Layer'},
            'description': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer_index_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
            'local_path': ('django.db.models.fields.FilePathField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'summary': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}),
            'vcs_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
            'vcs_web_file_base_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'})
        },
        u'orm.layer_version': {
            'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'Layer_Version'},
            'branch': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
            'build': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'layer_version_build'", 'null': 'True', 'to': u"orm['orm.Build']"}),
            'commit': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'dirpath': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'layer_version_layer'", 'to': u"orm['orm.Layer']"}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
            'priority': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'up_branch': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.Branch']", 'null': 'True'}),
            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
        },
        u'orm.layersource': {
            'Meta': {'unique_together': "(('sourcetype', 'apiurl'),)", 'object_name': 'LayerSource'},
            'apiurl': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '63'}),
            'sourcetype': ('django.db.models.fields.IntegerField', [], {})
        },
        u'orm.layerversiondependency': {
            'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'LayerVersionDependency'},
            'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependees'", 'to': u"orm['orm.Layer_Version']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
            'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependencies'", 'to': u"orm['orm.Layer_Version']"}),
            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
        },
        u'orm.logmessage': {
            'Meta': {'object_name': 'LogMessage'},
            'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'level': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'lineno': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
            'message': ('django.db.models.fields.CharField', [], {'max_length': '240'}),
            'pathname': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
            'task': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Task']", 'null': 'True', 'blank': 'True'})
        },
        u'orm.machine': {
            'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'Machine'},
            'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
            'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']"}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
        },
        u'orm.package': {
            'Meta': {'object_name': 'Package'},
            'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'installed_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100'}),
            'installed_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'license': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'recipe': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Recipe']", 'null': 'True'}),
            'revision': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
            'section': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
            'size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
        },
        u'orm.package_dependency': {
            'Meta': {'object_name': 'Package_Dependency'},
            'dep_type': ('django.db.models.fields.IntegerField', [], {}),
            'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_dependencies_target'", 'to': u"orm['orm.Package']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_dependencies_source'", 'to': u"orm['orm.Package']"}),
            'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']", 'null': 'True'})
        },
        u'orm.package_file': {
            'Meta': {'object_name': 'Package_File'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'buildfilelist_package'", 'to': u"orm['orm.Package']"}),
            'path': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
            'size': ('django.db.models.fields.IntegerField', [], {})
        },
        u'orm.project': {
            'Meta': {'object_name': 'Project'},
            'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"}),
            'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'user_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
        },
        u'orm.projectlayer': {
            'Meta': {'object_name': 'ProjectLayer'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layercommit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']", 'null': 'True'}),
            'optional': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"})
        },
        u'orm.projecttarget': {
            'Meta': {'object_name': 'ProjectTarget'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
            'target': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'task': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
        },
        u'orm.projectvariable': {
            'Meta': {'object_name': 'ProjectVariable'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
            'value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
        },
        u'orm.recipe': {
            'Meta': {'object_name': 'Recipe'},
            'bugtracker': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'file_path': ('django.db.models.fields.FilePathField', [], {'max_length': '255'}),
            'homepage': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
            'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'recipe_layer_version'", 'to': u"orm['orm.Layer_Version']"}),
            'license': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'section': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}),
            'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
        },
        u'orm.recipe_dependency': {
            'Meta': {'object_name': 'Recipe_Dependency'},
            'dep_type': ('django.db.models.fields.IntegerField', [], {}),
            'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'r_dependencies_depends'", 'to': u"orm['orm.Recipe']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'recipe': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'r_dependencies_recipe'", 'to': u"orm['orm.Recipe']"})
        },
        u'orm.release': {
            'Meta': {'object_name': 'Release'},
            'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
            'branch': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
        },
        u'orm.releasedefaultlayer': {
            'Meta': {'object_name': 'ReleaseDefaultLayer'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer']"}),
            'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"})
        },
        u'orm.target': {
            'Meta': {'object_name': 'Target'},
            'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'is_image': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'license_manifest_path': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True'}),
            'target': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'orm.target_file': {
            'Meta': {'object_name': 'Target_File'},
            'directory': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'directory_set'", 'null': 'True', 'to': u"orm['orm.Target_File']"}),
            'group': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'inodetype': ('django.db.models.fields.IntegerField', [], {}),
            'owner': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'path': ('django.db.models.fields.FilePathField', [], {'max_length': '100'}),
            'permission': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
            'size': ('django.db.models.fields.IntegerField', [], {}),
            'sym_target': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'symlink_set'", 'null': 'True', 'to': u"orm['orm.Target_File']"}),
            'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
        },
        u'orm.target_image_file': {
            'Meta': {'object_name': 'Target_Image_File'},
            'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '254'}),
            'file_size': ('django.db.models.fields.IntegerField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
        },
        u'orm.target_installed_package': {
            'Meta': {'object_name': 'Target_Installed_Package'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'buildtargetlist_package'", 'to': u"orm['orm.Package']"}),
            'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
        },
        u'orm.task': {
            'Meta': {'ordering': "('order', 'recipe')", 'unique_together': "(('build', 'recipe', 'task_name'),)", 'object_name': 'Task'},
            'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_build'", 'to': u"orm['orm.Build']"}),
            'cpu_usage': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2'}),
            'disk_io': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
            'elapsed_time': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'line_number': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'logfile': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
            'message': ('django.db.models.fields.CharField', [], {'max_length': '240'}),
            'order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
            'outcome': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
            'path_to_sstate_obj': ('django.db.models.fields.FilePathField', [], {'max_length': '500', 'blank': 'True'}),
            'recipe': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'build_recipe'", 'to': u"orm['orm.Recipe']"}),
            'script_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'source_url': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
            'sstate_checksum': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'sstate_result': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'task_executed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'task_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'work_directory': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'})
        },
        u'orm.task_dependency': {
            'Meta': {'object_name': 'Task_Dependency'},
            'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_dependencies_depends'", 'to': u"orm['orm.Task']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_dependencies_task'", 'to': u"orm['orm.Task']"})
        },
        u'orm.toastersetting': {
            'Meta': {'object_name': 'ToasterSetting'},
            'helptext': ('django.db.models.fields.TextField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '63'}),
            'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        u'orm.toastersettingdefaultlayer': {
            'Meta': {'object_name': 'ToasterSettingDefaultLayer'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']"})
        },
        u'orm.variable': {
            'Meta': {'object_name': 'Variable'},
            'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'variable_build'", 'to': u"orm['orm.Build']"}),
            'changed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'human_readable_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'variable_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'variable_value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
        },
        u'orm.variablehistory': {
            'Meta': {'object_name': 'VariableHistory'},
            'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '255'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'line_number': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
            'operation': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
            'value': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'variable': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'vhistory'", 'to': u"orm['orm.Variable']"})
        }
    }

    complete_apps = ['orm']

@@ -1,336 +0,0 @@
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):
        # Adding field 'Layer.vcs_web_url'
        db.add_column(u'orm_layer', 'vcs_web_url',
                      self.gf('django.db.models.fields.URLField')(default=None, max_length=200, null=True),
                      keep_default=False)

        # Adding field 'Layer.vcs_web_tree_base_url'
        db.add_column(u'orm_layer', 'vcs_web_tree_base_url',
                      self.gf('django.db.models.fields.URLField')(default=None, max_length=200, null=True),
                      keep_default=False)

    def backwards(self, orm):
        # Deleting field 'Layer.vcs_web_url'
        db.delete_column(u'orm_layer', 'vcs_web_url')

        # Deleting field 'Layer.vcs_web_tree_base_url'
        db.delete_column(u'orm_layer', 'vcs_web_tree_base_url')
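
For orientation, the same schema change expressed with Django's built-in migration framework would look roughly as follows (illustrative only; this tree predates django.db.migrations and uses South):

# Illustrative django.db.migrations equivalent; not part of this source tree.
from django.db import migrations, models

operations = [
    migrations.AddField('layer', 'vcs_web_url',
                        models.URLField(default=None, max_length=200, null=True)),
    migrations.AddField('layer', 'vcs_web_tree_base_url',
                        models.URLField(default=None, max_length=200, null=True)),
]
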

    models = {
        u'orm.bitbakeversion': {
            'Meta': {'object_name': 'BitbakeVersion'},
            'branch': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
            'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'giturl': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
        },
        u'orm.branch': {
            'Meta': {'unique_together': "(('layer_source', 'name'), ('layer_source', 'up_id'))", 'object_name': 'Branch'},
            'bitbake_branch': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'True', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
        },
        u'orm.build': {
            'Meta': {'object_name': 'Build'},
            'bitbake_version': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'build_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'completed_on': ('django.db.models.fields.DateTimeField', [], {}),
            'cooker_log_path': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
            'distro': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'distro_version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'errors_no': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'machine': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'outcome': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']", 'null': 'True'}),
            'started_on': ('django.db.models.fields.DateTimeField', [], {}),
            'timespent': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'warnings_no': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        },
        u'orm.helptext': {
            'Meta': {'object_name': 'HelpText'},
            'area': ('django.db.models.fields.IntegerField', [], {}),
            'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'helptext_build'", 'to': u"orm['orm.Build']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'key': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'text': ('django.db.models.fields.TextField', [], {})
        },
        u'orm.layer': {
            'Meta': {'unique_together': "(('layer_source', 'up_id'), ('layer_source', 'name'))", 'object_name': 'Layer'},
            'description': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer_index_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
            'local_path': ('django.db.models.fields.FilePathField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'summary': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}),
            'vcs_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
            'vcs_web_file_base_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
            'vcs_web_tree_base_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
            'vcs_web_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'})
        },
        u'orm.layer_version': {
            'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'Layer_Version'},
            'branch': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
            'build': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'layer_version_build'", 'null': 'True', 'to': u"orm['orm.Build']"}),
            'commit': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'dirpath': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'layer_version_layer'", 'to': u"orm['orm.Layer']"}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
            'priority': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'up_branch': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.Branch']", 'null': 'True'}),
            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
        },
        u'orm.layersource': {
            'Meta': {'unique_together': "(('sourcetype', 'apiurl'),)", 'object_name': 'LayerSource'},
            'apiurl': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '63'}),
            'sourcetype': ('django.db.models.fields.IntegerField', [], {})
        },
        u'orm.layerversiondependency': {
            'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'LayerVersionDependency'},
            'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependees'", 'to': u"orm['orm.Layer_Version']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
            'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependencies'", 'to': u"orm['orm.Layer_Version']"}),
            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
        },
        u'orm.logmessage': {
            'Meta': {'object_name': 'LogMessage'},
            'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'level': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'lineno': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
            'message': ('django.db.models.fields.CharField', [], {'max_length': '240'}),
            'pathname': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
            'task': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Task']", 'null': 'True', 'blank': 'True'})
        },
        u'orm.machine': {
            'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'Machine'},
            'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
            'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']"}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
        },
        u'orm.package': {
            'Meta': {'object_name': 'Package'},
            'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'installed_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100'}),
            'installed_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'license': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'recipe': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Recipe']", 'null': 'True'}),
            'revision': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
            'section': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
            'size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
        },
        u'orm.package_dependency': {
            'Meta': {'object_name': 'Package_Dependency'},
            'dep_type': ('django.db.models.fields.IntegerField', [], {}),
            'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_dependencies_target'", 'to': u"orm['orm.Package']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_dependencies_source'", 'to': u"orm['orm.Package']"}),
            'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']", 'null': 'True'})
        },
        u'orm.package_file': {
            'Meta': {'object_name': 'Package_File'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'buildfilelist_package'", 'to': u"orm['orm.Package']"}),
            'path': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
            'size': ('django.db.models.fields.IntegerField', [], {})
        },
        u'orm.project': {
            'Meta': {'object_name': 'Project'},
            'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"}),
            'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'user_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
        },
        u'orm.projectlayer': {
            'Meta': {'object_name': 'ProjectLayer'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layercommit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']", 'null': 'True'}),
            'optional': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"})
        },
        u'orm.projecttarget': {
            'Meta': {'object_name': 'ProjectTarget'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
            'target': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'task': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
        },
        u'orm.projectvariable': {
            'Meta': {'object_name': 'ProjectVariable'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
            'value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
        },
        u'orm.recipe': {
            'Meta': {'object_name': 'Recipe'},
            'bugtracker': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'file_path': ('django.db.models.fields.FilePathField', [], {'max_length': '255'}),
            'homepage': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
            'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'recipe_layer_version'", 'to': u"orm['orm.Layer_Version']"}),
            'license': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'section': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}),
            'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
        },
        u'orm.recipe_dependency': {
            'Meta': {'object_name': 'Recipe_Dependency'},
            'dep_type': ('django.db.models.fields.IntegerField', [], {}),
            'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'r_dependencies_depends'", 'to': u"orm['orm.Recipe']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'recipe': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'r_dependencies_recipe'", 'to': u"orm['orm.Recipe']"})
        },
        u'orm.release': {
            'Meta': {'object_name': 'Release'},
            'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
            'branch': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
        },
        u'orm.releasedefaultlayer': {
            'Meta': {'object_name': 'ReleaseDefaultLayer'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer']"}),
            'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"})
        },
        u'orm.target': {
            'Meta': {'object_name': 'Target'},
            'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'is_image': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'license_manifest_path': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True'}),
            'target': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'orm.target_file': {
            'Meta': {'object_name': 'Target_File'},
            'directory': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'directory_set'", 'null': 'True', 'to': u"orm['orm.Target_File']"}),
            'group': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'inodetype': ('django.db.models.fields.IntegerField', [], {}),
            'owner': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'path': ('django.db.models.fields.FilePathField', [], {'max_length': '100'}),
            'permission': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
            'size': ('django.db.models.fields.IntegerField', [], {}),
            'sym_target': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'symlink_set'", 'null': 'True', 'to': u"orm['orm.Target_File']"}),
            'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
        },
        u'orm.target_image_file': {
            'Meta': {'object_name': 'Target_Image_File'},
            'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '254'}),
            'file_size': ('django.db.models.fields.IntegerField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
        },
        u'orm.target_installed_package': {
            'Meta': {'object_name': 'Target_Installed_Package'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'buildtargetlist_package'", 'to': u"orm['orm.Package']"}),
            'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
        },
        u'orm.task': {
            'Meta': {'ordering': "('order', 'recipe')", 'unique_together': "(('build', 'recipe', 'task_name'),)", 'object_name': 'Task'},
            'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_build'", 'to': u"orm['orm.Build']"}),
            'cpu_usage': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2'}),
            'disk_io': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
            'elapsed_time': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'line_number': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'logfile': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
            'message': ('django.db.models.fields.CharField', [], {'max_length': '240'}),
            'order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
            'outcome': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
            'path_to_sstate_obj': ('django.db.models.fields.FilePathField', [], {'max_length': '500', 'blank': 'True'}),
            'recipe': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'build_recipe'", 'to': u"orm['orm.Recipe']"}),
            'script_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'source_url': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
            'sstate_checksum': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'sstate_result': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'task_executed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'task_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'work_directory': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'})
        },
        u'orm.task_dependency': {
            'Meta': {'object_name': 'Task_Dependency'},
            'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_dependencies_depends'", 'to': u"orm['orm.Task']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_dependencies_task'", 'to': u"orm['orm.Task']"})
        },
        u'orm.toastersetting': {
            'Meta': {'object_name': 'ToasterSetting'},
            'helptext': ('django.db.models.fields.TextField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '63'}),
            'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        u'orm.toastersettingdefaultlayer': {
            'Meta': {'object_name': 'ToasterSettingDefaultLayer'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']"})
        },
        u'orm.variable': {
            'Meta': {'object_name': 'Variable'},
            'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'variable_build'", 'to': u"orm['orm.Build']"}),
            'changed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'human_readable_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'variable_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'variable_value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
        },
        u'orm.variablehistory': {
            'Meta': {'object_name': 'VariableHistory'},
            'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '255'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'line_number': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
            'operation': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
            'value': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'variable': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'vhistory'", 'to': u"orm['orm.Variable']"})
        }
    }

    complete_apps = ['orm']

@@ -1,359 +0,0 @@
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):
        # Adding field 'Release.helptext'
        db.add_column(u'orm_release', 'helptext',
                      self.gf('django.db.models.fields.TextField')(null=True),
                      keep_default=False)

        # Renaming column for 'Release.branch' to match new field type.
        db.delete_column(u'orm_release', 'branch')

        # Changing field 'Release.branch'
        db.add_column(u'orm_release', 'branch', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['orm.Branch'], default=-1))

        # Deleting field 'Branch.bitbake_branch'
        db.delete_column(u'orm_branch', 'bitbake_branch')

        # Adding unique constraint on 'Recipe', fields ['layer_version', 'file_path']
        db.create_unique(u'orm_recipe', ['layer_version_id', 'file_path'])

        # Adding unique constraint on 'ProjectLayer', fields ['project', 'layercommit']
        db.create_unique(u'orm_projectlayer', ['project_id', 'layercommit_id'])

    def backwards(self, orm):
        # Removing unique constraint on 'ProjectLayer', fields ['project', 'layercommit']
        db.delete_unique(u'orm_projectlayer', ['project_id', 'layercommit_id'])

        # Removing unique constraint on 'Recipe', fields ['layer_version', 'file_path']
        db.delete_unique(u'orm_recipe', ['layer_version_id', 'file_path'])

        # Deleting field 'Release.helptext'
        db.delete_column(u'orm_release', 'helptext')

        # Renaming column for 'Release.branch' to match new field type.
        db.rename_column(u'orm_release', 'branch_id', 'branch')

        # Changing field 'Release.branch'
        db.alter_column(u'orm_release', 'branch', self.gf('django.db.models.fields.CharField')(max_length=32))

        # Adding field 'Branch.bitbake_branch'
        db.add_column(u'orm_branch', 'bitbake_branch',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=50, blank=True),
                      keep_default=False)
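
Note the asymmetry in the branch handling above: forwards drops the varchar branch column and adds a ForeignKey, which South stores as branch_id at the database level, so backwards must first rename branch_id back to branch before altering its type. The default=-1 gives pre-existing rows a placeholder branch id that later setup code is expected to fix up. A hypothetical post-migration check (illustrative; the import path is assumed):

# Illustrative check one could run after migrating forwards (hypothetical):
from orm.models import Release

for release in Release.objects.filter(branch_id=-1):
    # rows created before this migration still carry the placeholder FK value
    print("release %s needs a real branch assigned" % release.name)
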

    models = {
        u'orm.bitbakeversion': {
            'Meta': {'object_name': 'BitbakeVersion'},
            'branch': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
            'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'giturl': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
        },
        u'orm.branch': {
            'Meta': {'unique_together': "(('layer_source', 'name'), ('layer_source', 'up_id'))", 'object_name': 'Branch'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'True', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
        },
        u'orm.build': {
            'Meta': {'object_name': 'Build'},
            'bitbake_version': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'build_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'completed_on': ('django.db.models.fields.DateTimeField', [], {}),
            'cooker_log_path': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
            'distro': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'distro_version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'errors_no': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'machine': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'outcome': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']", 'null': 'True'}),
            'started_on': ('django.db.models.fields.DateTimeField', [], {}),
            'timespent': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'warnings_no': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        },
        u'orm.helptext': {
            'Meta': {'object_name': 'HelpText'},
            'area': ('django.db.models.fields.IntegerField', [], {}),
            'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'helptext_build'", 'to': u"orm['orm.Build']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'key': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'text': ('django.db.models.fields.TextField', [], {})
        },
        u'orm.layer': {
            'Meta': {'unique_together': "(('layer_source', 'up_id'), ('layer_source', 'name'))", 'object_name': 'Layer'},
            'description': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer_index_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
            'local_path': ('django.db.models.fields.FilePathField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'summary': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}),
            'vcs_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
            'vcs_web_file_base_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
            'vcs_web_tree_base_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
            'vcs_web_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'})
        },
        u'orm.layer_version': {
            'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'Layer_Version'},
            'branch': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
            'build': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'layer_version_build'", 'null': 'True', 'to': u"orm['orm.Build']"}),
            'commit': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'dirpath': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'layer_version_layer'", 'to': u"orm['orm.Layer']"}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
            'priority': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'up_branch': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.Branch']", 'null': 'True'}),
            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
        },
        u'orm.layersource': {
            'Meta': {'unique_together': "(('sourcetype', 'apiurl'),)", 'object_name': 'LayerSource'},
            'apiurl': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '63'}),
            'sourcetype': ('django.db.models.fields.IntegerField', [], {})
        },
        u'orm.layerversiondependency': {
            'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'LayerVersionDependency'},
            'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependees'", 'to': u"orm['orm.Layer_Version']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
            'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependencies'", 'to': u"orm['orm.Layer_Version']"}),
            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
        },
        u'orm.logmessage': {
            'Meta': {'object_name': 'LogMessage'},
            'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'level': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'lineno': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
            'message': ('django.db.models.fields.CharField', [], {'max_length': '240'}),
            'pathname': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
            'task': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Task']", 'null': 'True', 'blank': 'True'})
        },
        u'orm.machine': {
            'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'Machine'},
            'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
|
||||
'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']"}),
|
||||
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
|
||||
'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
|
||||
'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
|
||||
},
|
||||
u'orm.package': {
|
||||
'Meta': {'object_name': 'Package'},
|
||||
'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
|
||||
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'installed_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100'}),
|
||||
'installed_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
|
||||
'license': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
|
||||
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
|
||||
'recipe': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Recipe']", 'null': 'True'}),
|
||||
'revision': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
|
||||
'section': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
|
||||
'size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
|
||||
'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
|
||||
'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
|
||||
},
|
||||
u'orm.package_dependency': {
|
||||
'Meta': {'object_name': 'Package_Dependency'},
|
||||
'dep_type': ('django.db.models.fields.IntegerField', [], {}),
|
||||
'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_dependencies_target'", 'to': u"orm['orm.Package']"}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_dependencies_source'", 'to': u"orm['orm.Package']"}),
|
||||
'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']", 'null': 'True'})
|
||||
},
|
||||
u'orm.package_file': {
|
||||
'Meta': {'object_name': 'Package_File'},
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'buildfilelist_package'", 'to': u"orm['orm.Package']"}),
|
||||
'path': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
|
||||
'size': ('django.db.models.fields.IntegerField', [], {})
|
||||
},
|
||||
u'orm.project': {
|
||||
'Meta': {'object_name': 'Project'},
|
||||
'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
|
||||
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
|
||||
'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"}),
|
||||
'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
|
||||
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
|
||||
'user_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
|
||||
},
|
||||
u'orm.projectlayer': {
|
||||
'Meta': {'unique_together': "(('project', 'layercommit'),)", 'object_name': 'ProjectLayer'},
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'layercommit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']", 'null': 'True'}),
|
||||
'optional': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
|
||||
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"})
|
||||
},
|
||||
u'orm.projecttarget': {
|
||||
'Meta': {'object_name': 'ProjectTarget'},
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
|
||||
'target': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
|
||||
'task': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
|
||||
},
|
||||
u'orm.projectvariable': {
|
||||
'Meta': {'object_name': 'ProjectVariable'},
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
|
||||
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
|
||||
'value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
|
||||
},
|
||||
u'orm.recipe': {
|
||||
'Meta': {'unique_together': "(('layer_version', 'file_path'),)", 'object_name': 'Recipe'},
|
||||
'bugtracker': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
|
||||
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
|
||||
'file_path': ('django.db.models.fields.FilePathField', [], {'max_length': '255'}),
|
||||
'homepage': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
|
||||
'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'recipe_layer_version'", 'to': u"orm['orm.Layer_Version']"}),
|
||||
'license': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
|
||||
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
|
||||
'section': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
|
||||
'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
|
||||
'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
|
||||
'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}),
|
||||
'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
|
||||
},
|
||||
u'orm.recipe_dependency': {
|
||||
'Meta': {'object_name': 'Recipe_Dependency'},
|
||||
'dep_type': ('django.db.models.fields.IntegerField', [], {}),
|
||||
'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'r_dependencies_depends'", 'to': u"orm['orm.Recipe']"}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'recipe': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'r_dependencies_recipe'", 'to': u"orm['orm.Recipe']"})
|
||||
},
|
||||
u'orm.release': {
|
||||
'Meta': {'object_name': 'Release'},
|
||||
'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
|
||||
'branch': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Branch']"}),
|
||||
'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
|
||||
'helptext': ('django.db.models.fields.TextField', [], {'null': 'True'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
|
||||
},
|
||||
u'orm.releasedefaultlayer': {
|
||||
'Meta': {'object_name': 'ReleaseDefaultLayer'},
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'layer': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer']"}),
|
||||
'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"})
|
||||
},
|
||||
u'orm.target': {
|
||||
'Meta': {'object_name': 'Target'},
|
||||
'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'image_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
|
||||
'is_image': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
|
||||
'license_manifest_path': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True'}),
|
||||
'target': ('django.db.models.fields.CharField', [], {'max_length': '100'})
|
||||
},
|
||||
u'orm.target_file': {
|
||||
'Meta': {'object_name': 'Target_File'},
|
||||
'directory': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'directory_set'", 'null': 'True', 'to': u"orm['orm.Target_File']"}),
|
||||
'group': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'inodetype': ('django.db.models.fields.IntegerField', [], {}),
|
||||
'owner': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
|
||||
'path': ('django.db.models.fields.FilePathField', [], {'max_length': '100'}),
|
||||
'permission': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
|
||||
'size': ('django.db.models.fields.IntegerField', [], {}),
|
||||
'sym_target': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'symlink_set'", 'null': 'True', 'to': u"orm['orm.Target_File']"}),
|
||||
'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
|
||||
},
|
||||
u'orm.target_image_file': {
|
||||
'Meta': {'object_name': 'Target_Image_File'},
|
||||
'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '254'}),
|
||||
'file_size': ('django.db.models.fields.IntegerField', [], {}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
|
||||
},
|
||||
u'orm.target_installed_package': {
|
||||
'Meta': {'object_name': 'Target_Installed_Package'},
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'buildtargetlist_package'", 'to': u"orm['orm.Package']"}),
|
||||
'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
|
||||
},
|
||||
u'orm.task': {
|
||||
'Meta': {'ordering': "('order', 'recipe')", 'unique_together': "(('build', 'recipe', 'task_name'),)", 'object_name': 'Task'},
|
||||
'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_build'", 'to': u"orm['orm.Build']"}),
|
||||
'cpu_usage': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2'}),
|
||||
'disk_io': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
|
||||
'elapsed_time': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'line_number': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
|
||||
'logfile': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
|
||||
'message': ('django.db.models.fields.CharField', [], {'max_length': '240'}),
|
||||
'order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
|
||||
'outcome': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
|
||||
'path_to_sstate_obj': ('django.db.models.fields.FilePathField', [], {'max_length': '500', 'blank': 'True'}),
|
||||
'recipe': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'build_recipe'", 'to': u"orm['orm.Recipe']"}),
|
||||
'script_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
|
||||
'source_url': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
|
||||
'sstate_checksum': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
|
||||
'sstate_result': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
|
||||
'task_executed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
|
||||
'task_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
|
||||
'work_directory': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'})
|
||||
},
|
||||
u'orm.task_dependency': {
|
||||
'Meta': {'object_name': 'Task_Dependency'},
|
||||
'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_dependencies_depends'", 'to': u"orm['orm.Task']"}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_dependencies_task'", 'to': u"orm['orm.Task']"})
|
||||
},
|
||||
u'orm.toastersetting': {
|
||||
'Meta': {'object_name': 'ToasterSetting'},
|
||||
'helptext': ('django.db.models.fields.TextField', [], {}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'name': ('django.db.models.fields.CharField', [], {'max_length': '63'}),
|
||||
'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
|
||||
},
|
||||
u'orm.toastersettingdefaultlayer': {
|
||||
'Meta': {'object_name': 'ToasterSettingDefaultLayer'},
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']"})
|
||||
},
|
||||
u'orm.variable': {
|
||||
'Meta': {'object_name': 'Variable'},
|
||||
'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'variable_build'", 'to': u"orm['orm.Build']"}),
|
||||
'changed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
|
||||
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
|
||||
'human_readable_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'variable_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
|
||||
'variable_value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
|
||||
},
|
||||
u'orm.variablehistory': {
|
||||
'Meta': {'object_name': 'VariableHistory'},
|
||||
'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '255'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'line_number': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
|
||||
'operation': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
|
||||
'value': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
|
||||
'variable': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'vhistory'", 'to': u"orm['orm.Variable']"})
|
||||
}
|
||||
}
|
||||
|
||||
complete_apps = ['orm']
|
||||
@@ -1,396 +0,0 @@
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):
        # Deleting model 'ToasterSettingDefaultLayer'
        db.delete_table(u'orm_toastersettingdefaultlayer')

        # Adding model 'ReleaseLayerSourcePriority'
        db.create_table(u'orm_releaselayersourcepriority', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('release', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['orm.Release'])),
            ('layer_source', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['orm.LayerSource'])),
            ('priority', self.gf('django.db.models.fields.IntegerField')(default=0)),
        ))
        db.send_create_signal(u'orm', ['ReleaseLayerSourcePriority'])

        # Adding unique constraint on 'ReleaseLayerSourcePriority', fields ['release', 'layer_source']
        db.create_unique(u'orm_releaselayersourcepriority', ['release_id', 'layer_source_id'])

        # Deleting field 'Release.branch'
        db.delete_column(u'orm_release', 'branch_id')

        # Adding field 'Release.branch_name'
        db.add_column(u'orm_release', 'branch_name',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=50),
                      keep_default=False)

        # Adding unique constraint on 'LayerSource', fields ['name']
        db.create_unique(u'orm_layersource', ['name'])

        # Deleting field 'ReleaseDefaultLayer.layer'
        db.delete_column(u'orm_releasedefaultlayer', 'layer_id')

        # Adding field 'ReleaseDefaultLayer.layer_name'
        db.add_column(u'orm_releasedefaultlayer', 'layer_name',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=100),
                      keep_default=False)


    def backwards(self, orm):
        # Removing unique constraint on 'LayerSource', fields ['name']
        db.delete_unique(u'orm_layersource', ['name'])

        # Removing unique constraint on 'ReleaseLayerSourcePriority', fields ['release', 'layer_source']
        db.delete_unique(u'orm_releaselayersourcepriority', ['release_id', 'layer_source_id'])

        # Adding model 'ToasterSettingDefaultLayer'
        db.create_table(u'orm_toastersettingdefaultlayer', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('layer_version', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['orm.Layer_Version'])),
        ))
        db.send_create_signal(u'orm', ['ToasterSettingDefaultLayer'])

        # Deleting model 'ReleaseLayerSourcePriority'
        db.delete_table(u'orm_releaselayersourcepriority')


        # User chose to not deal with backwards NULL issues for 'Release.branch'
        raise RuntimeError("Cannot reverse this migration. 'Release.branch' and its values cannot be restored.")

        # The following code is provided here to aid in writing a correct migration
        # Adding field 'Release.branch'
        db.add_column(u'orm_release', 'branch',
                      self.gf('django.db.models.fields.related.ForeignKey')(to=orm['orm.Branch']),
                      keep_default=False)

        # Deleting field 'Release.branch_name'
        db.delete_column(u'orm_release', 'branch_name')


        # User chose to not deal with backwards NULL issues for 'ReleaseDefaultLayer.layer'
        raise RuntimeError("Cannot reverse this migration. 'ReleaseDefaultLayer.layer' and its values cannot be restored.")

        # The following code is provided here to aid in writing a correct migration
        # Adding field 'ReleaseDefaultLayer.layer'
        db.add_column(u'orm_releasedefaultlayer', 'layer',
                      self.gf('django.db.models.fields.related.ForeignKey')(to=orm['orm.Layer']),
                      keep_default=False)

        # Deleting field 'ReleaseDefaultLayer.layer_name'
        db.delete_column(u'orm_releasedefaultlayer', 'layer_name')


    models = {
        u'orm.bitbakeversion': {
            'Meta': {'object_name': 'BitbakeVersion'},
            'branch': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
            'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'giturl': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
        },
        u'orm.branch': {
            'Meta': {'unique_together': "(('layer_source', 'name'), ('layer_source', 'up_id'))", 'object_name': 'Branch'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'True', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
        },
        u'orm.build': {
            'Meta': {'object_name': 'Build'},
            'bitbake_version': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'build_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'completed_on': ('django.db.models.fields.DateTimeField', [], {}),
            'cooker_log_path': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
            'distro': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'distro_version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'errors_no': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'machine': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'outcome': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']", 'null': 'True'}),
            'started_on': ('django.db.models.fields.DateTimeField', [], {}),
            'timespent': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'warnings_no': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        },
        u'orm.helptext': {
            'Meta': {'object_name': 'HelpText'},
            'area': ('django.db.models.fields.IntegerField', [], {}),
            'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'helptext_build'", 'to': u"orm['orm.Build']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'key': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'text': ('django.db.models.fields.TextField', [], {})
        },
        u'orm.layer': {
            'Meta': {'unique_together': "(('layer_source', 'up_id'), ('layer_source', 'name'))", 'object_name': 'Layer'},
            'description': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer_index_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
            'local_path': ('django.db.models.fields.FilePathField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'summary': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}),
            'vcs_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
            'vcs_web_file_base_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
            'vcs_web_tree_base_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
            'vcs_web_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'})
        },
        u'orm.layer_version': {
            'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'Layer_Version'},
            'branch': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
            'build': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'layer_version_build'", 'null': 'True', 'to': u"orm['orm.Build']"}),
            'commit': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'dirpath': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'layer_version_layer'", 'to': u"orm['orm.Layer']"}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
            'priority': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'up_branch': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.Branch']", 'null': 'True'}),
            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
        },
        u'orm.layersource': {
            'Meta': {'unique_together': "(('sourcetype', 'apiurl'),)", 'object_name': 'LayerSource'},
            'apiurl': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '63'}),
            'sourcetype': ('django.db.models.fields.IntegerField', [], {})
        },
        u'orm.layerversiondependency': {
            'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'LayerVersionDependency'},
            'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependees'", 'to': u"orm['orm.Layer_Version']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
            'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependencies'", 'to': u"orm['orm.Layer_Version']"}),
            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
        },
        u'orm.logmessage': {
            'Meta': {'object_name': 'LogMessage'},
            'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'level': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'lineno': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
            'message': ('django.db.models.fields.CharField', [], {'max_length': '240'}),
            'pathname': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
            'task': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Task']", 'null': 'True', 'blank': 'True'})
        },
        u'orm.machine': {
            'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'Machine'},
            'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
            'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']"}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
        },
        u'orm.package': {
            'Meta': {'object_name': 'Package'},
            'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'installed_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100'}),
            'installed_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'license': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'recipe': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Recipe']", 'null': 'True'}),
            'revision': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
            'section': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
            'size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
        },
        u'orm.package_dependency': {
            'Meta': {'object_name': 'Package_Dependency'},
            'dep_type': ('django.db.models.fields.IntegerField', [], {}),
            'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_dependencies_target'", 'to': u"orm['orm.Package']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_dependencies_source'", 'to': u"orm['orm.Package']"}),
            'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']", 'null': 'True'})
        },
        u'orm.package_file': {
            'Meta': {'object_name': 'Package_File'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'buildfilelist_package'", 'to': u"orm['orm.Package']"}),
            'path': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
            'size': ('django.db.models.fields.IntegerField', [], {})
        },
        u'orm.project': {
            'Meta': {'object_name': 'Project'},
            'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"}),
            'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'user_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
        },
        u'orm.projectlayer': {
            'Meta': {'unique_together': "(('project', 'layercommit'),)", 'object_name': 'ProjectLayer'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layercommit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']", 'null': 'True'}),
            'optional': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"})
        },
        u'orm.projecttarget': {
            'Meta': {'object_name': 'ProjectTarget'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
            'target': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'task': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
        },
        u'orm.projectvariable': {
            'Meta': {'object_name': 'ProjectVariable'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
            'value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
        },
        u'orm.recipe': {
            'Meta': {'unique_together': "(('layer_version', 'file_path'),)", 'object_name': 'Recipe'},
            'bugtracker': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'file_path': ('django.db.models.fields.FilePathField', [], {'max_length': '255'}),
            'homepage': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
            'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'recipe_layer_version'", 'to': u"orm['orm.Layer_Version']"}),
            'license': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'section': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}),
            'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
        },
        u'orm.recipe_dependency': {
            'Meta': {'object_name': 'Recipe_Dependency'},
            'dep_type': ('django.db.models.fields.IntegerField', [], {}),
            'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'r_dependencies_depends'", 'to': u"orm['orm.Recipe']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'recipe': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'r_dependencies_recipe'", 'to': u"orm['orm.Recipe']"})
        },
        u'orm.release': {
            'Meta': {'object_name': 'Release'},
            'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
            'branch_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50'}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'helptext': ('django.db.models.fields.TextField', [], {'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
        },
        u'orm.releasedefaultlayer': {
            'Meta': {'object_name': 'ReleaseDefaultLayer'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100'}),
            'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"})
        },
        u'orm.releaselayersourcepriority': {
            'Meta': {'unique_together': "(('release', 'layer_source'),)", 'object_name': 'ReleaseLayerSourcePriority'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.LayerSource']"}),
            'priority': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"})
        },
        u'orm.target': {
            'Meta': {'object_name': 'Target'},
            'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'is_image': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'license_manifest_path': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True'}),
            'target': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'orm.target_file': {
            'Meta': {'object_name': 'Target_File'},
            'directory': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'directory_set'", 'null': 'True', 'to': u"orm['orm.Target_File']"}),
            'group': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'inodetype': ('django.db.models.fields.IntegerField', [], {}),
            'owner': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'path': ('django.db.models.fields.FilePathField', [], {'max_length': '100'}),
            'permission': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
            'size': ('django.db.models.fields.IntegerField', [], {}),
            'sym_target': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'symlink_set'", 'null': 'True', 'to': u"orm['orm.Target_File']"}),
            'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
        },
        u'orm.target_image_file': {
            'Meta': {'object_name': 'Target_Image_File'},
            'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '254'}),
            'file_size': ('django.db.models.fields.IntegerField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
        },
        u'orm.target_installed_package': {
            'Meta': {'object_name': 'Target_Installed_Package'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'buildtargetlist_package'", 'to': u"orm['orm.Package']"}),
            'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
        },
        u'orm.task': {
            'Meta': {'ordering': "('order', 'recipe')", 'unique_together': "(('build', 'recipe', 'task_name'),)", 'object_name': 'Task'},
            'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_build'", 'to': u"orm['orm.Build']"}),
            'cpu_usage': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2'}),
            'disk_io': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
            'elapsed_time': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'line_number': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'logfile': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
            'message': ('django.db.models.fields.CharField', [], {'max_length': '240'}),
            'order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
            'outcome': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
            'path_to_sstate_obj': ('django.db.models.fields.FilePathField', [], {'max_length': '500', 'blank': 'True'}),
            'recipe': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'build_recipe'", 'to': u"orm['orm.Recipe']"}),
            'script_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'source_url': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
            'sstate_checksum': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'sstate_result': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'task_executed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'task_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'work_directory': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'})
        },
        u'orm.task_dependency': {
            'Meta': {'object_name': 'Task_Dependency'},
            'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_dependencies_depends'", 'to': u"orm['orm.Task']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_dependencies_task'", 'to': u"orm['orm.Task']"})
        },
        u'orm.toastersetting': {
            'Meta': {'object_name': 'ToasterSetting'},
            'helptext': ('django.db.models.fields.TextField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '63'}),
            'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        u'orm.variable': {
            'Meta': {'object_name': 'Variable'},
            'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'variable_build'", 'to': u"orm['orm.Build']"}),
            'changed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'human_readable_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'variable_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'variable_value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
        },
        u'orm.variablehistory': {
            'Meta': {'object_name': 'VariableHistory'},
            'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '255'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'line_number': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
            'operation': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
            'value': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'variable': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'vhistory'", 'to': u"orm['orm.Variable']"})
        }
    }

    complete_apps = ['orm']
@@ -1,331 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from south.utils import datetime_utils as datetime
|
||||
from south.db import db
|
||||
from south.v2 import SchemaMigration
|
||||
from django.db import models
|
||||
|
||||
|
||||
class Migration(SchemaMigration):
|
||||
|
||||
def forwards(self, orm):
|
||||
# Adding field 'Layer_Version.project'
|
||||
db.add_column(u'orm_layer_version', 'project',
|
||||
self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['orm.Project'], null=True),
|
||||
keep_default=False)
|
||||
|
||||
|
||||
def backwards(self, orm):
|
||||
# Deleting field 'Layer_Version.project'
|
||||
db.delete_column(u'orm_layer_version', 'project_id')
|
||||
|
||||
|
||||
models = {
|
||||
u'orm.bitbakeversion': {
|
||||
'Meta': {'object_name': 'BitbakeVersion'},
|
||||
'branch': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
|
||||
'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
|
||||
'giturl': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
|
||||
},
|
||||
u'orm.branch': {
|
||||
'Meta': {'unique_together': "(('layer_source', 'name'), ('layer_source', 'up_id'))", 'object_name': 'Branch'},
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'True', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
|
||||
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
|
||||
'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
|
||||
'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
|
||||
'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
|
||||
},
|
||||
u'orm.build': {
|
||||
'Meta': {'object_name': 'Build'},
|
||||
'bitbake_version': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
|
||||
'build_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
|
||||
'completed_on': ('django.db.models.fields.DateTimeField', [], {}),
|
||||
'cooker_log_path': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
|
||||
'distro': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
|
||||
'distro_version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
|
||||
'errors_no': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'machine': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
|
||||
'outcome': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
|
||||
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']", 'null': 'True'}),
|
||||
'started_on': ('django.db.models.fields.DateTimeField', [], {}),
|
||||
'timespent': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
|
||||
'warnings_no': ('django.db.models.fields.IntegerField', [], {'default': '0'})
|
||||
},
|
||||
u'orm.helptext': {
|
||||
'Meta': {'object_name': 'HelpText'},
|
||||
'area': ('django.db.models.fields.IntegerField', [], {}),
|
||||
'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'helptext_build'", 'to': u"orm['orm.Build']"}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'key': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
|
||||
'text': ('django.db.models.fields.TextField', [], {})
|
||||
},
|
||||
u'orm.layer': {
|
||||
'Meta': {'unique_together': "(('layer_source', 'up_id'), ('layer_source', 'name'))", 'object_name': 'Layer'},
|
||||
'description': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
|
||||
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
|
||||
'layer_index_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
|
||||
'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
|
||||
'local_path': ('django.db.models.fields.FilePathField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
|
||||
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
|
||||
'summary': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
|
||||
'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
|
||||
'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}),
|
||||
'vcs_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
|
||||
'vcs_web_file_base_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
|
||||
'vcs_web_tree_base_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
|
||||
'vcs_web_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'})
|
||||
},
|
||||
u'orm.layer_version': {
|
||||
'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'Layer_Version'},
|
||||
'branch': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
|
||||
'build': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'layer_version_build'", 'null': 'True', 'to': u"orm['orm.Build']"}),
|
||||
'commit': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'dirpath': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'layer_version_layer'", 'to': u"orm['orm.Layer']"}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
            'priority': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.Project']", 'null': 'True'}),
            'up_branch': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.Branch']", 'null': 'True'}),
            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
        },
        u'orm.layersource': {
            'Meta': {'unique_together': "(('sourcetype', 'apiurl'),)", 'object_name': 'LayerSource'},
            'apiurl': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '63'}),
            'sourcetype': ('django.db.models.fields.IntegerField', [], {})
        },
        u'orm.layerversiondependency': {
            'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'LayerVersionDependency'},
            'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependees'", 'to': u"orm['orm.Layer_Version']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
            'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependencies'", 'to': u"orm['orm.Layer_Version']"}),
            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
        },
        u'orm.logmessage': {
            'Meta': {'object_name': 'LogMessage'},
            'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'level': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'lineno': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
            'message': ('django.db.models.fields.CharField', [], {'max_length': '240'}),
            'pathname': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
            'task': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Task']", 'null': 'True', 'blank': 'True'})
        },
        u'orm.machine': {
            'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'Machine'},
            'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
            'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']"}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
        },
        u'orm.package': {
            'Meta': {'object_name': 'Package'},
            'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'installed_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100'}),
            'installed_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'license': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'recipe': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Recipe']", 'null': 'True'}),
            'revision': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
            'section': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
            'size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
        },
        u'orm.package_dependency': {
            'Meta': {'object_name': 'Package_Dependency'},
            'dep_type': ('django.db.models.fields.IntegerField', [], {}),
            'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_dependencies_target'", 'to': u"orm['orm.Package']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_dependencies_source'", 'to': u"orm['orm.Package']"}),
            'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']", 'null': 'True'})
        },
        u'orm.package_file': {
            'Meta': {'object_name': 'Package_File'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'buildfilelist_package'", 'to': u"orm['orm.Package']"}),
            'path': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
            'size': ('django.db.models.fields.IntegerField', [], {})
        },
        u'orm.project': {
            'Meta': {'object_name': 'Project'},
            'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"}),
            'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'user_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
        },
        u'orm.projectlayer': {
            'Meta': {'unique_together': "(('project', 'layercommit'),)", 'object_name': 'ProjectLayer'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layercommit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']", 'null': 'True'}),
            'optional': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"})
        },
        u'orm.projecttarget': {
            'Meta': {'object_name': 'ProjectTarget'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
            'target': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'task': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
        },
        u'orm.projectvariable': {
            'Meta': {'object_name': 'ProjectVariable'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
            'value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
        },
        u'orm.recipe': {
            'Meta': {'unique_together': "(('layer_version', 'file_path'),)", 'object_name': 'Recipe'},
            'bugtracker': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'file_path': ('django.db.models.fields.FilePathField', [], {'max_length': '255'}),
            'homepage': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
            'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'recipe_layer_version'", 'to': u"orm['orm.Layer_Version']"}),
            'license': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'section': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}),
            'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
        },
        u'orm.recipe_dependency': {
            'Meta': {'object_name': 'Recipe_Dependency'},
            'dep_type': ('django.db.models.fields.IntegerField', [], {}),
            'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'r_dependencies_depends'", 'to': u"orm['orm.Recipe']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'recipe': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'r_dependencies_recipe'", 'to': u"orm['orm.Recipe']"})
        },
        u'orm.release': {
            'Meta': {'object_name': 'Release'},
            'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
            'branch_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50'}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'helptext': ('django.db.models.fields.TextField', [], {'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
        },
        u'orm.releasedefaultlayer': {
            'Meta': {'object_name': 'ReleaseDefaultLayer'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100'}),
            'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"})
        },
        u'orm.releaselayersourcepriority': {
            'Meta': {'unique_together': "(('release', 'layer_source'),)", 'object_name': 'ReleaseLayerSourcePriority'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.LayerSource']"}),
            'priority': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"})
        },
        u'orm.target': {
            'Meta': {'object_name': 'Target'},
            'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'is_image': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'license_manifest_path': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True'}),
            'target': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'orm.target_file': {
            'Meta': {'object_name': 'Target_File'},
            'directory': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'directory_set'", 'null': 'True', 'to': u"orm['orm.Target_File']"}),
            'group': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'inodetype': ('django.db.models.fields.IntegerField', [], {}),
            'owner': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'path': ('django.db.models.fields.FilePathField', [], {'max_length': '100'}),
            'permission': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
            'size': ('django.db.models.fields.IntegerField', [], {}),
            'sym_target': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'symlink_set'", 'null': 'True', 'to': u"orm['orm.Target_File']"}),
            'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
        },
        u'orm.target_image_file': {
            'Meta': {'object_name': 'Target_Image_File'},
            'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '254'}),
            'file_size': ('django.db.models.fields.IntegerField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
        },
        u'orm.target_installed_package': {
            'Meta': {'object_name': 'Target_Installed_Package'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'buildtargetlist_package'", 'to': u"orm['orm.Package']"}),
            'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
        },
        u'orm.task': {
            'Meta': {'ordering': "('order', 'recipe')", 'unique_together': "(('build', 'recipe', 'task_name'),)", 'object_name': 'Task'},
            'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_build'", 'to': u"orm['orm.Build']"}),
            'cpu_usage': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2'}),
            'disk_io': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
            'elapsed_time': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'line_number': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'logfile': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
            'message': ('django.db.models.fields.CharField', [], {'max_length': '240'}),
            'order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
            'outcome': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
            'path_to_sstate_obj': ('django.db.models.fields.FilePathField', [], {'max_length': '500', 'blank': 'True'}),
            'recipe': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'build_recipe'", 'to': u"orm['orm.Recipe']"}),
            'script_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'source_url': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
            'sstate_checksum': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'sstate_result': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'task_executed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'task_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'work_directory': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'})
        },
        u'orm.task_dependency': {
            'Meta': {'object_name': 'Task_Dependency'},
            'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_dependencies_depends'", 'to': u"orm['orm.Task']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_dependencies_task'", 'to': u"orm['orm.Task']"})
        },
        u'orm.toastersetting': {
            'Meta': {'object_name': 'ToasterSetting'},
            'helptext': ('django.db.models.fields.TextField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '63'}),
            'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        u'orm.variable': {
            'Meta': {'object_name': 'Variable'},
            'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'variable_build'", 'to': u"orm['orm.Build']"}),
            'changed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'human_readable_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'variable_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'variable_value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
        },
        u'orm.variablehistory': {
            'Meta': {'object_name': 'VariableHistory'},
            'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '255'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'line_number': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
            'operation': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
            'value': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'variable': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'vhistory'", 'to': u"orm['orm.Variable']"})
        }
    }

    complete_apps = ['orm']
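Each South migration in this changeset freezes the entire orm app into a models dictionary like the one above, so the migration can run against the schema as it existed at that point in history. Inside forwards() and backwards() the frozen classes are reached through the orm argument rather than by importing the live models. A minimal sketch of that access pattern, assuming a hypothetical data migration (the class body and queryset are illustrative, not part of this changeset):

    # Hypothetical South data migration; orm['orm.Layer_Version'] resolves
    # against a frozen models dictionary like the one above.
    from south.v2 import DataMigration

    class Migration(DataMigration):

        def forwards(self, orm):
            for lv in orm['orm.Layer_Version'].objects.filter(priority=0):
                lv.priority = 10
                lv.save()

        def backwards(self, orm):
            orm['orm.Layer_Version'].objects.filter(priority=10).update(priority=0)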
@@ -1,342 +0,0 @@
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):
        # Adding model 'BuildArtifact'
        db.create_table(u'orm_buildartifact', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('build', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['orm.Build'])),
            ('file_name', self.gf('django.db.models.fields.FilePathField')(max_length=100)),
            ('file_size', self.gf('django.db.models.fields.IntegerField')()),
        ))
        db.send_create_signal(u'orm', ['BuildArtifact'])

    def backwards(self, orm):
        # Deleting model 'BuildArtifact'
        db.delete_table(u'orm_buildartifact')

    models = {
        u'orm.bitbakeversion': {
            'Meta': {'object_name': 'BitbakeVersion'},
            'branch': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
            'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'giturl': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
        },
        u'orm.branch': {
            'Meta': {'unique_together': "(('layer_source', 'name'), ('layer_source', 'up_id'))", 'object_name': 'Branch'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'True', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
        },
        u'orm.build': {
            'Meta': {'object_name': 'Build'},
            'bitbake_version': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'build_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'completed_on': ('django.db.models.fields.DateTimeField', [], {}),
            'cooker_log_path': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
            'distro': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'distro_version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'errors_no': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'machine': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'outcome': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']", 'null': 'True'}),
            'started_on': ('django.db.models.fields.DateTimeField', [], {}),
            'timespent': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'warnings_no': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        },
        u'orm.buildartifact': {
            'Meta': {'object_name': 'BuildArtifact'},
            'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
            'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '100'}),
            'file_size': ('django.db.models.fields.IntegerField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        u'orm.helptext': {
            'Meta': {'object_name': 'HelpText'},
            'area': ('django.db.models.fields.IntegerField', [], {}),
            'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'helptext_build'", 'to': u"orm['orm.Build']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'key': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'text': ('django.db.models.fields.TextField', [], {})
        },
        u'orm.layer': {
            'Meta': {'unique_together': "(('layer_source', 'up_id'), ('layer_source', 'name'))", 'object_name': 'Layer'},
            'description': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer_index_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
            'local_path': ('django.db.models.fields.FilePathField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'summary': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}),
            'vcs_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
            'vcs_web_file_base_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
            'vcs_web_tree_base_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
            'vcs_web_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'})
        },
        u'orm.layer_version': {
            'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'Layer_Version'},
            'branch': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
            'build': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'layer_version_build'", 'null': 'True', 'to': u"orm['orm.Build']"}),
            'commit': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'dirpath': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'layer_version_layer'", 'to': u"orm['orm.Layer']"}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
            'priority': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.Project']", 'null': 'True'}),
            'up_branch': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.Branch']", 'null': 'True'}),
            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
        },
        u'orm.layersource': {
            'Meta': {'unique_together': "(('sourcetype', 'apiurl'),)", 'object_name': 'LayerSource'},
            'apiurl': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '63'}),
            'sourcetype': ('django.db.models.fields.IntegerField', [], {})
        },
        u'orm.layerversiondependency': {
            'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'LayerVersionDependency'},
            'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependees'", 'to': u"orm['orm.Layer_Version']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
            'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependencies'", 'to': u"orm['orm.Layer_Version']"}),
            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
        },
        u'orm.logmessage': {
            'Meta': {'object_name': 'LogMessage'},
            'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'level': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'lineno': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
            'message': ('django.db.models.fields.CharField', [], {'max_length': '240'}),
            'pathname': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
            'task': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Task']", 'null': 'True', 'blank': 'True'})
        },
        u'orm.machine': {
            'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'Machine'},
            'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
            'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']"}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
        },
        u'orm.package': {
            'Meta': {'object_name': 'Package'},
            'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'installed_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100'}),
            'installed_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'license': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'recipe': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Recipe']", 'null': 'True'}),
            'revision': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
            'section': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
            'size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
        },
        u'orm.package_dependency': {
            'Meta': {'object_name': 'Package_Dependency'},
            'dep_type': ('django.db.models.fields.IntegerField', [], {}),
            'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_dependencies_target'", 'to': u"orm['orm.Package']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_dependencies_source'", 'to': u"orm['orm.Package']"}),
            'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']", 'null': 'True'})
        },
        u'orm.package_file': {
            'Meta': {'object_name': 'Package_File'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'buildfilelist_package'", 'to': u"orm['orm.Package']"}),
            'path': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
            'size': ('django.db.models.fields.IntegerField', [], {})
        },
        u'orm.project': {
            'Meta': {'object_name': 'Project'},
            'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"}),
            'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'user_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
        },
        u'orm.projectlayer': {
            'Meta': {'unique_together': "(('project', 'layercommit'),)", 'object_name': 'ProjectLayer'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layercommit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']", 'null': 'True'}),
            'optional': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"})
        },
        u'orm.projecttarget': {
            'Meta': {'object_name': 'ProjectTarget'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
            'target': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'task': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
        },
        u'orm.projectvariable': {
            'Meta': {'object_name': 'ProjectVariable'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
            'value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
        },
        u'orm.recipe': {
            'Meta': {'unique_together': "(('layer_version', 'file_path'),)", 'object_name': 'Recipe'},
            'bugtracker': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'file_path': ('django.db.models.fields.FilePathField', [], {'max_length': '255'}),
            'homepage': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
            'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'recipe_layer_version'", 'to': u"orm['orm.Layer_Version']"}),
            'license': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'section': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
            'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}),
            'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
        },
        u'orm.recipe_dependency': {
            'Meta': {'object_name': 'Recipe_Dependency'},
            'dep_type': ('django.db.models.fields.IntegerField', [], {}),
            'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'r_dependencies_depends'", 'to': u"orm['orm.Recipe']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'recipe': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'r_dependencies_recipe'", 'to': u"orm['orm.Recipe']"})
        },
        u'orm.release': {
            'Meta': {'object_name': 'Release'},
            'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
            'branch_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50'}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'helptext': ('django.db.models.fields.TextField', [], {'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
        },
        u'orm.releasedefaultlayer': {
            'Meta': {'object_name': 'ReleaseDefaultLayer'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100'}),
            'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"})
        },
        u'orm.releaselayersourcepriority': {
            'Meta': {'unique_together': "(('release', 'layer_source'),)", 'object_name': 'ReleaseLayerSourcePriority'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.LayerSource']"}),
            'priority': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"})
        },
        u'orm.target': {
            'Meta': {'object_name': 'Target'},
            'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'is_image': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'license_manifest_path': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True'}),
            'target': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'orm.target_file': {
            'Meta': {'object_name': 'Target_File'},
            'directory': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'directory_set'", 'null': 'True', 'to': u"orm['orm.Target_File']"}),
            'group': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'inodetype': ('django.db.models.fields.IntegerField', [], {}),
            'owner': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'path': ('django.db.models.fields.FilePathField', [], {'max_length': '100'}),
            'permission': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
            'size': ('django.db.models.fields.IntegerField', [], {}),
            'sym_target': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'symlink_set'", 'null': 'True', 'to': u"orm['orm.Target_File']"}),
            'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
        },
        u'orm.target_image_file': {
            'Meta': {'object_name': 'Target_Image_File'},
            'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '254'}),
            'file_size': ('django.db.models.fields.IntegerField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
        },
        u'orm.target_installed_package': {
            'Meta': {'object_name': 'Target_Installed_Package'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'buildtargetlist_package'", 'to': u"orm['orm.Package']"}),
            'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
        },
        u'orm.task': {
            'Meta': {'ordering': "('order', 'recipe')", 'unique_together': "(('build', 'recipe', 'task_name'),)", 'object_name': 'Task'},
            'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_build'", 'to': u"orm['orm.Build']"}),
            'cpu_usage': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2'}),
            'disk_io': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
            'elapsed_time': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'line_number': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'logfile': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
            'message': ('django.db.models.fields.CharField', [], {'max_length': '240'}),
            'order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
            'outcome': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
            'path_to_sstate_obj': ('django.db.models.fields.FilePathField', [], {'max_length': '500', 'blank': 'True'}),
            'recipe': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'build_recipe'", 'to': u"orm['orm.Recipe']"}),
            'script_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'source_url': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
            'sstate_checksum': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'sstate_result': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'task_executed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'task_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'work_directory': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'})
        },
        u'orm.task_dependency': {
            'Meta': {'object_name': 'Task_Dependency'},
            'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_dependencies_depends'", 'to': u"orm['orm.Task']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_dependencies_task'", 'to': u"orm['orm.Task']"})
        },
        u'orm.toastersetting': {
            'Meta': {'object_name': 'ToasterSetting'},
            'helptext': ('django.db.models.fields.TextField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '63'}),
            'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        u'orm.variable': {
            'Meta': {'object_name': 'Variable'},
            'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'variable_build'", 'to': u"orm['orm.Build']"}),
            'changed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'human_readable_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'variable_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'variable_value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
        },
        u'orm.variablehistory': {
            'Meta': {'object_name': 'VariableHistory'},
            'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '255'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'line_number': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
            'operation': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
            'value': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'variable': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'vhistory'", 'to': u"orm['orm.Variable']"})
        }
    }

    complete_apps = ['orm']
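The deleted migration above only creates and drops the orm_buildartifact table, so it applies and reverts cleanly. A sketch of driving it through South's management commands, assuming a configured DJANGO_SETTINGS_MODULE and south in INSTALLED_APPS; the migration number is illustrative:

    # Apply and roll back the BuildArtifact schema migration.
    from django.core.management import call_command

    call_command('migrate', 'orm')          # forwards(): creates orm_buildartifact
    call_command('migrate', 'orm', '0018')  # backwards(): drops the table again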
@@ -20,39 +20,17 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

from django.db import models
from django.db.models import F, Q
from django.db.models import F
from django.utils.encoding import python_2_unicode_compatible
from django.utils import timezone


from django.core import validators
from django.conf import settings

class GitURLValidator(validators.URLValidator):
    import re
    regex = re.compile(
        r'^(?:ssh|git|http|ftp)s?://'  # http:// or https://
        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'  # domain...
        r'localhost|'  # localhost...
        r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}|'  # ...or ipv4
        r'\[?[A-F0-9]*:[A-F0-9:]+\]?)'  # ...or ipv6
        r'(?::\d+)?'  # optional port
        r'(?:/?|[/?]\S+)$', re.IGNORECASE)

def GitURLField(**kwargs):
    r = models.URLField(**kwargs)
    for i in xrange(len(r.validators)):
        if isinstance(r.validators[i], validators.URLValidator):
            r.validators[i] = GitURLValidator()
    return r
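GitURLValidator only widens the scheme alternation of the stock URLValidator (adding ssh:// and git:// alongside http/ftp), and GitURLField swaps it into the validator list of an ordinary URLField. A quick behavioural check, assuming a configured Django environment; the repository URLs are illustrative:

    from django.core.exceptions import ValidationError

    v = GitURLValidator()
    v('git://git.example.com/poky.git')        # accepted: git:// is in the widened scheme group
    try:
        v('rsync://git.example.com/poky.git')  # rejected: rsync is not in the alternation
    except ValidationError:
        pass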

class ToasterSetting(models.Model):
    name = models.CharField(max_length=63)
    helptext = models.TextField()
    value = models.CharField(max_length=255)

    def __unicode__(self):
        return "Setting %s = %s" % (self.name, self.value)

class ToasterSettingDefaultLayer(models.Model):
    layer_version = models.ForeignKey('Layer_Version')

class ProjectManager(models.Manager):
    def create_project(self, name, release):
@@ -65,16 +43,12 @@ class ProjectManager(models.Manager):
                name = name,
                value = defaultconf.value)

        for rdl in release.releasedefaultlayer_set.all():
            try:
                lv = Layer_Version.objects.filter(layer__name = rdl.layer_name, up_branch__name = release.branch_name)[0].get_equivalents_wpriority(prj)[0]
                ProjectLayer.objects.create( project = prj,
        for layer in map(lambda x: x.layer, ReleaseDefaultLayer.objects.filter(release = release)):
            for branches in Branch.objects.filter(name = release.branch):
                for lv in Layer_Version.objects.filter(layer = layer, up_branch = branches ):
                    ProjectLayer.objects.create( project = prj,
                                                 layercommit = lv,
                                                 optional = False )
            except IndexError:
                # we may have no valid layer version objects, and that's ok
                pass

        return prj
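Projects are meant to be created only through this manager method, which also seeds the release's default variables and layers; plain get_or_create() is deliberately blocked just below. A minimal usage sketch, assuming a Release named 'dizzy' already exists in the database:

    from orm.models import Project, Release

    release = Release.objects.get(name='dizzy')  # the release name is illustrative
    prj = Project.objects.create_project(name='my-image', release=release)
    # prj now carries the release's default ProjectVariable and ProjectLayer rows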
@@ -85,7 +59,6 @@ class ProjectManager(models.Manager):
        raise Exception("Invalid call to Project.objects.get_or_create. Use Project.objects.create_project() to create a project")

class Project(models.Model):
    search_allowed_fields = ['name', 'short_description', 'release__name', 'release__branch_name']
    name = models.CharField(max_length=100)
    short_description = models.CharField(max_length=50, blank=True)
    bitbake_version = models.ForeignKey('BitbakeVersion')
@@ -99,115 +72,25 @@ class Project(models.Model):
    user_id = models.IntegerField(null = True)
    objects = ProjectManager()

    def __unicode__(self):
        return "%s (Release %s, BBV %s)" % (self.name, self.release, self.bitbake_version)

    def get_current_machine_name(self):
        try:
            return self.projectvariable_set.get(name="MACHINE").value
        except (ProjectVariable.DoesNotExist, IndexError):
            return( "None" );

    def get_number_of_builds(self):
        try:
            return len(Build.objects.filter( project = self.id ))
        except (Build.DoesNotExist, IndexError):
            return( 0 )

    def get_last_build_id(self):
        try:
            return Build.objects.filter( project = self.id ).order_by('-completed_on')[0].id
        except (Build.DoesNotExist, IndexError):
            return( -1 )

    def get_last_outcome(self):
        build_id = self.get_last_build_id()
        if (-1 == build_id):
            return( "" )
        try:
            return Build.objects.filter( id = build_id )[ 0 ].outcome
        except (Build.DoesNotExist, IndexError):
            return( "not_found" )

    def get_last_target(self):
        build_id = self.get_last_build_id()
        if (-1 == build_id):
            return( "" )
        try:
            return Target.objects.filter(build = build_id)[0].target
        except (Target.DoesNotExist, IndexError):
            return( "not_found" )

    def get_last_errors(self):
        build_id = self.get_last_build_id()
        if (-1 == build_id):
            return( 0 )
        try:
            return Build.objects.filter(id = build_id)[ 0 ].errors_no
        except (Build.DoesNotExist, IndexError):
            return( "not_found" )

    def get_last_warnings(self):
        build_id = self.get_last_build_id()
        if (-1 == build_id):
            return( 0 )
        try:
            return Build.objects.filter(id = build_id)[ 0 ].warnings_no
        except (Build.DoesNotExist, IndexError):
            return( "not_found" )

    def get_last_imgfiles(self):
        build_id = self.get_last_build_id()
        if (-1 == build_id):
            return( "" )
        try:
            return Variable.objects.filter(build = build_id, variable_name = "IMAGE_FSTYPES")[ 0 ].variable_value
        except (Variable.DoesNotExist, IndexError):
            return( "not_found" )

    # returns a queryset of compatible layers for a project
    def compatible_layerversions(self, release = None, layer_name = None):
        if release == None:
            release = self.release
        # layers on the same branch or layers specifically set for this project
        queryset = Layer_Version.objects.filter((Q(up_branch__name = release.branch_name) & Q(project = None)) | Q(project = self) | Q(build__project = self))
        if layer_name is not None:
            # we select only a layer name
            queryset = queryset.filter(layer__name = layer_name)

        # order by layer version priority
        queryset = queryset.filter(layer_source__releaselayersourcepriority__release = release).order_by("-layer_source__releaselayersourcepriority__priority")

        return queryset
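compatible_layerversions() ORs three Q clauses: shared layer versions on the release branch (project is None), versions imported into this project, and versions recorded by this project's builds, then restricts and orders them by the release's layer-source priority. Restated on its own for readability (a sketch of the same filter as inside the method, not new behaviour):

    from django.db.models import Q

    shared   = Q(up_branch__name=release.branch_name) & Q(project=None)
    imported = Q(project=self)
    built    = Q(build__project=self)
    queryset = Layer_Version.objects.filter(shared | imported | built)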

    # returns a set of layer-equivalent set of layers already in project
    def projectlayer_equivalent_set(self):
        return [j for i in [x.layercommit.get_equivalents_wpriority(self) for x in self.projectlayer_set.all().select_related("up_branch")] for j in i]

    def schedule_build(self):
        from bldcontrol.models import BuildRequest, BRTarget, BRLayer, BRVariable, BRBitbake
        br = BuildRequest.objects.create(project = self)
        try:

            BRBitbake.objects.create(req = br,
                                     giturl = self.bitbake_version.giturl,
                                     commit = self.bitbake_version.branch,
                                     dirpath = self.bitbake_version.dirpath)
        BRBitbake.objects.create(req = br,
                                 giturl = self.bitbake_version.giturl,
                                 commit = self.bitbake_version.branch,
                                 dirpath = self.bitbake_version.dirpath)

            for l in self.projectlayer_set.all().order_by("pk"):
                commit = l.layercommit.get_vcs_reference()
                print("ii Building layer ", l.layercommit.layer.name, " at vcs point ", commit)
                BRLayer.objects.create(req = br, name = l.layercommit.layer.name, giturl = l.layercommit.layer.vcs_url, commit = commit, dirpath = l.layercommit.dirpath)
            for t in self.projecttarget_set.all():
                BRTarget.objects.create(req = br, target = t.target, task = t.task)
            for v in self.projectvariable_set.all():
                BRVariable.objects.create(req = br, name = v.name, value = v.value)
        for l in self.projectlayer_set.all():
            BRLayer.objects.create(req = br, name = l.layercommit.layer.name, giturl = l.layercommit.layer.vcs_url, commit = l.layercommit.commit, dirpath = l.layercommit.dirpath)
        for t in self.projecttarget_set.all():
            BRTarget.objects.create(req = br, target = t.target, task = t.task)
        for v in self.projectvariable_set.all():
            BRVariable.objects.create(req = br, name = v.name, value = v.value)

            br.state = BuildRequest.REQ_QUEUED
            br.save()
        except Exception as e:
            br.delete()
            raise e
        br.state = BuildRequest.REQ_QUEUED
        br.save()
        return br
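schedule_build() snapshots the project's bitbake version, layers, targets and variables into child rows of a BuildRequest, and only flips the request to REQ_QUEUED once everything is recorded, deleting the half-built request if any step raises. A usage sketch; the related-set accessor names below follow Django's default naming and are an assumption, not confirmed by this diff:

    from bldcontrol.models import BuildRequest

    br = prj.schedule_build()  # prj is a Project instance
    assert br.state == BuildRequest.REQ_QUEUED
    # br.brlayer_set, br.brtarget_set and br.brvariable_set now mirror the project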
class Build(models.Model):
@@ -248,10 +131,10 @@ class Build(models.Model):

    def eta(self):
        from django.utils import timezone
        eta = timezone.now()
        eta = 0
        completeper = self.completeper()
        if self.completeper() > 0:
            eta += ((eta - self.started_on)*(100-completeper))/completeper
            eta = timezone.now() + ((timezone.now() - self.started_on)*(100-completeper)/completeper)
        return eta
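Both variants of eta() extrapolate linearly: if completeper percent of the build took (now - started_on), the remaining (100 - completeper) percent is assumed to take proportionally as long. The same arithmetic with concrete numbers:

    from datetime import datetime

    started_on  = datetime(2014, 1, 1, 12, 0)
    now         = datetime(2014, 1, 1, 12, 30)  # 30 minutes elapsed
    completeper = 60                            # build reports 60% complete
    eta = now + (now - started_on) * (100 - completeper) / completeper
    # 30 min * 40 / 60 = 20 more minutes, so eta is 12:50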

@@ -259,38 +142,12 @@
        tgts = Target.objects.filter(build_id = self.id).order_by( 'target' );
        return( tgts );

    @property
    def toaster_exceptions(self):
        return self.logmessage_set.filter(level=LogMessage.EXCEPTION)


# an Artifact is anything that results from a Build, may be of interest to the user, and is not stored elsewhere
class BuildArtifact(models.Model):
    build = models.ForeignKey(Build)
    file_name = models.FilePathField()
    file_size = models.IntegerField()

    def get_local_file_name(self):
        try:
            deploydir = Variable.objects.get(build = self.build, variable_name="DEPLOY_DIR").variable_value
            return self.file_name[len(deploydir)+1:]
        except:
            raise

        return self.file_name

    def is_available(self):
        if settings.MANAGED and self.build.project is not None:
            return self.build.buildrequest.environment.has_artifact(self.file_name)
        return False

class ProjectTarget(models.Model):
    project = models.ForeignKey(Project)
    target = models.CharField(max_length=100)
    task = models.CharField(max_length=100, null=True)

@python_2_unicode_compatible
class Target(models.Model):
    search_allowed_fields = ['target', 'file_name']
    build = models.ForeignKey(Build)
@@ -302,7 +159,7 @@ class Target(models.Model):
    def package_count(self):
        return Target_Installed_Package.objects.filter(target_id__exact=self.id).count()

    def __unicode__(self):
    def __str__(self):
        return self.target

class Target_Image_File(models.Model):
@@ -338,6 +195,10 @@ class Target_File(models.Model):
    sym_target = models.ForeignKey('Target_File', related_name="symlink_set", null=True)


class TaskManager(models.Manager):
    def related_setscene(self, task_object):
        return Task.objects.filter(task_executed=True, build = task_object.build, recipe = task_object.recipe, task_name=task_object.task_name+"_setscene")
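related_setscene() leans on bitbake's naming convention: the sstate ("setscene") variant of a task is the task name with _setscene appended, so matching task_name + "_setscene" within the same build and recipe finds the twin. A sketch, assuming task is an existing Task row:

    setscenes = Task.objects.related_setscene(task)
    # for task.task_name == 'do_populate_sysroot' this matches executed
    # 'do_populate_sysroot_setscene' tasks of the same build and recipe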

class Task(models.Model):

    SSTATE_NA = 0
@@ -392,8 +253,10 @@ class Task(models.Model):

    search_allowed_fields = [ "recipe__name", "recipe__version", "task_name", "logfile" ]

    objects = TaskManager()

    def get_related_setscene(self):
        return Task.objects.filter(task_executed=True, build = self.build, recipe = self.recipe, task_name=self.task_name+"_setscene")
        return Task.objects.related_setscene(self)

    def get_outcome_text(self):
        return Task.TASK_OUTCOME[self.outcome + 1][1]
@@ -413,14 +276,11 @@ class Task(models.Model):
        return "Not Executed"

    def get_description(self):
        if '_helptext' in vars(self) and self._helptext != None:
            return self._helptext
        helptext = HelpText.objects.filter(key=self.task_name, area=HelpText.VARIABLE, build=self.build)
        try:
            self._helptext = HelpText.objects.get(key=self.task_name, area=HelpText.VARIABLE, build=self.build).text
        except HelpText.DoesNotExist:
            self._helptext = None

        return self._helptext
            return helptext[0].text
        except IndexError:
            return ''

    build = models.ForeignKey(Build, related_name='task_build')
    order = models.IntegerField(null=True)
@@ -428,7 +288,7 @@ class Task(models.Model):
    outcome = models.IntegerField(choices=TASK_OUTCOME, default=OUTCOME_NA)
    sstate_checksum = models.CharField(max_length=100, blank=True)
    path_to_sstate_obj = models.FilePathField(max_length=500, blank=True)
    recipe = models.ForeignKey('Recipe', related_name='tasks')
    recipe = models.ForeignKey('Recipe', related_name='build_recipe')
    task_name = models.CharField(max_length=100)
    source_url = models.FilePathField(max_length=255, blank=True)
    work_directory = models.FilePathField(max_length=255, blank=True)
@@ -444,9 +304,6 @@ class Task(models.Model):
    outcome_text = property(get_outcome_text)
    sstate_text = property(get_sstate_text)

    def __unicode__(self):
        return "%d(%d) %s:%s" % (self.pk, self.build.pk, self.recipe.name, self.task_name)

    class Meta:
        ordering = ('order', 'recipe' ,)
        unique_together = ('build', 'recipe', 'task_name', )
@@ -464,7 +321,7 @@ class Package(models.Model):
    installed_name = models.CharField(max_length=100, default='')
    version = models.CharField(max_length=100, blank=True)
    revision = models.CharField(max_length=32, blank=True)
    summary = models.TextField(blank=True)
    summary = models.CharField(max_length=200, blank=True)
    description = models.TextField(blank=True)
    size = models.IntegerField(default=0)
    installed_size = models.IntegerField(default=0)
@@ -497,10 +354,10 @@ class Package_Dependency(models.Model):
        (TYPE_RREPLACES, "replaces"),
        (TYPE_RCONFLICTS, "conflicts"),
    )
    """ Indexed by dep_type, in view order, key for short name and help
    ''' Indexed by dep_type, in view order, key for short name and help
        description which when viewed will be printf'd with the
        package name.
    """
    '''
    DEPENDS_DICT = {
        TYPE_RDEPENDS : ("depends", "%s is required to run %s"),
        TYPE_TRDEPENDS : ("depends", "%s is required to run %s"),
@@ -528,7 +385,7 @@ class Package_File(models.Model):
    size = models.IntegerField()

class Recipe(models.Model):
    search_allowed_fields = ['name', 'version', 'file_path', 'section', 'summary', 'description', 'license', 'layer_version__layer__name', 'layer_version__branch', 'layer_version__commit', 'layer_version__layer__local_path', 'layer_version__layer_source__name']
    search_allowed_fields = ['name', 'version', 'file_path', 'section', 'license', 'layer_version__layer__name', 'layer_version__branch', 'layer_version__commit', 'layer_version__layer__local_path']

    layer_source = models.ForeignKey('LayerSource', default = None, null = True) # from where did we get this recipe
    up_id = models.IntegerField(null = True, default = None) # id of entry in the source
@@ -537,7 +394,7 @@
    name = models.CharField(max_length=100, blank=True) # pn
    version = models.CharField(max_length=100, blank=True) # pv
    layer_version = models.ForeignKey('Layer_Version', related_name='recipe_layer_version')
    summary = models.TextField(blank=True)
    summary = models.CharField(max_length=100, blank=True)
    description = models.TextField(blank=True)
    section = models.CharField(max_length=100, blank=True)
    license = models.CharField(max_length=200, blank=True)
@@ -545,6 +402,11 @@
    bugtracker = models.URLField(blank=True)
    file_path = models.FilePathField(max_length=255)

    def get_vcs_link_url(self):
        if self.layer_version.layer.vcs_web_file_base_url is None:
            return ""
        return self.layer_version.layer.vcs_web_file_base_url.replace('%path%', self.file_path).replace('%branch%', self.layer_version.up_branch.name)
|
||||
def get_layersource_view_url(self):
|
||||
if self.layer_source is None:
|
||||
return ""
|
||||
@@ -555,15 +417,6 @@ class Recipe(models.Model):
|
||||
def __unicode__(self):
|
||||
return "Recipe " + self.name + ":" + self.version
|
||||
|
||||
def get_local_path(self):
|
||||
if settings.MANAGED and self.layer_version.build.project is not None:
|
||||
return self.file_path[len(self.layer_version.layer.local_path)+1:]
|
||||
|
||||
return self.file_path
|
||||
|
||||
class Meta:
|
||||
unique_together = ("layer_version", "file_path")
|
||||
|
||||
class Recipe_DependencyManager(models.Manager):
|
||||
use_for_related_fields = True
|
||||
|
||||
@@ -585,7 +438,6 @@ class Recipe_Dependency(models.Model):
|
||||
|
||||
|
||||
class Machine(models.Model):
|
||||
search_allowed_fields = ["name", "description", "layer_version__layer__name"]
|
||||
layer_source = models.ForeignKey('LayerSource', default = None, null = True) # from where did we get this machine
|
||||
up_id = models.IntegerField(null = True, default = None) # id of entry in the source
|
||||
up_date = models.DateTimeField(null = True, default = None)
|
||||
@@ -594,11 +446,6 @@ class Machine(models.Model):
|
||||
name = models.CharField(max_length=255)
|
||||
description = models.CharField(max_length=255)
|
||||
|
||||
def get_vcs_machine_file_link_url(self):
|
||||
path = 'conf/machine/'+self.name+'.conf'
|
||||
|
||||
return self.layer_version.get_vcs_file_link_url(path)
|
||||
|
||||
def __unicode__(self):
|
||||
return "Machine " + self.name + "(" + self.description + ")"
|
||||
|
||||
@@ -622,47 +469,33 @@ class LayerSource(models.Model):

TYPE_LOCAL = 0
TYPE_LAYERINDEX = 1
TYPE_IMPORTED = 2
SOURCE_TYPE = (
(TYPE_LOCAL, "local"),
(TYPE_LAYERINDEX, "layerindex"),
(TYPE_IMPORTED, "imported"),
)

name = models.CharField(max_length=63, unique = True)
name = models.CharField(max_length=63)
sourcetype = models.IntegerField(choices=SOURCE_TYPE)
apiurl = models.CharField(max_length=255, null=True, default=None)

def update(self):
"""
Updates the local database information from the upstream layer source
"""
raise Exception("Abstract, update() must be implemented by all LayerSource-derived classes (object is %s)" % str(vars(self)))

def save(self, *args, **kwargs):
if isinstance(self, LocalLayerSource):
self.sourcetype = LayerSource.TYPE_LOCAL
elif isinstance(self, LayerIndexLayerSource):
self.sourcetype = LayerSource.TYPE_LAYERINDEX
elif isinstance(self, ImportedLayerSource):
self.sourcetype = LayerSource.TYPE_IMPORTED
elif self.sourcetype == None:
raise Exception("Unknown LayerSource-derived class. If you added a new layer source type, fill out all code stubs.")
raise Exception("Invalid LayerSource type")
return super(LayerSource, self).save(*args, **kwargs)

def get_object(self):
if self.sourcetype == LayerSource.TYPE_LOCAL:
self.__class__ = LocalLayerSource
elif self.sourcetype == LayerSource.TYPE_LAYERINDEX:
self.__class__ = LayerIndexLayerSource
elif self.sourcetype == LayerSource.TYPE_IMPORTED:
self.__class__ = ImportedLayerSource
else:
raise Exception("Unknown LayerSource type. If you added a new layer source type, fill out all code stubs.")
if self.sourcetype is not None:
if self.sourcetype == LayerSource.TYPE_LOCAL:
self.__class__ = LocalLayerSource
if self.sourcetype == LayerSource.TYPE_LAYERINDEX:
self.__class__ = LayerIndexLayerSource
return self

def __unicode__(self):
return "%s (%s)" % (self.name, self.sourcetype)
return "LS " + self.sourcetype + " " + self.name

class LocalLayerSource(LayerSource):
@@ -674,26 +507,11 @@ class LocalLayerSource(LayerSource):
self.sourcetype = LayerSource.TYPE_LOCAL

def update(self):
"""
'''
Fetches layer, recipe and machine information from local repository
"""
'''
pass

class ImportedLayerSource(LayerSource):
class Meta(LayerSource._meta.__class__):
proxy = True

def __init__(self, *args, **kwargs):
super(ImportedLayerSource, self).__init__(args, kwargs)
self.sourcetype = LayerSource.TYPE_IMPORTED

def update(self):
"""
Fetches layer, recipe and machine information from local repository
"""
pass

class LayerIndexLayerSource(LayerSource):
class Meta(LayerSource._meta.__class__):
proxy = True
@@ -703,67 +521,42 @@ class LayerIndexLayerSource(LayerSource):
self.sourcetype = LayerSource.TYPE_LAYERINDEX

def get_object_view(self, branch, objectype, upid):
if self != branch.layer_source:
raise Exception("Invalid branch specification")
return self.apiurl + "../branch/" + branch.name + "/" + objectype + "/?q=" + str(upid)

def update(self):
"""
'''
Fetches layer, recipe and machine information from remote repository
"""
'''
assert self.apiurl is not None
from django.db import IntegrityError
from django.db import transaction, connection

import httplib, urlparse, json
import os
proxy_settings = os.environ.get("http_proxy", None)

def _get_json_response(apiurl = self.apiurl):
conn = None
_parsedurl = urlparse.urlparse(apiurl)
path = _parsedurl.path
query = _parsedurl.query
def parse_url(url):
parsedurl = urlparse.urlparse(url)
try:
(host, port) = parsedurl.netloc.split(":")
except ValueError:
host = parsedurl.netloc
port = None

if port is None:
port = 80
else:
port = int(port)
return (host, port)

if proxy_settings is None:
host, port = parse_url(apiurl)
conn = httplib.HTTPConnection(host, port)
conn.request("GET", path + "?" + query)
import httplib, urlparse, json
parsedurl = urlparse.urlparse(apiurl)
(host, port) = parsedurl.netloc.split(":")
if port is None:
port = 80
else:
host, port = parse_url(proxy_settings)
conn = httplib.HTTPConnection(host, port)
conn.request("GET", apiurl)

port = int(port)
#print "-- connect to: http://%s:%s%s?%s" % (host, port, parsedurl.path, parsedurl.query)
conn = httplib.HTTPConnection(host, port)
conn.request("GET", parsedurl.path + "?" + parsedurl.query)
r = conn.getresponse()
if r.status != 200:
raise Exception("Failed to read " + path + ": %d %s" % (r.status, r.reason))
raise Exception("Failed to read " + parsedurl.path + ": %d %s" % (r.status, r.reason))
return json.loads(r.read())

# verify we can get the basic api
try:
apilinks = _get_json_response()
except Exception as e:
import traceback
if proxy_settings is not None:
print "EE: Using proxy ", proxy_settings
print "EE: could not connect to %s, skipping update: %s\n%s" % (self.apiurl, e, traceback.format_exc(e))
except:
print "EE: could not connect to %s, skipping update" % self.apiurl
return

# update branches; only those that we already have names listed in the Releases table
whitelist_branch_names = map(lambda x: x.branch_name, Release.objects.all())
# update branches; only those that we already have names listed in the database
whitelist_branch_names = map(lambda x: x.name, Branch.objects.all())

print "Fetching branches"
branches_info = _get_json_response(apilinks['branches']
+ "?filter=name:%s" % "OR".join(whitelist_branch_names))
for bi in branches_info:
@@ -771,160 +564,90 @@ class LayerIndexLayerSource(LayerSource):
b.up_id = bi['id']
b.up_date = bi['updated']
b.name = bi['name']
b.bitbake_branch = bi['bitbake_branch']
b.short_description = bi['short_description']
b.save()

# update layers
layers_info = _get_json_response(apilinks['layerItems'])
if not connection.features.autocommits_when_autocommit_is_off:
transaction.set_autocommit(False)
for li in layers_info:
l, created = Layer.objects.get_or_create(layer_source = self, name = li['name'])
l.up_id = li['id']
l, created = Layer.objects.get_or_create(layer_source = self, up_id = li['id'])
l.up_date = li['updated']
l.name = li['name']
l.vcs_url = li['vcs_url']
l.vcs_web_url = li['vcs_web_url']
l.vcs_web_tree_base_url = li['vcs_web_tree_base_url']
l.vcs_web_file_base_url = li['vcs_web_file_base_url']
l.summary = li['summary']
l.description = li['description']
l.save()
if not connection.features.autocommits_when_autocommit_is_off:
transaction.set_autocommit(True)

# update layerbranches/layer_versions
print "Fetching layer information"
layerbranches_info = _get_json_response(apilinks['layerBranches']
+ "?filter=branch:%s" % "OR".join(map(lambda x: str(x.up_id), [i for i in Branch.objects.filter(layer_source = self) if i.up_id is not None] ))
+ "?filter=branch:%s" % "OR".join(map(lambda x: str(x.up_id), Branch.objects.filter(layer_source = self)))
)

if not connection.features.autocommits_when_autocommit_is_off:
transaction.set_autocommit(False)
for lbi in layerbranches_info:
lv, created = Layer_Version.objects.get_or_create(layer_source = self,
up_id = lbi['id'],
layer=Layer.objects.get(layer_source = self, up_id = lbi['layer'])
)
lv, created = Layer_Version.objects.get_or_create(layer_source = self, up_id = lbi['id'])

lv.up_date = lbi['updated']
lv.layer = Layer.objects.get(layer_source = self, up_id = lbi['layer'])
lv.up_branch = Branch.objects.get(layer_source = self, up_id = lbi['branch'])
lv.branch = lbi['actual_branch']
lv.commit = lbi['actual_branch']
lv.commit = lbi['vcs_last_rev']
lv.dirpath = lbi['vcs_subdir']
lv.save()
if not connection.features.autocommits_when_autocommit_is_off:
transaction.set_autocommit(True)

# update layer dependencies
layerdependencies_info = _get_json_response(apilinks['layerDependencies'])
dependlist = {}
if not connection.features.autocommits_when_autocommit_is_off:
transaction.set_autocommit(False)
for ldi in layerdependencies_info:
try:
lv = Layer_Version.objects.get(layer_source = self, up_id = ldi['layerbranch'])
except Layer_Version.DoesNotExist as e:
continue

if lv not in dependlist:
dependlist[lv] = []
try:
dependlist[lv].append(Layer_Version.objects.get(layer_source = self, layer__up_id = ldi['dependency'], up_branch = lv.up_branch))
except Layer_Version.DoesNotExist as e:
print "Cannot find layer version ", self, ldi['dependency'], lv.up_branch
raise e

for lv in dependlist:
LayerVersionDependency.objects.filter(layer_version = lv).delete()
for lvd in dependlist[lv]:
LayerVersionDependency.objects.get_or_create(layer_version = lv, depends_on = lvd)
if not connection.features.autocommits_when_autocommit_is_off:
transaction.set_autocommit(True)

# update machines
print "Fetching machine information"
machines_info = _get_json_response(apilinks['machines']
+ "?filter=layerbranch:%s" % "OR".join(map(lambda x: str(x.up_id), Layer_Version.objects.filter(layer_source = self)))
)

if not connection.features.autocommits_when_autocommit_is_off:
transaction.set_autocommit(False)
for mi in machines_info:
mo, created = Machine.objects.get_or_create(layer_source = self, up_id = mi['id'], layer_version = Layer_Version.objects.get(layer_source = self, up_id = mi['layerbranch']))
mo, created = Machine.objects.get_or_create(layer_source = self, up_id = mi['id'])
mo.up_date = mi['updated']
mo.layer_version = Layer_Version.objects.get(layer_source = self, up_id = mi['layerbranch'])
mo.name = mi['name']
mo.description = mi['description']
mo.save()

if not connection.features.autocommits_when_autocommit_is_off:
transaction.set_autocommit(True)

# update recipes; paginate by layer version / layer branch
print "Fetching target information"
recipes_info = _get_json_response(apilinks['recipes']
+ "?filter=layerbranch:%s" % "OR".join(map(lambda x: str(x.up_id), Layer_Version.objects.filter(layer_source = self)))
)
if not connection.features.autocommits_when_autocommit_is_off:
transaction.set_autocommit(False)
for ri in recipes_info:
try:
ro, created = Recipe.objects.get_or_create(layer_source = self, up_id = ri['id'], layer_version = Layer_Version.objects.get(layer_source = self, up_id = ri['layerbranch']))
ro.up_date = ri['updated']
ro.name = ri['pn']
ro.version = ri['pv']
ro.summary = ri['summary']
ro.description = ri['description']
ro.section = ri['section']
ro.license = ri['license']
ro.homepage = ri['homepage']
ro.bugtracker = ri['bugtracker']
ro.file_path = ri['filepath'] + "/" + ri['filename']
ro.save()
except:
#print "Duplicate Recipe, ignoring: ", vars(ro)
pass
if not connection.features.autocommits_when_autocommit_is_off:
transaction.set_autocommit(True)
ro, created = Recipe.objects.get_or_create(layer_source = self, up_id = ri['id'])

ro.up_date = ri['updated']
ro.layer_version = Layer_Version.objects.get(layer_source = self, up_id = mi['layerbranch'])

ro.name = ri['pn']
ro.version = ri['pv']
ro.summary = ri['summary']
ro.description = ri['description']
ro.section = ri['section']
ro.license = ri['license']
ro.homepage = ri['homepage']
ro.bugtracker = ri['bugtracker']
ro.file_path = ri['filepath'] + ri['filename']
ro.save()

pass

class BitbakeVersion(models.Model):

name = models.CharField(max_length=32, unique = True)
giturl = GitURLField()
giturl = models.URLField()
branch = models.CharField(max_length=32)
dirpath = models.CharField(max_length=255)

def __unicode__(self):
return "%s (Branch: %s)" % (self.name, self.branch)

class Release(models.Model):
""" A release is a project template, used to pre-populate Project settings with a configuration set """
name = models.CharField(max_length=32, unique = True)
description = models.CharField(max_length=255)
bitbake_version = models.ForeignKey(BitbakeVersion)
branch_name = models.CharField(max_length=50, default = "")
helptext = models.TextField(null=True)

def __unicode__(self):
return "%s (%s)" % (self.name, self.branch_name)

class ReleaseLayerSourcePriority(models.Model):
""" Each release selects layers from the set up layer sources, ordered by priority """
release = models.ForeignKey("Release")
layer_source = models.ForeignKey("LayerSource")
priority = models.IntegerField(default = 0)

def __unicode__(self):
return "%s-%s:%d" % (self.release.name, self.layer_source.name, self.priority)
class Meta:
unique_together = (('release', 'layer_source'),)
branch = models.CharField(max_length=32)

class ReleaseDefaultLayer(models.Model):
release = models.ForeignKey(Release)
layer_name = models.CharField(max_length=100, default="")
layer = models.ForeignKey('Layer')

# Branch class is synced with layerindex.Branch, branches can only come from remote layer indexes
@@ -934,6 +657,7 @@ class Branch(models.Model):
up_date = models.DateTimeField(null = True, default = None)

name = models.CharField(max_length=50)
bitbake_branch = models.CharField(max_length=50, blank=True)
short_description = models.CharField(max_length=50, blank=True)

class Meta:
@@ -953,16 +677,14 @@ class Layer(models.Model):
name = models.CharField(max_length=100)
local_path = models.FilePathField(max_length=255, null = True, default = None)
layer_index_url = models.URLField()
vcs_url = GitURLField(default = None, null = True)
vcs_web_url = models.URLField(null = True, default = None)
vcs_web_tree_base_url = models.URLField(null = True, default = None)
vcs_url = models.URLField(default = None, null = True)
vcs_web_file_base_url = models.URLField(null = True, default = None)

summary = models.TextField(help_text='One-line description of the layer', null = True, default = None)
summary = models.CharField(max_length=200, help_text='One-line description of the layer', null = True, default = None)
description = models.TextField(null = True, default = None)

def __unicode__(self):
return "%s / %s " % (self.name, self.layer_source)
return "L " + self.name

class Meta:
unique_together = (("layer_source", "up_id"), ("layer_source", "name"))
@@ -970,7 +692,7 @@ class Layer(models.Model):

# LayerCommit class is synced with layerindex.LayerBranch
class Layer_Version(models.Model):
search_allowed_fields = ["layer__name", "layer__summary", "layer__description", "layer__vcs_url", "dirpath", "up_branch__name", "commit", "branch"]
search_allowed_fields = ["layer__name", "layer__summary",]
build = models.ForeignKey(Build, related_name='layer_version_build', default = None, null = True)
layer = models.ForeignKey(Layer, related_name='layer_version_layer')
@@ -984,106 +706,8 @@ class Layer_Version(models.Model):
dirpath = models.CharField(max_length=255, null = True, default = None) # LayerBranch.vcs_subdir
priority = models.IntegerField(default = 0) # if -1, this is a default layer

project = models.ForeignKey('Project', null = True, default = None) # Set if this layer is project-specific; always set for imported layers, and project-set branches

# code lifted, with adaptations, from the layerindex-web application https://git.yoctoproject.org/cgit/cgit.cgi/layerindex-web/
def _handle_url_path(self, base_url, path):
import re, posixpath
if base_url:
if self.dirpath:
if path:
extra_path = self.dirpath + '/' + path
# Normalise out ../ in path for usage URL
extra_path = posixpath.normpath(extra_path)
# Minor workaround to handle case where subdirectory has been added between branches
# (should probably support usage URL per branch to handle this... sigh...)
if extra_path.startswith('../'):
extra_path = extra_path[3:]
else:
extra_path = self.dirpath
else:
extra_path = path
branchname = self.up_branch.name
url = base_url.replace('%branch%', branchname)

# If there's a % in the path (e.g. a wildcard bbappend) we need to encode it
if extra_path:
extra_path = extra_path.replace('%', '%25')

if '%path%' in base_url:
if extra_path:
url = re.sub(r'\[([^\]]*%path%[^\]]*)\]', '\\1', url)
else:
url = re.sub(r'\[([^\]]*%path%[^\]]*)\]', '', url)
return url.replace('%path%', extra_path)
else:
return url + extra_path
return None

def get_vcs_link_url(self):
if self.layer.vcs_web_url is None:
return None
return self.layer.vcs_web_url

def get_vcs_file_link_url(self, file_path=""):
if self.layer.vcs_web_file_base_url is None:
return None
return self._handle_url_path(self.layer.vcs_web_file_base_url, file_path)

def get_vcs_dirpath_link_url(self):
if self.layer.vcs_web_tree_base_url is None:
return None
return self._handle_url_path(self.layer.vcs_web_tree_base_url, '')

def get_equivalents_wpriority(self, project):
""" Returns an ordered layerversion list that satisfies a LayerVersionDependency using the layer name and the current Project Releases' LayerSource priority """

# layers created for this project, or coming from a build in the project
query = Q(project = project) | Q(build__project = project)
if self.up_branch is not None:
# the same up_branch name
query |= Q(up_branch__name=self.up_branch.name)
else:
# or we have a layer in the project that's similar to mine (See the layer.name constraint below)
query |= Q(projectlayer__project=project)

candidate_layer_versions = list(Layer_Version.objects.filter(layer__name = self.layer.name).filter(query).select_related('layer_source', 'layer', 'up_branch').order_by("-id"))

# optimization - if we have only one, we don't need no stinking sort
if len(candidate_layer_versions) == 1:
return candidate_layer_versions

# raise Exception(candidate_layer_versions)

release_priorities = {}

for ls_id, prio in map(lambda x: (x.layer_source_id, x.priority), project.release.releaselayersourcepriority_set.all().order_by("-priority")):
release_priorities[ls_id] = prio

def _get_ls_priority(ls):
# if there is no layer source, we have minus infinite priority, as we don't want this layer selected
if ls == None:
return -10000
try:
return release_priorities[ls.id]
except IndexError:
raise Exception("Unknown %d %s" % (ls.id, release_priorities))

return sorted( candidate_layer_versions ,
key = lambda x: _get_ls_priority(x.layer_source),
reverse = True)

def get_vcs_reference(self):
if self.commit is not None and len(self.commit) > 0:
return self.commit
if self.branch is not None and len(self.branch) > 0:
return self.branch
if self.up_branch is not None:
return self.up_branch.name
return ("Cannot determine the vcs_reference for layer version %s" % vars(self))

def __unicode__(self):
return "%d %s (VCS %s, Project %s)" % (self.pk, str(self.layer), self.get_vcs_reference(), self.build.project if self.build is not None else "No project")
return "LV " + str(self.layer) + " " + self.commit

class Meta:
unique_together = ("layer_source", "up_id")
@@ -1103,12 +727,6 @@ class ProjectLayer(models.Model):
layercommit = models.ForeignKey(Layer_Version, null=True)
optional = models.BooleanField(default = True)

def __unicode__(self):
return "%s, %s" % (self.project.name, self.layercommit)

class Meta:
unique_together = (("project", "layercommit"),)

class ProjectVariable(models.Model):
project = models.ForeignKey(Project)
name = models.CharField(max_length=100)
@@ -1141,15 +759,13 @@ class HelpText(models.Model):
text = models.TextField()

class LogMessage(models.Model):
EXCEPTION = -1 # used to signal self-toaster-exceptions
INFO = 0
WARNING = 1
ERROR = 2

LOG_LEVEL = ( (INFO, "info"),
(WARNING, "warn"),
(ERROR, "error"),
(EXCEPTION, "toaster exception"))
(ERROR, "error") )

build = models.ForeignKey(Build)
task = models.ForeignKey(Task, blank = True, null=True)
@@ -1,25 +1,16 @@
from django.test import TestCase
from orm.models import LocalLayerSource, LayerIndexLayerSource, ImportedLayerSource, LayerSource
from orm.models import LocalLayerSource, LayerIndexLayerSource, LayerSource
from orm.models import Branch

from orm.models import Project, Build, Layer, Layer_Version, Branch, ProjectLayer
from orm.models import Release, ReleaseLayerSourcePriority, BitbakeVersion

from django.utils import timezone

# tests to verify inheritance for the LayerSource proxy-inheritance classes
class LayerSourceVerifyInheritanceSaveLoad(TestCase):
def test_object_creation(self):
lls = LayerSource.objects.create(name = "a1", sourcetype = LayerSource.TYPE_LOCAL, apiurl = "")
lils = LayerSource.objects.create(name = "a2", sourcetype = LayerSource.TYPE_LAYERINDEX, apiurl = "")
imls = LayerSource.objects.create(name = "a3", sourcetype = LayerSource.TYPE_IMPORTED, apiurl = "")
lils = LayerSource.objects.create(name = "a1", sourcetype = LayerSource.TYPE_LAYERINDEX, apiurl = "")

import pprint
pprint.pprint([(x.__class__,vars(x)) for x in LayerSource.objects.all()])
print LayerSource.objects.all()

self.assertTrue(True in map(lambda x: isinstance(x, LocalLayerSource), LayerSource.objects.all()))
self.assertTrue(True in map(lambda x: isinstance(x, LayerIndexLayerSource), LayerSource.objects.all()))
self.assertTrue(True in map(lambda x: isinstance(x, ImportedLayerSource), LayerSource.objects.all()))

def test_duplicate_error(self):
def duplicate():
@@ -27,9 +18,9 @@ class LayerSourceVerifyInheritanceSaveLoad(TestCase):
LayerSource.objects.create(name = "a1", sourcetype = LayerSource.TYPE_LOCAL, apiurl = "")

self.assertRaises(Exception, duplicate)

# test to verify the layer source update functionality for layerindex. edit to pass the URL to a layerindex application
class LILSUpdateTestCase(TestCase):
def test_update(self):
lils = LayerSource.objects.create(name = "b1", sourcetype = LayerSource.TYPE_LAYERINDEX, apiurl = "http://adamian-desk.local:8080/layerindex/api/")
@@ -40,126 +31,3 @@ class LILSUpdateTestCase(TestCase):

# print vars(lils)
#print map(lambda x: vars(x), Branch.objects.all())

# run asserts
self.assertTrue(lils.branch_set.all().count() > 0, "update() needs to fetch some branches")

# tests to verify layer_version priority selection
class LayerVersionEquivalenceTestCase(TestCase):
def setUp(self):
# create layer sources
ls = LayerSource.objects.create(name = "dummy-layersource", sourcetype = LayerSource.TYPE_LOCAL)

# create bitbake version
bbv = BitbakeVersion.objects.create(name="master", giturl="git://git.openembedded.org/bitbake")
# create release
release = Release.objects.create(name="default-release", bitbake_version = bbv, branch_name = "master")
# attach layer source to release
ReleaseLayerSourcePriority.objects.create(release = release, layer_source = ls, priority = 1)

# create layer attach
self.layer = Layer.objects.create(name="meta-testlayer", layer_source = ls)
# create branch
self.branch = Branch.objects.create(name="master", layer_source = ls)

# set a layer version for the layer on the specified branch
self.layerversion = Layer_Version.objects.create(layer = self.layer, layer_source = ls, up_branch = self.branch)

# create spoof layer that should not appear in the search results
Layer_Version.objects.create(layer = Layer.objects.create(name="meta-notvalid", layer_source = ls), layer_source = ls, up_branch = self.branch)

# create a project ...
self.project = Project.objects.create_project(name="test-project", release = release)
# ... and set it up with a single layer version
ProjectLayer.objects.create(project= self.project, layercommit = self.layerversion)

def test_single_layersource(self):
# when we have a single layer version, get_equivalents_wpriority() should return a list with just this layer_version
equivalent_list = self.layerversion.get_equivalents_wpriority(self.project)
self.assertTrue(len(equivalent_list) == 1)
self.assertTrue(equivalent_list[0] == self.layerversion)

def test_dual_layersource(self):
# if we have two layers with the same name, from different layer sources, we expect both layers in, in increasing priority of the layer source
ls2 = LayerSource.objects.create(name = "dummy-layersource2", sourcetype = LayerSource.TYPE_LOCAL)

# assign a lower priority for the second layer source
Release.objects.get(name="default-release").releaselayersourcepriority_set.create(layer_source = ls2, priority = 2)

# create a new layer_version for a layer with the same name coming from the second layer source
self.layer2 = Layer.objects.create(name="meta-testlayer", layer_source = ls2)
self.layerversion2 = Layer_Version.objects.create(layer = self.layer2, layer_source = ls2, up_branch = self.branch)

# expect two layer versions, in the priority order
equivalent_list = self.layerversion.get_equivalents_wpriority(self.project)
self.assertTrue(len(equivalent_list) == 2)
self.assertTrue(equivalent_list[0] == self.layerversion2)
self.assertTrue(equivalent_list[1] == self.layerversion)

def test_build_layerversion(self):
# any layer version coming from the build should show up before any layer version coming from upstream
build = Build.objects.create(project = self.project, started_on = timezone.now(), completed_on = timezone.now())
self.layerversion_build = Layer_Version.objects.create(layer = self.layer, build = build, commit = "deadbeef")

# a build layerversion must be in the equivalence list for the original layerversion
equivalent_list = self.layerversion.get_equivalents_wpriority(self.project)
self.assertTrue(len(equivalent_list) == 2)
self.assertTrue(equivalent_list[0] == self.layerversion)
self.assertTrue(equivalent_list[1] == self.layerversion_build)

# getting the build layerversion equivalent list must return the same list as the original layer
build_equivalent_list = self.layerversion_build.get_equivalents_wpriority(self.project)

self.assertTrue(equivalent_list == build_equivalent_list, "%s is not %s" % (equivalent_list, build_equivalent_list))

class ProjectLVSelectionTestCase(TestCase):
def setUp(self):
# create layer sources
ls = LayerSource.objects.create(name = "dummy-layersource", sourcetype = LayerSource.TYPE_LOCAL)

# create bitbake version
bbv = BitbakeVersion.objects.create(name="master", giturl="git://git.openembedded.org/bitbake")
# create release
release = Release.objects.create(name="default-release", bitbake_version = bbv, branch_name="master")
# attach layer source to release
ReleaseLayerSourcePriority.objects.create(release = release, layer_source = ls, priority = 1)

# create layer attach
self.layer = Layer.objects.create(name="meta-testlayer", layer_source = ls)
# create branch
self.branch = Branch.objects.create(name="master", layer_source = ls)

# set a layer version for the layer on the specified branch
self.layerversion = Layer_Version.objects.create(layer = self.layer, layer_source = ls, up_branch = self.branch)

# create a project ...
self.project = Project.objects.create_project(name="test-project", release = release)
# ... and set it up with a single layer version
ProjectLayer.objects.create(project= self.project, layercommit = self.layerversion)

def test_single_layersource(self):
compatible_layerversions = self.project.compatible_layerversions()
self.assertTrue(len(compatible_layerversions) == 1)
self.assertTrue(compatible_layerversions[0] == self.layerversion)

def test_dual_layersource(self):
# if we have two layers with the same name, from different layer sources, we expect both layers in, in increasing priority of the layer source
ls2 = LayerSource.objects.create(name = "dummy-layersource2", sourcetype = LayerSource.TYPE_LOCAL)

# assign a lower priority for the second layer source
Release.objects.get(name="default-release").releaselayersourcepriority_set.create(layer_source = ls2, priority = 2)

# create a new layer_version for a layer with the same name coming from the second layer source
self.layer2 = Layer.objects.create(name="meta-testlayer", layer_source = ls2)
self.layerversion2 = Layer_Version.objects.create(layer = self.layer2, layer_source = ls2, up_branch = self.branch)

# expect two layer versions, in the priority order
equivalent_list = self.project.compatible_layerversions()
self.assertTrue(len(equivalent_list) == 2)
self.assertTrue(equivalent_list[0] == self.layerversion2)
self.assertTrue(equivalent_list[1] == self.layerversion)
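
These cases run under the stock Django test runner; assuming Toaster's manage.py is available, a typical invocation would be python manage.py test orm. Note that LILSUpdateTestCase points at a hard-coded layerindex apiurl, so per its own comment it is expected to be edited (or skipped) when no layerindex instance is reachable.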

@@ -1,27 +0,0 @@
#
# BitBake Toaster Implementation
#
# Copyright (C) 2014 Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

from django.conf.urls import patterns, include, url
from django.views.generic import RedirectView

urlpatterns = patterns('orm.views',
# landing point for pushing a bitbake_eventlog.json file to this toaster instance
url(r'^eventfile$', 'eventfile', name='eventfile'),
)

@@ -1,60 +0,0 @@
#
# BitBake Toaster Implementation
#
# Copyright (C) 2014 Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

from django.views.decorators.cache import cache_control
from django.core.urlresolvers import reverse
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.http import HttpResponseBadRequest, HttpResponse
from django.utils import timezone
from django.utils.html import escape
from datetime import timedelta
from django.utils import formats
from toastergui.templatetags.projecttags import json as jsonfilter
import json
import os
import tempfile
import subprocess
import toastermain
from django.views.decorators.csrf import csrf_exempt

@csrf_exempt
def eventfile(request):
""" Receives a file by POST, and runs toaster-eventreplay on this file """
if request.method != "POST":
return HttpResponseBadRequest("This API only accepts POST requests. Post a file with:\n\ncurl -F eventlog=@bitbake_eventlog.json http[s]://[server-address]/orm/eventfile\n", content_type="text/plain;utf8")

# write temporary file
(handle, abstemppath) = tempfile.mkstemp(dir="/tmp/")
with os.fdopen(handle, "w") as tmpfile:
for chunk in request.FILES['eventlog'].chunks():
tmpfile.write(chunk)
tmpfile.close()

# compute the path to "bitbake/bin/toaster-eventreplay"
from os.path import dirname as DN
import_script = os.path.join(DN(DN(DN(DN(os.path.abspath(__file__))))), "bin/toaster-eventreplay")
if not os.path.exists(import_script):
raise Exception("script missing %s" % import_script)
scriptenv = os.environ.copy()
scriptenv["DATABASE_URL"] = toastermain.settings.getDATABASE_URL()

# run the data loading process and return the results
(out, err) = subprocess.Popen([import_script, abstemppath], stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=scriptenv).communicate()
os.remove(abstemppath)
return HttpResponse("%s\n%s" % (out, err), content_type="text/plain;utf8")
@@ -2,27 +2,16 @@
.logo img { height: 30px; width: auto !important; }
.logo { padding-top: 4px !important; padding-bottom:0px !important; }

/* style the version information */
.brand > a { color: #777; }
.brand > a:hover { color: #999; text-decoration: none; }
.icon-info-sign { color: #777; font-size: 16px; margin-left: 5px;}
.icon-info-sign:hover { color: #999; cursor: pointer; }

/* Style the breadcrumb */
.breadcrumb { display: inline-block; background-color: transparent; }
.section { margin: 20px 0; }

/* Styles for the help information */
.get-help { color: #CCCCCC; }
.get-help:hover, .icon-plus-sign:hover { color: #999999; cursor: pointer; }
.get-help:hover { color: #999999; cursor: pointer; }
.get-help-blue { color: #3A87AD; }
.get-help-blue:hover { color: #005580; cursor: pointer; }
.get-help-yellow { color: #C09853; }
.get-help-yellow:hover { color: #B38942; cursor: pointer; }
.get-help-red { color: #B94A48; font-size: 16px; padding-left: 2px; }
.get-help-red:hover { color: #943A38; cursor: pointer; }
.build-form>i:first-of-type { margin-left: 5px; }
.manual { margin: 11px 15px;}
.manual { margin-top: 11px; }
.heading-help { font-size: 14px; }

/* Styles for the external link */
@@ -44,7 +33,7 @@ dd code, .alert code { white-space: pre-wrap; word-break: break-all; word-wrap:
dd ul { list-style-type: none; margin: 0px; }
dt, dd {line-height: 25px; }
dd li { line-height: 25px; }
.item-info dd { line-height: 20px; margin-bottom: 10px; }
dd p { line-height: 20px; }

/* Style the filter modal dialogs */
.modal { width: 800px; margin-left: -400px; }
@@ -55,7 +44,6 @@ dd li { line-height: 25px; }

/* Some extra space before headings when needed */
.details { margin-top: 30px; }
.air { margin-top: 30px; }

/* Required classes for the highlight behaviour in tables */
.highlight { -webkit-animation: target-fade 10s 1; -moz-animation: target-fade 10s 1; animation: target-fade 10s 1; }
@@ -67,12 +55,11 @@ dd li { line-height: 25px; }
.tooltip { z-index: 2000 !important; }

/* Override default Twitter Bootstrap styles for anchor tags inside tables */
td a, td a > code { color: #333333; }
td code { white-space: normal; }
td a:hover, td a > code:hover { color: #000000; text-decoration: underline; }
td a { color: #333333; }
td a:hover { color: #000000; text-decoration: underline; }

/* Override default Twitter Bootstrap styles for tr.error */
.table tbody tr.error > td { background-color: transparent; } /* override default Bootstrap behaviour */
.table tbody tr.error > td { background-color: #FFFFFF; } /* override default Bootstrap behaviour */
.table-hover tbody tr.error:hover > td { background-color: #F5F5F5;} /* override default Bootstrap behaviour */

/* Right justify Bootstrap table columns for size fields */
@@ -109,10 +96,6 @@ th > a, th > span { font-weight: normal; }
.content-directory a:hover { color: #005580; text-decoration: underline; }
.symlink { color: #CCCCCC; }

/* Styles for the navbar actions */
.btn-group + .btn-group { margin-right: 10px; }
.navbar-inner > .btn-group { margin-top: 6px; }

/* Other styles */
.dropdown-menu { padding: 10px; }
select { width: auto; }
@@ -120,57 +103,25 @@ select { width: auto; }
.top-air { margin-top: 40px;}
.progress { margin-bottom: 0px; }
.lead .badge { font-size: 18px; font-weight: normal; border-radius: 15px; padding: 9px; }
.lead ol, .lead ul { padding: 10px 0 0 20px; }
.lead ol > li, .lead ul > li {
line-height: 35px;
}
.well > .lead, .alert .lead { margin-bottom: 0px; }
.well-transparent { background-color: transparent; }
.no-results { margin: 10px 0; }
.task-name { margin-left: 7px; }
.icon-hand-right {color: #CCCCCC; }
.help-inline { margin: 5px; }
.dashboard-section { background-color: transparent; }

/* styles for landing page - analysis mode */
.hero-unit { margin: 20px 0 30px; }
.hero-unit > .close { font-size:40px; }
.hero-actions { margin-top: 30px; }

/* styles for landing page - build mode */
.hero-unit p { line-height: 25px; }
.hero-unit p, .hero-unit .btn-large { margin-top: 15px; }
.hero-unit ul { margin-top: 20px; }
.hero-unit li { line-height: 30px; }
.hero-unit img { background-color: #eee; margin-top: 15px; }
.dashboard-section { background-color: transparent; }

/* make tables Chrome-happy (me, not so much) */
#otable { table-layout: fixed; word-wrap: break-word; }

/* styles for the new build button */
.new-build .btn-primary { padding: 4px 30px; }
.new-build .alert { margin-top: 10px; }
.new-build .alert p { margin-top: 10px; }

/* styles for showing the project name in build mode */
.project-name { padding-top: 0; }
.project-name .label { font-weight: normal; margin-bottom: 5px; margin-left: -15px; padding: 5px; }
.project-name .label > a { color: #fff; font-weight: normal; }

/* Remove bottom margin for forms inside modal dialogs */
#dependencies_modal_form { margin-bottom: 0px; }

/* Configuration styles */
.icon-trash { color: #B94A48; font-size: 16px; padding-left: 5px; }
.icon-trash { color: #B94A48; font-size: 16px; padding-left: 2px; }
.icon-trash:hover { color: #943A38; text-decoration: none; cursor: pointer; }
.icon-pencil, .icon-download-alt, .icon-refresh, .icon-star-empty, .icon-star, .icon-tasks { font-size: 16px; color: #0088CC; padding-left: 2px; }
.icon-pencil:hover, .icon-download-alt:hover, .icon-refresh:hover, .icon-star-empty:hover, .icon-star:hover, .icon-tasks:hover { color: #005580; text-decoration: none; cursor: pointer; }
.icon-share { padding-left: 2px; }
.alert-success .icon-refresh, .alert-success .icon-tasks { color: #468847; }
.alert-success .icon-refresh:hover, .alert-success .icon-tasks:hover { color: #347132; }
.alert-error .icon-refresh, .alert-error .icon-tasks { color: #b94a48; }
.alert-error .icon-refresh:hover, .alert-error .icon-tasks:hover { color: #943A38; }
.configuration-list li, .configuration-list label { line-height: 35px; font-size: 21px; font-weight: 200; margin-bottom: 0px;}
.icon-pencil, .icon-download-alt { font-size: 16px; color: #0088CC; padding-left: 2px; }
.icon-pencil:hover, .icon-download-alt:hover { color: #005580; text-decoration: none; cursor: pointer; }
.configuration-list li { line-height: 35px; font-size: 21px; font-weight: 200; }
.configuration-list { font-size: 16px; margin-bottom: 1.5em; }
.configuration-list i { font-size: 16px; }
/*.configuration-layers { height: 135px; overflow: scroll; }*/
@@ -181,78 +132,15 @@ select { width: auto; }
.configuration-alert p { margin-bottom: 0px; }
fieldset { padding-left: 19px; }
.project-form { margin-top: 10px; }
.add-layers .btn-block + .btn-block, .build .btn-block + .btn-block { margin-top: 0px; }
.add-layers .btn-block + .btn-block { margin-top: 0px; }
input.huge { font-size: 17.5px; padding: 11px 19px; }
.build-form { margin-bottom: 0px; }
.build-form .input-append { margin-bottom: 0px; }
.build-form .btn-large { padding: 11px 35px; }
.build-form p { font-size:17.5px ;margin:12px 0 0 10px;}
#layer-container form, #target-container form { margin-bottom: 0px; }
.btn-primary .icon-question-sign, .btn-danger .icon-question-sign { color: #fff; }
.btn-primary .icon-question-sign:hover, .btn-danger .icon-question-sign:hover { color: #999; }
.build-form { margin-bottom: 0px; padding-left: 20px; }
a code { color: #0088CC; }
a code:hover { color: #005580; }
.localconf { font-size: 17.5px; margin-top: 40px; }
.localconf code { font-size: 17.5px; }
#add-layer-dependencies { margin-top: 5px; }
.link-action { font-size: 17.5px; margin-top: 40px; }
.link-action code { font-size: 17.5px; }
.artifact { width: 9em; }
.control-group { margin-bottom: 0px; }
#project-details form { margin: 0px; }
dd form { margin: 10px 0 0 0; }
dl textarea { resize: vertical; }
.navbar-fixed-top { z-index: 1; }
.popover { z-index: 2; }
.btn-danger .icon-trash { color: #fff; }
.bbappends { list-style-type: none; margin-left: 0; }
.bbappends li { line-height: 25px; }
.configuration-list input[type="checkbox"] { margin-top:13px;margin-right:10px; }
.alert input[type="checkbox"] { margin-top: 0px; margin-right: 3px; }
.alert ol { padding: 10px 0px 0px 20px; }
.alert ol > li { line-height: 35px; }
.dl-vertical form { margin-top: 10px; }
.scrolling { border: 1px solid #dddddd; height: 154px; overflow: auto; padding: 8px; width: 27.5%; margin-bottom: 10px; }
.lead .help-block { font-size: 14px; line-height: 20px; font-weight: normal; }
.button-place .btn { margin: 0 0 20px 0; }
.tooltip-inner { max-width: 250px; }
.new-build { padding: 20px; }
.new-build li { line-height: 30px; }
.new-build h6 { margin: 10px 0 0 0; color: #5a5a5a; }
.new-build h3 { margin: 0; color: #5a5a5a; }
.new-build form { margin: 5px 0 0; }
.new-build .input-append { margin-bottom: 0; }
#build-selected { margin-top: 15px; }
div.add-deps { margin-top: 15px; }
.btn.log { margin-left: 20px; }

.animate-repeat {
list-style:none;
box-sizing:border-box;
}

.animate-repeat.ng-move,
.animate-repeat.ng-enter,
.animate-repeat.ng-leave {
-webkit-transition:all linear 0.5s;
transition:all linear 0.5s;
}

.animate-repeat.ng-leave.ng-leave-active,
.animate-repeat.ng-move,
.animate-repeat.ng-enter {
opacity:0;
}

.animate-repeat.ng-leave,
.animate-repeat.ng-enter.ng-enter-active {
opacity:1;
}

@keyframes machines-fade { 0% { background-color: #D9EDF7; } 25% { background-color: #D9EDF7; } 100% { background-color: white; } }
@-webkit-keyframes machines-fade { 0% { background-color: #D9EDF7; } 25% { background-color: #D9EDF7; } 100% { background-color: white; } }

.machines-highlight { -webkit-animation: machines-fade 7s 1; -moz-animation: machines-fade 7s 1; animation: machines-fade 7s 1; }

.tab-pane table { margin-top: 10px; }
13 binary image files deleted in this comparison (sizes before deletion: 418 B, 312 B, 205 B, 262 B, 348 B, 207 B, 5.7 KiB, 278 B, 328 B, 6.8 KiB, 4.4 KiB, 4.4 KiB, 4.4 KiB)