@@ -52,13 +52,6 @@ The following boards are supported by the meta-yocto-bsp layer:
For more information see the board's section below. The appropriate MACHINE
variable value corresponding to the board is given in brackets.

Reference Board Maintenance
===========================

Send pull requests, patches, comments or questions about meta-yocto-bsps to poky@yoctoproject.org

Maintainers: Kevin Hao <kexin.hao@windriver.com>
Bruce Ashfield <bruce.ashfield@windriver.com>

Consumer Devices
================
@@ -324,22 +317,6 @@ Load the kernel and dtb (device tree blob), and boot the system as follows:
=> tftp 2000000 uImage-mpc8315e-rdb.dtb
=> bootm 1000000 - 2000000

--- Booting from JFFS2 root ---

1. First boot the board with NFS root.

2. Erase the MTD partition which will be used as root:

$ flash_eraseall /dev/mtd3

3. Copy the JFFS2 image to the MTD partition:

$ flashcp core-image-minimal-mpc8315e-rdb.jffs2 /dev/mtd3

4. Then reboot the board and set up the environment in U-Boot:

=> setenv bootargs root=/dev/mtdblock3 rootfstype=jffs2 console=ttyS0,115200


Ubiquiti Networks EdgeRouter Lite (edgerouter)
==============================================

@@ -41,7 +41,7 @@ from bb import ui
from bb import server
from bb import cookerdata

__version__ = "1.25.0"
__version__ = "1.22.0"
logger = logging.getLogger("BitBake")

# Python multiprocessing requires /dev/shm
@@ -139,8 +139,8 @@ class BitBakeConfigParameters(cookerdata.ConfigParameters):
parser.add_option("-n", "--dry-run", help = "Don't execute, just go through the motions.",
action = "store_true", dest = "dry_run", default = False)

parser.add_option("-S", "--dump-signatures", help = "Dump out the signature construction information, with no task execution. The SIGNATURE_HANDLER parameter is passed to the handler. Two common values are none and printdiff but the handler may define more/less. none means only dump the signature, printdiff means compare the dumped signature with the cached one.",
action = "append", dest = "dump_signatures", default = [], metavar="SIGNATURE_HANDLER")
parser.add_option("-S", "--dump-signatures", help = "Dump out the signature construction information, with no task execution. Parameters are passed to the signature handling code, use 'none' if no specific handler is required.",
action = "append", dest = "dump_signatures", default = [])

parser.add_option("-p", "--parse-only", help = "Quit after parsing the BB recipes.",
action = "store_true", dest = "parse_only", default = False)
@@ -148,7 +148,7 @@ class BitBakeConfigParameters(cookerdata.ConfigParameters):
parser.add_option("-s", "--show-versions", help = "Show current and preferred versions of all recipes.",
action = "store_true", dest = "show_versions", default = False)

parser.add_option("-e", "--environment", help = "Show the global or per-recipe environment complete with information about where variables were set/changed.",
parser.add_option("-e", "--environment", help = "Show the global or per-package environment complete with information about where variables were set/changed.",
action = "store_true", dest = "show_environment", default = False)

parser.add_option("-g", "--graphviz", help = "Save dependency tree information for the specified targets in the dot syntax.",
@@ -169,9 +169,6 @@ class BitBakeConfigParameters(cookerdata.ConfigParameters):
parser.add_option("-t", "--servertype", help = "Choose which server to use, process or xmlrpc.",
action = "store", dest = "servertype")

parser.add_option("", "--token", help = "Specify the connection token to be used when connecting to a remote server.",
action = "store", dest = "xmlrpctoken")

parser.add_option("", "--revisions-changed", help = "Set the exit code depending on whether upstream floating revisions have changed or not.",
action = "store_true", dest = "revisions_changed", default = False)

@@ -203,31 +200,6 @@ class BitBakeConfigParameters(cookerdata.ConfigParameters):
options.servertype = "xmlrpc"
options.remote_server = os.environ["BBSERVER"]

if "BBTOKEN" in os.environ:
options.xmlrpctoken = os.environ["BBTOKEN"]

# if BBSERVER says to autodetect, let's do that
if options.remote_server:
[host, port] = options.remote_server.split(":", 2)
port = int(port)
# use automatic port if port set to -1, means read it from
# the bitbake.lock file; this is a bit tricky, but we always expect
# to be in the base of the build directory if we need to have a
# chance to start the server later, anyway
if port == -1:
lock_location = "./bitbake.lock"
# we try to read the address at all times; if the server is not started,
# we'll try to start it after the first connect fails, below
try:
lf = open(lock_location, 'r')
remotedef = lf.readline()
[host, port] = remotedef.split(":")
port = int(port)
lf.close()
options.remote_server = remotedef
except Exception as e:
sys.exit("Failed to read bitbake.lock (%s), invalid port" % str(e))

return options, targets[1:]


@@ -307,9 +279,6 @@ def main():
if configParams.observe_only and (not configParams.remote_server or configParams.bind):
sys.exit("FATAL: '--observe-only' can only be used by UI clients connecting to a server.\n")

if configParams.kill_server and not configParams.remote_server:
sys.exit("FATAL: '--kill-server' can only be used to terminate a remote server")

if "BBDEBUG" in os.environ:
level = int(os.environ["BBDEBUG"])
if level > configuration.debug:
@@ -338,17 +307,30 @@ def main():
bb.event.ui_queue = []
else:
# we start a stub server that is actually a XMLRPClient that connects to a real server
server = servermodule.BitBakeXMLRPCClient(configParams.observe_only, configParams.xmlrpctoken)
server = servermodule.BitBakeXMLRPCClient(configParams.observe_only)
server.saveConnectionDetails(configParams.remote_server)

server.saveConnectionConfigParams(configParams)

if not configParams.server_only:
try:
server_connection = server.establishConnection(featureset)
except Exception as e:
if configParams.status_only:
try:
server_connection = server.establishConnection(featureset)
except:
sys.exit(1)
if not server_connection:
sys.exit(1)
server_connection.terminate()
sys.exit(0)

# Setup a connection to the server (cooker)
server_connection = server.establishConnection(featureset)
if not server_connection:
if configParams.kill_server:
sys.exit(0)
bb.fatal("Could not connect to server %s: %s" % (configParams.remote_server, str(e)))
bb.fatal("Server already killed")
configParams.bind = configParams.remote_server
start_server(servermodule, configParams, configuration, featureset)
bb.event.ui_queue = []
server_connection = server.establishConnection(featureset)

# Restore the environment in case the UI needs it
for k in cleanedvars:
@@ -356,23 +338,13 @@ def main():

logger.removeHandler(handler)


if configParams.status_only:
server_connection.terminate()
sys.exit(0)

if configParams.kill_server:
server_connection.connection.terminateServer()
bb.event.ui_queue = []
sys.exit(0)

try:
return ui_module.main(server_connection.connection, server_connection.events, configParams)
finally:
bb.event.ui_queue = []
server_connection.terminate()
else:
print("Bitbake server address: %s, server port: %s" % (server.serverImpl.host, server.serverImpl.port))
print("server address: %s, server port: %s" % (server.serverImpl.host, server.serverImpl.port))
return 0

return 1

@@ -556,7 +556,7 @@ Recipes are listed with the bbappends that apply to them as subitems.
continue

basename = os.path.basename(filename)
appends = self.bbhandler.cooker.collection.get_file_appends(basename)
appends = self.bbhandler.cooker.collection.appendlist.get(basename)
if appends:
appended.append((basename, list(appends)))
else:
@@ -566,38 +566,33 @@ Recipes are listed with the bbappends that apply to them as subitems.
def do_show_cross_depends(self, args):
"""figure out the dependency between recipes that crosses a layer boundary.

usage: show-cross-depends [-f] [-i layer1[,layer2[,layer3...]]]
usage: show-cross-depends [-f]

Figure out the dependency between recipes that crosses a layer boundary.

Options:
-f show full file path
-i ignore dependencies on items in the specified layer(s)

NOTE:
The .bbappend file can impact the dependency.
"""
import optparse

parser = optparse.OptionParser(usage="show-cross-depends [-f] [-i layer1[,layer2[,layer3...]]]")
parser.add_option("-f", "",
action="store_true", dest="show_filenames")
parser.add_option("-i", "",
action="store", dest="ignore_layers", default="")

options, args = parser.parse_args(sys.argv)
ignore_layers = options.ignore_layers.split(',')

self.init_bbhandler()

show_filenames = False
for arg in args.split():
if arg == '-f':
show_filenames = True
else:
sys.stderr.write("show-cross-depends: invalid option %s\n" % arg)
self.do_help('')
return

pkg_fn = self.bbhandler.cooker_data.pkg_fn
bbpath = str(self.bbhandler.config_data.getVar('BBPATH', True))
self.require_re = re.compile(r"require\s+(.+)")
self.include_re = re.compile(r"include\s+(.+)")
self.inherit_re = re.compile(r"inherit\s+(.+)")

global_inherit = (self.bbhandler.config_data.getVar('INHERIT', True) or "").split()

# The bb's DEPENDS and RDEPENDS
for f in pkg_fn:
f = bb.cache.Cache.virtualfn2realfn(f)[0]
@@ -612,7 +607,7 @@ The .bbappend file can impact the dependency.
self.bbhandler.config_data,
self.bbhandler.cooker_data,
self.bbhandler.cooker_data.pkg_pn)
self.check_cross_depends("DEPENDS", layername, f, best[3], options.show_filenames, ignore_layers)
self.check_cross_depends("DEPENDS", layername, f, best[3], show_filenames)

# The RDPENDS
all_rdeps = self.bbhandler.cooker_data.rundeps[f].values()
@@ -626,33 +621,10 @@ The .bbappend file can impact the dependency.
for rdep in all_rdeps:
all_p = bb.providers.getRuntimeProviders(self.bbhandler.cooker_data, rdep)
if all_p:
if f in all_p:
# The recipe provides this one itself, ignore
continue
best = bb.providers.filterProvidersRunTime(all_p, rdep,
self.bbhandler.config_data,
self.bbhandler.cooker_data)[0][0]
self.check_cross_depends("RDEPENDS", layername, f, best, options.show_filenames, ignore_layers)

# The RRECOMMENDS
all_rrecs = self.bbhandler.cooker_data.runrecs[f].values()
# Remove the duplicated or null one.
sorted_rrecs = {}
# The all_rrecs is the list in list, so we need two for loops
for k1 in all_rrecs:
for k2 in k1:
sorted_rrecs[k2] = 1
all_rrecs = sorted_rrecs.keys()
for rrec in all_rrecs:
all_p = bb.providers.getRuntimeProviders(self.bbhandler.cooker_data, rrec)
if all_p:
if f in all_p:
# The recipe provides this one itself, ignore
continue
best = bb.providers.filterProvidersRunTime(all_p, rrec,
self.bbhandler.config_data,
self.bbhandler.cooker_data)[0][0]
self.check_cross_depends("RRECOMMENDS", layername, f, best, options.show_filenames, ignore_layers)
self.check_cross_depends("RDEPENDS", layername, f, best, show_filenames)

# The inherit class
cls_re = re.compile('classes/')
@@ -662,12 +634,9 @@ The .bbappend file can impact the dependency.
# The inherits' format is [classes/cls, /path/to/classes/cls]
# ignore the classes/cls.
if not cls_re.match(cls):
classname = os.path.splitext(os.path.basename(cls))[0]
if classname in global_inherit:
continue
inherit_layername = self.get_file_layer(cls)
if inherit_layername != layername and not inherit_layername in ignore_layers:
if not options.show_filenames:
if inherit_layername != layername:
if not show_filenames:
f_short = self.remove_layer_prefix(f)
cls = self.remove_layer_prefix(cls)
else:
@@ -687,7 +656,7 @@ The .bbappend file can impact the dependency.
if pv_re.search(needed_file) and f in self.bbhandler.cooker_data.pkg_pepvpr:
pv = self.bbhandler.cooker_data.pkg_pepvpr[f][1]
needed_file = re.sub(r"\${PV}", pv, needed_file)
self.print_cross_files(bbpath, keyword, layername, f, needed_file, options.show_filenames, ignore_layers)
self.print_cross_files(bbpath, keyword, layername, f, needed_file, show_filenames)
line = fnfile.readline()
fnfile.close()

@@ -714,22 +683,21 @@ The .bbappend file can impact the dependency.
bbclass=".bbclass"
# Find a 'require/include xxxx'
if m:
self.print_cross_files(bbpath, keyword, layername, f, m.group(1) + bbclass, options.show_filenames, ignore_layers)
self.print_cross_files(bbpath, keyword, layername, f, m.group(1) + bbclass, show_filenames)
line = ffile.readline()
ffile.close()

def print_cross_files(self, bbpath, keyword, layername, f, needed_filename, show_filenames, ignore_layers):
def print_cross_files(self, bbpath, keyword, layername, f, needed_filename, show_filenames):
"""Print the depends that crosses a layer boundary"""
needed_file = bb.utils.which(bbpath, needed_filename)
if needed_file:
# Which layer is this file from
needed_layername = self.get_file_layer(needed_file)
if needed_layername != layername and not needed_layername in ignore_layers:
if needed_layername != layername:
if not show_filenames:
f = self.remove_layer_prefix(f)
needed_file = self.remove_layer_prefix(needed_file)
logger.plain("%s %s %s" %(f, keyword, needed_file))

def match_inherit(self, line):
"""Match the inherit xxx line"""
return (self.inherit_re.match(line), "inherits")
@@ -743,11 +711,11 @@ The .bbappend file can impact the dependency.
keyword = "includes"
return (m, keyword)

def check_cross_depends(self, keyword, layername, f, needed_file, show_filenames, ignore_layers):
def check_cross_depends(self, keyword, layername, f, needed_file, show_filenames):
"""Print the DEPENDS/RDEPENDS file that crosses a layer boundary"""
best_realfn = bb.cache.Cache.virtualfn2realfn(needed_file)[0]
needed_layername = self.get_file_layer(best_realfn)
if needed_layername != layername and not needed_layername in ignore_layers:
if needed_layername != layername:
if not show_filenames:
f = self.remove_layer_prefix(f)
best_realfn = self.remove_layer_prefix(best_realfn)

@@ -12,18 +12,10 @@ import errno
import signal

# Users shouldn't be running this code directly
if len(sys.argv) != 2 or not sys.argv[1].startswith("decafbad"):
if len(sys.argv) != 2 or sys.argv[1] != "decafbad":
print("bitbake-worker is meant for internal execution by bitbake itself, please don't use it standalone.")
sys.exit(1)

profiling = False
if sys.argv[1] == "decafbadbad":
profiling = True
try:
import cProfile as profile
except:
import profile

logger = logging.getLogger("BitBake")

try:
@@ -89,11 +81,6 @@ def workerlog_write(msg):
lf.write(msg)
lf.flush()

def sigterm_handler(signum, frame):
signal.signal(signal.SIGTERM, signal.SIG_DFL)
os.killpg(0, signal.SIGTERM)
sys.exit()

def fork_off_task(cfg, data, workerdata, fn, task, taskname, appends, taskdepdata, quieterrors=False):
# We need to setup the environment BEFORE the fork, since
# a fork() or exec*() activates PSEUDO...
@@ -142,13 +129,10 @@ def fork_off_task(cfg, data, workerdata, fn, task, taskname, appends, taskdepdat
bb.msg.fatal("RunQueue", "fork failed: %d (%s)" % (e.errno, e.strerror))

if pid == 0:
def child():
global worker_pipe
pipein.close()

signal.signal(signal.SIGTERM, sigterm_handler)
# Let SIGHUP exit as SIGTERM
signal.signal(signal.SIGHUP, sigterm_handler)
signal.signal(signal.SIGTERM, signal.SIG_DFL)

# Save out the PID so that the event can include it the
# events
@@ -170,11 +154,15 @@ def fork_off_task(cfg, data, workerdata, fn, task, taskname, appends, taskdepdat
data.setVar("BUILDNAME", workerdata["buildname"])
data.setVar("DATE", workerdata["date"])
data.setVar("TIME", workerdata["time"])
bb.parse.siggen.set_taskdata(workerdata["sigdata"])
bb.parse.siggen.set_taskdata(workerdata["hashes"], workerdata["hash_deps"], workerdata["sigchecksums"])
ret = 0
try:
the_data = bb.cache.Cache.loadDataFull(fn, appends, data)
the_data.setVar('BB_TASKHASH', workerdata["runq_hash"][task])
for h in workerdata["hashes"]:
the_data.setVar("BBHASH_%s" % h, workerdata["hashes"][h])
for h in workerdata["hash_deps"]:
the_data.setVar("BBHASHDEPS_%s" % h, workerdata["hash_deps"][h])

# exported_vars() returns a generator which *cannot* be passed to os.environ.update()
# successfully. We also need to unset anything from the environment which shouldn't be there
@@ -195,22 +183,11 @@ def fork_off_task(cfg, data, workerdata, fn, task, taskname, appends, taskdepdat
logger.critical(str(exc))
os._exit(1)
try:
if cfg.dry_run:
return 0
return bb.build.exec_task(fn, taskname, the_data, cfg.profile)
if not cfg.dry_run:
ret = bb.build.exec_task(fn, taskname, the_data, cfg.profile)
os._exit(ret)
except:
os._exit(1)
if not profiling:
os._exit(child())
else:
profname = "profile-%s.log" % (fn.replace("/", "-") + "-" + taskname)
prof = profile.Profile()
try:
ret = profile.Profile.runcall(prof, child)
finally:
prof.dump_stats(profname)
bb.utils.process_profilelog(profname)
os._exit(ret)
else:
for key, value in envbackup.iteritems():
if value is None:
@@ -268,14 +245,9 @@ class BitbakeWorker(object):
self.build_pipes = {}

signal.signal(signal.SIGTERM, self.sigterm_exception)
# Let SIGHUP exit as SIGTERM
signal.signal(signal.SIGHUP, self.sigterm_exception)

def sigterm_exception(self, signum, stackframe):
if signum == signal.SIGTERM:
bb.warn("Worker recieved SIGTERM, shutting down...")
elif signum == signal.SIGHUP:
bb.warn("Worker recieved SIGHUP, shutting down...")
bb.warn("Worker recieved SIGTERM, shutting down...")
self.handle_finishnow(None)
signal.signal(signal.SIGTERM, signal.SIG_DFL)
os.kill(os.getpid(), signal.SIGTERM)
@@ -390,16 +362,7 @@

try:
worker = BitbakeWorker(sys.stdin)
if not profiling:
worker.serve()
else:
profname = "profile-worker.log"
prof = profile.Profile()
try:
profile.Profile.runcall(prof, worker.serve)
finally:
prof.dump_stats(profname)
bb.utils.process_profilelog(profname)
worker.serve()
except BaseException as e:
if not normalexit:
import traceback

@@ -16,15 +16,9 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA


# This script can be run in two modes.

# When used with "source", from a build directory,
# it enables toaster event logging and starts the bitbake resident server.
# use as: source toaster [start|stop] [noweb] [noui]

# When it is called as a stand-alone script, it starts just the
# web server, and the building shall be done through the web interface.
# As script, it will not return to the command prompt. Stop with Ctrl-C.
# This script enables toaster event logging and
# starts bitbake resident server
# use as: source toaster [start|stop]

# Helper function to kill a background toaster development server

@@ -36,8 +30,6 @@ function webserverKillAll()
while kill -0 $(< ${pidfile}) 2>/dev/null; do
kill -SIGTERM -$(< ${pidfile}) 2>/dev/null
sleep 1;
# Kill processes if they are still running - may happen in interactive shells
ps fux | grep "python.*manage.py runserver" | awk '{print $2}' | xargs kill
done;
rm ${pidfile}
fi
@@ -46,12 +38,6 @@ function webserverKillAll()

function webserverStartAll()
{
# do not start if toastermain points to a valid process
if ! cat "${BUILDDIR}/.toastermain.pid" 2>/dev/null | xargs -I{} kill -0 {} ; then
retval=1
rm "${BUILDDIR}/.toastermain.pid"
fi

retval=0
python $BBBASEDIR/lib/toaster/manage.py syncdb || retval=1
python $BBBASEDIR/lib/toaster/manage.py migrate orm || retval=2
@@ -63,19 +49,12 @@ function webserverStartAll()
retval=0
python $BBBASEDIR/lib/toaster/manage.py migrate orm || retval=1
fi
if [ "x$TOASTER_MANAGED" == "x1" ]; then
python $BBBASEDIR/lib/toaster/manage.py migrate bldcontrol || retval=1
python $BBBASEDIR/lib/toaster/manage.py checksettings --traceback || retval=1
fi
if [ $retval -eq 0 ]; then
echo "Starting webserver"
python $BBBASEDIR/lib/toaster/manage.py runserver "0.0.0.0:$WEB_PORT" </dev/null >${BUILDDIR}/toaster_web_$$.log 2>&1 & echo $! >${BUILDDIR}/.toastermain.pid
python $BBBASEDIR/lib/toaster/manage.py runserver 0.0.0.0:8000 </dev/null >${BUILDDIR}/toaster_web.log 2>&1 & echo $! >${BUILDDIR}/.toastermain.pid
sleep 1
if ! cat "${BUILDDIR}/.toastermain.pid" | xargs -I{} kill -0 {} ; then
retval=1
rm "${BUILDDIR}/.toastermain.pid"
else
echo "Webserver address: http://0.0.0.0:$WEB_PORT/"
fi
fi
return $retval
@@ -85,10 +64,8 @@ function webserverStartAll()

function addtoConfiguration()
{
file=$1
shift
echo "#Created by toaster start script" > ${BUILDDIR}/conf/$file
for var in "$@"; do echo $var >> ${BUILDDIR}/conf/$file; done
echo "#Created by toaster start script" > ${BUILDDIR}/conf/$2
echo $1 >> ${BUILDDIR}/conf/$2
}

INSTOPSYSTEM=0
@@ -103,7 +80,7 @@ function stop_system()
kill $(< ${BUILDDIR}/.toasterui.pid ) 2>/dev/null
rm ${BUILDDIR}/.toasterui.pid
fi
BBSERVER=0.0.0.0:-1 bitbake -m
BBSERVER=localhost:8200 bitbake -m
unset BBSERVER
webserverKillAll
# force stop any misbehaving bitbake server
@@ -126,86 +103,36 @@ function notify_chldexit() {
}


# Verify prerequisites

if ! echo "import django; print (1,) == django.VERSION[0:1] and django.VERSION[1:2][0] in (5,6)" | python 2>/dev/null | grep True >/dev/null; then
echo -e "This program needs Django 1.5 or 1.6. Please install with\n\npip install django==1.6"
return 2
fi

if ! echo "import south; print [0,8,4] == map(int,south.__version__.split(\".\"))" | python 2>/dev/null | grep True >/dev/null; then
echo -e "This program needs South 0.8.4. Please install with\n\npip install south==0.8.4"
return 2
fi


# read command line parameters

BBBASEDIR=`dirname ${BASH_SOURCE}`/..
RUNNING=0

NOTOASTERUI=0
WEBSERVER=1
TOASTER_BRBE=""
WEB_PORT="8000"

for param in $*; do
case $param in
noui )
NOTOASTERUI=1
;;
noweb )
WEBSERVER=0
;;
brbe=* )
TOASTER_BRBE=$'\n'"TOASTER_BRBE=\""${param#*=}"\""
;;
webport=*)
WEB_PORT="${param#*=}"
esac
done

# We make sure we're running in the current shell and in a good environment

if [ -z "$ZSH_NAME" ] && [ `basename \"$0\"` = `basename \"$BASH_SOURCE\"` ]; then
# We are called as standalone. We refuse to run in a build environment - we need the interactive mode for that.
# Start just the web server, point the web browser to the interface, and start any Django services.

if [ -n "$BUILDDIR" ]; then
echo -e "Error: build/ directory detected. Toaster will not start in managed mode if a build environment is detected.\nUse a clean terminal to start Toaster." 1>&2;
exit 1;
fi

# Define a fake builddir where only the pid files are actually created. No real builds will take place here.
BUILDDIR=/tmp
RUNNING=1
function trap_ctrlc() {
echo "** Stopping system"
webserverKillAll
RUNNING=0
}
TOASTER_MANAGED=1
export TOASTER_MANAGED=1
if ! webserverStartAll; then
echo "Failed to start the web server, stopping" 1>&2;
exit 1;
fi
xdg-open http://0.0.0.0:$WEB_PORT/ >/dev/null 2>&1 &
trap trap_ctrlc SIGINT
echo "Running. Stop with Ctrl-C"
while [ $RUNNING -gt 0 ]; do
python $BBBASEDIR/lib/toaster/manage.py runbuilds
sleep 1
done
echo "**** Exit"
exit 0
echo "Error: This script needs to be sourced. Please run as 'source toaster [start|stop]'" 1>&2;
exit 1
fi

# We make sure we're running in the current shell and in a good environment
if [ -z "$BUILDDIR" ] || [ -z `which bitbake` ]; then
echo "Error: Build environment is not setup or bitbake is not in path." 1>&2;
return 2
fi

BBBASEDIR=`dirname ${BASH_SOURCE}`/..


# Verify prerequisites

if ! echo "import django; print (1,5) == django.VERSION[0:2]" | python 2>/dev/null | grep True >/dev/null; then
echo -e "This program needs Django 1.5. Please install with\n\nsudo pip install django==1.5"
return 2
fi

if ! echo "import south; print [0,8,4] == map(int,south.__version__.split(\".\"))" | python 2>/dev/null | grep True >/dev/null; then
echo -e "This program needs South 0.8.4. Please install with\n\nsudo pip install south==0.8.4"
return 2
fi





# Determine the action. If specified by arguments, fine, if not, toggle it
if [ "x$1" == "xstart" ] || [ "x$1" == "xstop" ]; then
@@ -218,6 +145,15 @@ else
fi;
fi

NOTOASTERUI=0
for param in $*; do
case $param in
noui )
NOTOASTERUI=1
;;
esac
done

echo "The system will $CMD."

# Make sure it's safe to run by checking bitbake lock
@@ -227,36 +163,30 @@ if [ -e $BUILDDIR/bitbake.lock ]; then
(flock -n 200 ) 200<$BUILDDIR/bitbake.lock || lock=0
fi

if [ ${CMD} == "start" ] && [ $lock -eq 0 ]; then
echo "Error: bitbake lock state error. File locks show that the system is on." 1>&2
echo "Please wait for the current build to finish, stop and then start the system again." 1>&2
if [ ${CMD} == "start" ] && ( [ $lock -eq 0 ] || [ -e $BUILDDIR/.toastermain.pid ] ); then
echo "Error: bitbake lock state error. File locks show that the system is on." 2>&1
echo "If you see problems, stop and then start the system again." 2>&1
return 3
fi

if [ ${CMD} == "start" ] && [ -e $BUILDDIR/.toastermain.pid ] && kill -0 `cat $BUILDDIR/.toastermain.pid`; then
echo "Error: bitbake appears to be dead, but the webserver is alive. Something fishy is going on." 1>&2
echo "Cleaning up the web server at to start a clean slate."
webserverKillAll
fi


# Execute the commands

case $CMD in
start )
start_success=1
addtoConfiguration toaster.conf "INHERIT+=\"toaster buildhistory\"" $TOASTER_BRBE
if [ $WEBSERVER -gt 0 ] && ! webserverStartAll; then
addtoConfiguration "INHERIT+=\"toaster buildhistory\"" toaster.conf
if ! webserverStartAll; then
echo "Failed ${CMD}."
return 4
fi
unset BBSERVER
bitbake --postread conf/toaster.conf --server-only -t xmlrpc -B 0.0.0.0:0
bitbake --postread conf/toaster.conf --server-only -t xmlrpc -B localhost:8200
if [ $? -ne 0 ]; then
start_success=0
echo "Bitbake server start failed"
else
export BBSERVER=0.0.0.0:-1
export BBSERVER=localhost:8200
if [ $NOTOASTERUI == 0 ]; then # we start the TOASTERUI only if not inhibited
bitbake --observe-only -u toasterui >${BUILDDIR}/toaster_ui.log 2>&1 & echo $! >${BUILDDIR}/.toasterui.pid
fi
@@ -265,12 +195,10 @@ case $CMD in
# set fail safe stop system on terminal exit
trap stop_system SIGHUP
echo "Successful ${CMD}."
return 0
else
# failed start, do stop
stop_system
echo "Failed ${CMD}."
return 1
fi
# stop system on terminal exit
set -o monitor

@@ -53,6 +53,7 @@ fun! NewBBTemplate()
put ='LICENSE = \"\"'
put ='SECTION = \"\"'
put ='DEPENDS = \"\"'
put ='PR = \"r0\"'
put =''
put ='SRC_URI = \"\"'


@@ -3,15 +3,9 @@

<xsl:import href="http://docbook.sourceforge.net/release/xsl/current/xhtml/docbook.xsl" />

<xsl:include href="../template/permalinks.xsl"/>
<xsl:include href="../template/section.title.xsl"/>
<xsl:include href="../template/component.title.xsl"/>
<xsl:include href="../template/division.title.xsl"/>
<xsl:include href="../template/formal.object.heading.xsl"/>
<xsl:include href="../template/gloss-permalinks.xsl"/>

<xsl:param name="html.stylesheet" select="'user-manual-style.css'" />
<xsl:param name="chapter.autolabel" select="1" />
<!-- <xsl:param name="appendix.autolabel" select="A" /> -->
<xsl:param name="section.autolabel" select="1" />
<xsl:param name="section.label.includes.component.label" select="1" />
<xsl:param name="appendix.autolabel">A</xsl:param>

@@ -6,9 +6,8 @@

<para>
The primary purpose for running BitBake is to produce some kind
of output such as a single installable package, a kernel, a software
development kit, or even a full, board-specific bootable Linux image,
complete with bootloader, kernel, and root filesystem.
of output such as an image, a kernel, or a software development
kit.
Of course, you can execute the <filename>bitbake</filename>
command with options that cause it to execute single tasks,
compile single recipe files, capture or clear data, or simply
@@ -27,40 +26,23 @@
see
"<link linkend='bitbake-user-manual-command'>The BitBake Command</link>"
section.
<note>
<para>
Prior to executing BitBake, you should take advantage of available
parallel thread execution on your build host by setting the
<link linkend='var-BB_NUMBER_THREADS'><filename>BB_NUMBER_THREADS</filename></link>
variable in your project's <filename>local.conf</filename>
configuration file.
</para>

<para>
A common way to determine this value for your build host is to run:
<literallayout class='monospaced'>
$ grep processor /proc/cpuinfo
</literallayout>
and count the number of processors displayed. Note that the number of
processors will take into account hyper-threading, so that a quad-core
build host with hyper-threading will most likely show eight processors,
which is the value you would then assign to that variable.
</para>

<para>
A possibly simpler solution is that some Linux distributions
(e.g. Debian and Ubuntu) provide the <filename>ncpus</filename> command.
</para>
</note>
</para>

<note>
Prior to executing BitBake, you should take advantage of parallel
thread execution by setting the
<link linkend='var-BB_NUMBER_THREADS'><filename>BB_NUMBER_THREADS</filename></link>
variable in your <filename>local.conf</filename>
configuration file.
</note>

<section id='parsing-the-base-configuration-metadata'>
<title>Parsing the Base Configuration Metadata</title>

<para>
The first thing BitBake does is parse base configuration
metadata.
Base configuration metadata consists of your project's
Base configuration metadata consists of the
<filename>bblayers.conf</filename> file to determine what
layers BitBake needs to recognize, all necessary
<filename>layer.conf</filename> files (one from each layer),
@@ -89,11 +71,10 @@
and
<link linkend='var-BBFILES'><filename>BBFILES</filename></link>.
<filename>BBPATH</filename> is used to search for
configuration and class files under the
<filename>conf</filename> and <filename>classes</filename>
configuration and class files under
<filename>conf/</filename> and <filename>class/</filename>
directories, respectively.
<filename>BBFILES</filename> is used to locate both recipe
and recipe append files
<filename>BBFILES</filename> is used to find recipe files
(<filename>.bb</filename> and <filename>.bbappend</filename>).
If there is no <filename>bblayers.conf</filename> file,
it is assumed the user has set the <filename>BBPATH</filename>
@@ -101,7 +82,7 @@
</para>

<para>
Next, the <filename>bitbake.conf</filename> file is located
Next, the <filename>bitbake.conf</filename> file is searched
using the <filename>BBPATH</filename> variable that was
just constructed.
The <filename>bitbake.conf</filename> file may also include other
@@ -136,18 +117,18 @@
optional <filename>conf/bblayers.conf</filename> configuration file.
This file is expected to contain a
<link linkend='var-BBLAYERS'><filename>BBLAYERS</filename></link>
variable that is a space-delimited list of 'layer' directories.
variable that is a space delimited list of 'layer' directories.
Recall that if BitBake cannot find a <filename>bblayers.conf</filename>
file, then it is assumed the user has set the <filename>BBPATH</filename>
and <filename>BBFILES</filename> variables directly in the environment.
file then it is assumed the user has set the <filename>BBPATH</filename>
and <filename>BBFILES</filename> directly in the environment.
</para>

<para>
For each directory (layer) in this list, a <filename>conf/layer.conf</filename>
file is located and parsed with the
file is searched for and parsed with the
<link linkend='var-LAYERDIR'><filename>LAYERDIR</filename></link>
variable being set to the directory where the layer was found.
The idea is these files automatically set up
The idea is these files automatically setup
<link linkend='var-BBPATH'><filename>BBPATH</filename></link>
and other variables correctly for a given build directory.
</para>
@@ -162,7 +143,7 @@

<para>
Only variable definitions and include directives are allowed
in BitBake <filename>.conf</filename> files.
in <filename>.conf</filename> files.
Some variables directly influence BitBake's behavior.
These variables might have been set from the environment
depending on the environment variables previously
@@ -185,8 +166,7 @@
Other classes that are specified in the configuration using the
<link linkend='var-INHERIT'><filename>INHERIT</filename></link>
variable are also included.
BitBake searches for class files in a
<filename>classes</filename> subdirectory under
BitBake searches for class files in a "classes" subdirectory under
the paths in <filename>BBPATH</filename> in the same way as
configuration files.
</para>
@@ -209,7 +189,7 @@
If a recipe uses a closing curly brace within the function and
the character has no leading spaces, BitBake produces a parsing
error.
If you use a pair of curly braces in a shell function, the
If you use a pair of curly brace in a shell function, the
closing curly brace must not be located at the start of the line
without leading spaces.
</para>
@@ -281,14 +261,14 @@
One common convention is to use the recipe filename to define
pieces of metadata.
For example, in <filename>bitbake.conf</filename> the recipe
name and version are used to set the variables
name and version set
<link linkend='var-PN'><filename>PN</filename></link> and
<link linkend='var-PV'><filename>PV</filename></link>:
<literallayout class='monospaced'>
PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[0] or 'defaultpkgname'}"
PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[1] or '1.0'}"
PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[0] or 'defaultpkgname'}"
</literallayout>
In this example, a recipe called "something_1.2.3.bb" would set
In this example, a recipe called "something_1.2.3.bb" sets
<filename>PN</filename> to "something" and
<filename>PV</filename> to "1.2.3".
</para>
@@ -351,55 +331,38 @@
</section>

<section id='bb-bitbake-providers'>
<title>Providers</title>
<title>Preferences and Providers</title>

<para>
Assuming BitBake has been instructed to execute a target
and that all the recipe files have been parsed, BitBake
starts to figure out how to build the target.
BitBake looks through the <filename>PROVIDES</filename> list
for each of the recipes.
A <filename>PROVIDES</filename> list is the list of names by which
the recipe can be known.
Each recipe's <filename>PROVIDES</filename> list is created
implicitly through the recipe's
<link linkend='var-PN'><filename>PN</filename></link> variable
and explicitly through the recipe's
BitBake starts by looking through the
<link linkend='var-PROVIDES'><filename>PROVIDES</filename></link>
variable, which is optional.
set in recipe files.
The default <filename>PROVIDES</filename> for a recipe is its name
(<link linkend='var-PN'><filename>PN</filename></link>),
however, a recipe can provide multiple things.
</para>

<para>
When a recipe uses <filename>PROVIDES</filename>, that recipe's
functionality can be found under an alternative name or names other
than the implicit <filename>PN</filename> name.
As an example, suppose a recipe named <filename>keyboard_1.0.bb</filename>
contained the following:
As an example of adding an extra provider, suppose a recipe named
<filename>foo_1.0.bb</filename> contained the following:
<literallayout class='monospaced'>
PROVIDES += "fullkeyboard"
PROVIDES += "virtual/bar_1.0"
</literallayout>
The <filename>PROVIDES</filename> list for this recipe becomes
"keyboard", which is implicit, and "fullkeyboard", which is explicit.
Consequently, the functionality found in
<filename>keyboard_1.0.bb</filename> can be found under two
different names.
</para>
</section>

<section id='bb-bitbake-preferences'>
<title>Preferences</title>

<para>
The <filename>PROVIDES</filename> list is only part of the solution
for figuring out a target's recipes.
Because targets might have multiple providers, BitBake needs
to prioritize providers by determining provider preferences.
The recipe now provides both "foo_1.0" and "virtual/bar_1.0".
The "virtual/" namespace is often used to denote cases where
multiple providers are expected with the user choosing between
them.
Kernels and toolchain components are common cases of this in
OpenEmbedded.
</para>

<para>
A common example in which a target has multiple providers
is "virtual/kernel", which is on the
<filename>PROVIDES</filename> list for each kernel recipe.
Sometimes a target might have multiple providers.
A common example is "virtual/kernel", which is provided by each
kernel recipe.
Each machine often selects the best kernel provider by using a
line similar to the following in the machine configuration file:
<literallayout class='monospaced'>
@@ -414,7 +377,7 @@

<para>
Understanding how providers are chosen is made complicated by the fact
that multiple versions might exist for a given provider.
that multiple versions might exist.
BitBake defaults to the highest version of a provider.
Version comparisons are made using the same method as Debian.
You can use the
@@ -423,19 +386,13 @@
You can influence the order by using the
<link linkend='var-DEFAULT_PREFERENCE'><filename>DEFAULT_PREFERENCE</filename></link>
variable.
</para>

<para>
By default, files have a preference of "0".
Setting <filename>DEFAULT_PREFERENCE</filename> to "-1" makes the
Setting the <filename>DEFAULT_PREFERENCE</filename> to "-1" makes the
recipe unlikely to be used unless it is explicitly referenced.
Setting <filename>DEFAULT_PREFERENCE</filename> to "1" makes it
likely the recipe is used.
<filename>PREFERRED_VERSION</filename> overrides any
<filename>DEFAULT_PREFERENCE</filename> setting.
<filename>DEFAULT_PREFERENCE</filename> is often used to mark newer
and more experimental recipe versions until they have undergone
sufficient testing to be considered stable.
Setting the <filename>DEFAULT_PREFERENCE</filename> to "1" makes it likely the recipe is used.
<filename>PREFERRED_VERSION</filename> overrides any <filename>DEFAULT_PREFERENCE</filename> setting.
<filename>DEFAULT_PREFERENCE</filename> is often used to mark newer and more experimental recipe
versions until they have undergone sufficient testing to be considered stable.
</para>

<para>
@@ -444,16 +401,18 @@
version, unless otherwise specified.
If the recipe in question has a
<link linkend='var-DEFAULT_PREFERENCE'><filename>DEFAULT_PREFERENCE</filename></link>
set lower than the other recipes (default is 0), then
it will not be selected.
set lower than
the other recipes (default is 0), then it will not be
selected.
This allows the person or persons maintaining
the repository of recipe files to specify
their preference for the default selected version.
Additionally, the user can specify their preferred version.
In addition, the user can specify their preferred version.
</para>

<para>
If the first recipe is named <filename>a_1.1.bb</filename>, then the
If the first recipe is named <filename>a_1.1.bb</filename>,
then the
<link linkend='var-PN'><filename>PN</filename></link> variable
will be set to “a”, and the
<link linkend='var-PV'><filename>PV</filename></link>
@@ -461,38 +420,19 @@
</para>

<para>
Thus, if a recipe named <filename>a_1.2.bb</filename> exists, BitBake
If we then have a recipe named <filename>a_1.2.bb</filename>, BitBake
will choose 1.2 by default.
However, if you define the following variable in a
<filename>.conf</filename> file that BitBake parses, you
can change that preference:
However, if we define the following variable in a
<filename>.conf</filename> file that BitBake parses, we
can change that.
<literallayout class='monospaced'>
PREFERRED_VERSION_a = "1.1"
</literallayout>
</para>

<note>
<para>
It is common for a recipe to provide two versions -- a stable,
numbered (and preferred) version, and a version that is
automatically checked out from a source code repository that
is considered more "bleeding edge" but can be selected only
explicitly.
</para>

<para>
For example, in the OpenEmbedded codebase, there is a standard,
versioned recipe file for BusyBox,
<filename>busybox_1.22.1.bb</filename>,
but there is also a Git-based version,
<filename>busybox_git.bb</filename>, which explicitly contains the line
<literallayout class='monospaced'>
DEFAULT_PREFERENCE = "-1"
</literallayout>
to ensure that the numbered, stable version is always preferred
unless the developer selects otherwise.
</para>
</note>
<para>
In summary, BitBake has created a list of providers, which is prioritized, for each target.
</para>
</section>

<section id='bb-bitbake-dependencies'>
@@ -555,7 +495,7 @@
As each task completes, a timestamp is written to the directory specified by the
<link linkend='var-STAMP'><filename>STAMP</filename></link> variable.
On subsequent runs, BitBake looks in the build directory within
<filename>tmp/stamps</filename> and does not rerun
<filename>tmp/stamps</filename>and does not rerun
tasks that are already completed unless a timestamp is found to be invalid.
Currently, invalid timestamps are only considered on a per
recipe file basis.
@@ -595,7 +535,7 @@
<title>Executing Tasks</title>

<para>
Tasks can be either a shell task or a Python task.
Tasks can either be a shell task or a Python task.
For shell tasks, BitBake writes a shell script to
<filename>${</filename><link linkend='var-T'><filename>T</filename></link><filename>}/run.do_taskname.pid</filename>
and then executes the script.
@@ -815,9 +755,9 @@
to determine the stamps and delta where these two
stamp trees diverge.
<note>
It is likely that future versions of BitBake will
It is likely that future versions of BitBake with
provide other signature handlers triggered through
additional "-S" parameters.
additional "-S" paramters.
</note>
</para>


@@ -8,14 +8,14 @@
|
||||
BitBake's fetch module is a standalone piece of library code
|
||||
that deals with the intricacies of downloading source code
|
||||
and files from remote systems.
|
||||
Fetching source code is one of the cornerstones of building software.
|
||||
Fetching source code is one of the corner stones of building software.
|
||||
As such, this module forms an important part of BitBake.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The current fetch module is called "fetch2" and refers to the
|
||||
fact that it is the second major version of the API.
|
||||
The original version is obsolete and has been removed from the codebase.
|
||||
The original version is obsolete and removed from the codebase.
|
||||
Thus, in all cases, "fetch" refers to "fetch2" in this
|
||||
manual.
|
||||
</para>
@@ -60,19 +60,17 @@
    <note>
        For convenience, the naming in these examples matches
        the variables used by OpenEmbedded.
        If you want to see the above code in action, examine
        the OpenEmbedded class file <filename>base.bbclass</filename>.
    </note>
    The <filename>SRC_URI</filename> and <filename>WORKDIR</filename>
    variables are not hardcoded into the fetcher, since those fetcher
    methods can be (and are) called with different variable names.
    In OpenEmbedded for example, the shared state (sstate) code uses
    the fetch module to fetch the sstate files.
</para>
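
<para>
    As a minimal sketch of that calling pattern, the fetch2 API can be
    driven with whichever variable holds the URL list; this example assumes
    the OpenEmbedded-style names mentioned above:
    <literallayout class='monospaced'>
     src_uri = (d.getVar('SRC_URI', True) or "").split()
     fetcher = bb.fetch2.Fetch(src_uri, d)
     fetcher.download()
     fetcher.unpack(d.getVar('WORKDIR', True))
    </literallayout>
</para>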
<para>
    When the <filename>download()</filename> method is called,
    BitBake tries to resolve the URLs by looking for source files
    in a specific search order:
    <itemizedlist>
        <listitem><para><emphasis>Pre-mirror Sites:</emphasis>
@@ -86,7 +84,7 @@
            <filename>SRC_URI</filename>).
            </para></listitem>
        <listitem><para><emphasis>Mirror Sites:</emphasis>
            If fetch failures occur, BitBake next uses mirror locations as
            defined by the
            <link linkend='var-MIRRORS'><filename>MIRRORS</filename></link>
            variable.
@@ -151,7 +149,7 @@
<para>
    File integrity is of key importance for reproducing builds.
    For non-local archive downloads, the fetcher code can verify
    SHA-256 and MD5 checksums to ensure the archives have been
    downloaded correctly.
    You can specify these checksums by using the
    <filename>SRC_URI</filename> variable with the appropriate variable flags.
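    For example (the digests shown here are placeholders, not real
    checksums):
    <literallayout class='monospaced'>
     SRC_URI[md5sum] = "00112233445566778899aabbccddeeff"
     SRC_URI[sha256sum] = "00112233445566778899aabbccddeeff00112233445566778899aabbccddeeff"
    </literallayout>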
@@ -262,8 +260,8 @@
<para>
    This submodule handles URLs that begin with
    <filename>file://</filename>.
    The filename you specify within the URL can be
    either an absolute or relative path to a file.
    If the filename is relative, the contents of the
    <link linkend='var-FILESPATH'><filename>FILESPATH</filename></link>
    variable is used in the same way
@@ -288,45 +286,15 @@
</para>

<para>
    Here are a couple of example URLs, the first relative and
    the second absolute:
    <literallayout class='monospaced'>
     SRC_URI = "file://relativefile.patch"
     SRC_URI = "file:///Users/ich/very_important_software"
    </literallayout>
</para>
</section>

<section id='http-ftp-fetcher'>
    <title>HTTP/FTP wget fetcher (<filename>http://</filename>, <filename>ftp://</filename>, <filename>https://</filename>)</title>

    <para>
        This fetcher obtains files from web and FTP servers.
        Internally, the fetcher uses the wget utility.
    </para>

    <para>
        The executable and parameters used are specified by the
        <filename>FETCHCMD_wget</filename> variable, which defaults
        to sensible values.
        The fetcher supports a parameter "downloadfilename" that
        allows the name of the downloaded file to be specified.
        Specifying the name of the downloaded file is useful
        for avoiding collisions in
        <link linkend='var-DL_DIR'><filename>DL_DIR</filename></link>
        when dealing with multiple files that have the same name.
    </para>
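
    <para>
        For instance, a download URL that does not end in a meaningful file
        name can be given one explicitly; the host and file names below are
        hypothetical:
        <literallayout class='monospaced'>
     SRC_URI = "http://example.com/download.php?file=1234;downloadfilename=myfile-1.0.tar.gz"
        </literallayout>
    </para>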

    <para>
        Some example URLs are as follows:
        <literallayout class='monospaced'>
     SRC_URI = "http://oe.handhelds.org/not_there.aac"
     SRC_URI = "ftp://oe.handhelds.org/not_there_as_well.aac"
     SRC_URI = "ftp://you@oe.handhelds.org/home/you/secret.plan"
        </literallayout>
    </para>
</section>

<section id='cvs-fetcher'>
    <title>CVS fetcher (<filename>cvs://</filename>)</title>

@@ -366,7 +334,7 @@
    The supported parameters are as follows:
    <itemizedlist>
        <listitem><para><emphasis>"method":</emphasis>
            The protocol over which to communicate with the CVS server.
            By default, this protocol is "pserver".
            If "method" is set to "ext", BitBake examines the
            "rsh" parameter and sets <filename>CVS_RSH</filename>.
@@ -426,6 +394,36 @@
    </para>
</section>

<section id='svn-fetcher'>
    <title>Subversion (SVN) Fetcher (<filename>svn://</filename>)</title>

@@ -468,13 +466,6 @@
            compile-time when set to "keep".
            By default, these directories are removed.
            </para></listitem>
        <listitem><para><emphasis>"transportuser":</emphasis>
            When required, sets the username for the transport.
            By default, this parameter is empty.
            The transport username is different from the username
            used in the main URL, which is passed to the subversion
            command.
            </para></listitem>
    </itemizedlist>
    Following are two examples using svn:
    <literallayout class='monospaced'>
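     # Two illustrative svn URLs; the repository, module, and revision
     # values are hypothetical:
     SRC_URI = "svn://myrepos/proj1;module=vip;protocol=http;rev=667"
     SRC_URI = "svn://myrepos/proj1;module=opie;protocol=svn+ssh"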
    </literallayout>
    </para>
@@ -485,14 +476,14 @@
</section>

<section id='git-fetcher'>
    <title>Git Fetcher (<filename>git://</filename>)</title>

    <para>
        This fetcher submodule fetches code from the Git
        source control system.
        The fetcher works by creating a bare clone of the
        remote into <filename>GITDIR</filename>, which is
        usually <filename>DL_DIR/git2</filename>.
        This bare clone is then cloned into the work directory during the
        unpack stage when a specific tree is checked out.
        This is done using alternates and by reference to
@@ -560,7 +551,7 @@
            network.
            For that reason, tags are often not used.
            As far as Git is concerned, the "tag" parameter behaves
            effectively the same as the "rev" parameter.
            </para></listitem>
        <listitem><para><emphasis>"subpath":</emphasis>
            Limits the checkout to a specific subpath of the tree.
@@ -579,116 +570,6 @@
    </para>
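
    <para>
        A typical Git URL names the repository plus optional parameters
        such as "protocol" and "branch"; the repository below is
        hypothetical:
        <literallayout class='monospaced'>
     SRC_URI = "git://git.example.org/myproject.git;protocol=https;branch=master"
        </literallayout>
    </para>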
</section>

<section id='gitsm-fetcher'>
    <title>Git Submodule Fetcher (<filename>gitsm://</filename>)</title>

    <para>
        This fetcher submodule inherits from the
        <link linkend='git-fetcher'>Git fetcher</link> and extends
        that fetcher's behavior by fetching a repository's submodules.
        <link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>
        is passed to the Git fetcher as described in the
        "<link linkend='git-fetcher'>Git Fetcher (<filename>git://</filename>)</link>"
        section.
        <note>
            <title>Notes and Warnings</title>
            <para>
                You must clean a recipe when switching between
                '<filename>git://</filename>' and
                '<filename>gitsm://</filename>' URLs.
            </para>

            <para>
                The Git Submodules fetcher is not a complete fetcher
                implementation.
                The fetcher has known issues where it does not use the
                normal source mirroring infrastructure properly.
            </para>
        </note>
    </para>
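
    <para>
        Usage mirrors the plain Git fetcher; only the URL scheme changes.
        The repository below is hypothetical:
        <literallayout class='monospaced'>
     SRC_URI = "gitsm://git.example.org/myproject.git;protocol=https;branch=master"
        </literallayout>
    </para>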
</section>

<section id='clearcase-fetcher'>
    <title>ClearCase Fetcher (<filename>ccrc://</filename>)</title>

    <para>
        This fetcher submodule fetches code from a
        <ulink url='http://en.wikipedia.org/wiki/Rational_ClearCase'>ClearCase</ulink>
        repository.
    </para>

    <para>
        To use this fetcher, make sure your recipe has proper
        <link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>,
        <link linkend='var-SRCREV'><filename>SRCREV</filename></link>, and
        <link linkend='var-PV'><filename>PV</filename></link> settings.
        Here is an example:
        <literallayout class='monospaced'>
     SRC_URI = "ccrc://cc.example.org/ccrc;vob=/example_vob;module=/example_module"
     SRCREV = "EXAMPLE_CLEARCASE_TAG"
     PV = "${@d.getVar("SRCREV").replace("/", "+")}"
        </literallayout>
        The fetcher uses the <filename>rcleartool</filename> or
        <filename>cleartool</filename> remote client, depending on
        which one is available.
    </para>

    <para>
        Following are options for the <filename>SRC_URI</filename>
        statement:
        <itemizedlist>
            <listitem><para><emphasis><filename>vob</filename></emphasis>:
                The name, which must include the
                leading "/" character, of the ClearCase VOB.
                This option is required.
                </para></listitem>
            <listitem><para><emphasis><filename>module</filename></emphasis>:
                The module, which must include the
                leading "/" character, in the selected VOB.
                The <filename>module</filename> and <filename>vob</filename>
                options are combined to create the following load rule in
                the view config spec:
                <literallayout class='monospaced'>
     load <vob><module>
                </literallayout>
                </para></listitem>
            <listitem><para><emphasis><filename>proto</filename></emphasis>:
                The protocol, which can be either <filename>http</filename> or
                <filename>https</filename>.
                </para></listitem>
        </itemizedlist>
    </para>

    <para>
        By default, the fetcher creates a configuration specification.
        If you want this specification written to an area other than the default,
        use the <filename>CCASE_CUSTOM_CONFIG_SPEC</filename> variable
        in your recipe to define where the specification is written.
        <note>
            The <filename>SRCREV</filename> loses its functionality if you
            specify this variable.
            However, <filename>SRCREV</filename> is still used to label the
            archive after a fetch even though it does not define what is
            fetched.
        </note>
    </para>
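
    <para>
        A minimal sketch of such an override, using a hypothetical location
        for the generated specification:
        <literallayout class='monospaced'>
     CCASE_CUSTOM_CONFIG_SPEC = "${WORKDIR}/custom-config-spec"
        </literallayout>
    </para>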

    <para>
        Here are a couple of other behaviors worth mentioning:
        <itemizedlist>
            <listitem><para>
                When using <filename>cleartool</filename>, the login of
                <filename>cleartool</filename> is handled by the system.
                The login requires no special steps.
                </para></listitem>
            <listitem><para>
                In order to use <filename>rcleartool</filename> with authenticated
                users, an "rcleartool login" is necessary before using the fetcher.
                </para></listitem>
        </itemizedlist>
    </para>
</section>

<section id='other-fetchers'>
    <title>Other Fetchers</title>

@@ -701,6 +582,9 @@
        <listitem><para>
            Perforce (<filename>p4://</filename>)
            </para></listitem>
        <listitem><para>
            Git Submodules (<filename>gitsm://</filename>)
            </para></listitem>
        <listitem><para>
            Trees using Git Annex (<filename>gitannex://</filename>)
            </para></listitem>

@@ -471,7 +471,7 @@ ERROR: Unable to parse base: ParseError in configuration INHERITs: Could not inh
Time: 00:00:00
Parsing of 1 .bb files complete (0 cached, 1 parsed). 1 targets, 0 skipped, 0 masked, 0 errors.
NOTE: Resolving any missing task queue dependencies
NOTE: Preparing RunQueue
NOTE: Executing RunQueue Tasks
********************
*                  *

@@ -8,9 +8,9 @@
    Welcome to the BitBake User Manual.
    This manual provides information on the BitBake tool.
    The information attempts to be as independent as possible regarding
    systems that use BitBake, such as OpenEmbedded and the
    Yocto Project.
    In some cases, scenarios or examples within the context of
    a build system are used in the manual to help with understanding.
    For these cases, the manual clearly states the context.
</para>
@@ -35,31 +35,28 @@
    <listitem><para>
        BitBake executes tasks according to provided
        metadata that builds up the tasks.
        Metadata is stored in recipe (<filename>.bb</filename>)
        and related recipe "append" (<filename>.bbappend</filename>)
        files, configuration (<filename>.conf</filename>) and
        underlying include (<filename>.inc</filename>) files, and
        in class (<filename>.bbclass</filename>) files.
        The metadata provides
        BitBake with instructions on what tasks to run and
        the dependencies between those tasks.
        </para></listitem>
    <listitem><para>
        BitBake includes a fetcher library for obtaining source
        code from various places such as local files, source control
        systems, or websites.
        </para></listitem>
    <listitem><para>
        The instructions for each unit to be built (e.g. a piece
        of software) are known as "recipe" files and
        contain all the information about the unit
        (dependencies, source file locations, checksums, description
        and so on).
        </para></listitem>
    <listitem><para>
        BitBake includes a client/server abstraction and can
        be used from a command line or used as a service over
        XML-RPC and has several different user interfaces.
        </para></listitem>
</itemizedlist>
</para>
@@ -72,7 +69,7 @@
    BitBake was originally a part of the OpenEmbedded project.
    It was inspired by the Portage package management system
    used by the Gentoo Linux distribution.
    On December 7, 2004, OpenEmbedded project team member
    Chris Larson split the project into two distinct pieces:
    <itemizedlist>
        <listitem><para>BitBake, a generic task executor</para></listitem>
@@ -82,11 +79,8 @@
    Today, BitBake is the primary basis of the
    <ulink url="http://www.openembedded.org/">OpenEmbedded</ulink>
    project, which is being used to build and maintain Linux
    distributions such as the
    <ulink url='http://www.angstrom-distribution.org/'>Angstrom Distribution</ulink>,
    and which is also being used as the build tool for Linux projects
    such as the
    <ulink url='http://www.yoctoproject.org'>Yocto Project</ulink>.
</para>

<para>
@@ -94,7 +88,7 @@
    an aspiring embedded Linux distribution.
    All of the build systems used by traditional desktop Linux
    distributions lacked important functionality, and none of the
    ad hoc Buildroot-based systems, prevalent in the
    embedded space, were scalable or maintainable.
</para>
@@ -144,7 +138,7 @@
        projects for their builds.
        </para></listitem>
    <listitem><para>
        Provide an inheritance mechanism to share
        common metadata between many packages.
        </para></listitem>
</itemizedlist>
@@ -157,7 +151,7 @@
        </para></listitem>
    <listitem><para>
        Split metadata into layers and allow layers
        to enhance or override other layers.
        </para></listitem>
    <listitem><para>
        Allow representation of a given set of input variables
@@ -184,14 +178,14 @@
    what tasks are required to run, and executes those tasks.
    Similar to GNU Make, BitBake controls how software is
    built.
    GNU Make achieves its control through "makefiles", while
    BitBake uses "recipes".
</para>

<para>
    BitBake extends the capabilities of a simple
    tool like GNU Make by allowing for the definition of much more
    complex tasks, such as assembling entire embedded Linux
    distributions.
</para>
@@ -209,20 +203,14 @@
    <filename>.bb</filename>, are the most basic metadata files.
    These recipe files provide BitBake with the following
    (a minimal sketch of such a recipe follows this list):
    <itemizedlist>
        <listitem><para>Descriptive information about the
            package (author, homepage, license, and so on)</para></listitem>
        <listitem><para>The version of the recipe</para></listitem>
        <listitem><para>Existing dependencies (both build
            and runtime dependencies)</para></listitem>
        <listitem><para>Where the source code resides and
            how to fetch it</para></listitem>
        <listitem><para>Whether the source code requires
            any patches, where to find them, and how to apply
            them</para></listitem>
        <listitem><para>How to configure and compile the
            source code</para></listitem>
        <listitem><para>Where on the target machine to install the
            package or packages created</para></listitem>
    </itemizedlist>
</para>
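
<para>
    The following hypothetical recipe fragment sketches several of those
    elements; every value is a placeholder:
    <literallayout class='monospaced'>
     SUMMARY = "An example utility"
     HOMEPAGE = "http://example.com/myutil"
     LICENSE = "MIT"
     DEPENDS = "zlib"
     SRC_URI = "http://example.com/myutil-${PV}.tar.gz \
                file://build-fix.patch"

     do_compile () {
         make
     }
    </literallayout>
</para>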
@@ -234,11 +222,7 @@
    The term "package" is also commonly used to describe recipes.
    However, since the same word is used to describe packaged
    output from a project, it is best to maintain a single
    descriptive term - "recipes".
    Put another way, a single "recipe" file is quite capable
    of generating a number of related but separately installable
    "packages".
    In fact, that ability is fairly common.
    </note>
</para>
</section>
@@ -273,7 +257,7 @@
    called <filename>base.bbclass</filename>.
    You can find this file in the
    <filename>classes</filename> directory.
    The <filename>base.bbclass</filename> class file is special since it
    is always included automatically for all recipes
    and classes.
    This class contains definitions for standard basic tasks such
@@ -300,8 +284,7 @@
    To illustrate how you can use layers to keep things modular,
    consider customizations you might make to support a specific target machine.
    These types of customizations typically reside in a special layer,
    rather than a general layer, called a Board Support Package (BSP)
    Layer.
    Furthermore, the machine customizations should be isolated from
    recipes and metadata that support a new GUI environment, for
    example.
@@ -321,8 +304,9 @@

<para>
    Append files, which are files that have the
    <filename>.bbappend</filename> file extension, extend or
    override information in an existing recipe file.
</para>

<para>
@@ -335,9 +319,8 @@
</para>

<para>
    Information in append files extends or
    overrides the information in the underlying,
    similarly-named recipe files.
</para>

<para>
@@ -362,12 +345,6 @@
    However, if you named the append file
    <filename>busybox_1.%.bbappend</filename>, then you would have a match.
</para>

<para>
    In the most general case, you could name the append file something as
    simple as <filename>busybox_%.bbappend</filename> to be entirely
    version independent.
</para>
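
<para>
    As an illustration, a version-independent append file might add a local
    patch; the <filename>FILESEXTRAPATHS</filename> usage follows
    OpenEmbedded convention and the patch name is hypothetical:
    <literallayout class='monospaced'>
     # busybox_%.bbappend
     FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"
     SRC_URI += "file://local-tweak.patch"
    </literallayout>
</para>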
</section>
</section>

@@ -436,20 +413,6 @@
        you have a directory entitled
        <filename>bitbake-1.17.0</filename>.
        </para></listitem>
    <listitem><para><emphasis>Using the BitBake that Comes With Your
        Build Checkout:</emphasis>
        A final possibility for getting a copy of BitBake is that it
        already comes with your checkout of a larger BitBake-based build
        system, such as Poky or the Yocto Project.
        Rather than manually checking out individual layers and
        gluing them together yourself, you can check
        out an entire build system.
        The checkout will already include a version of BitBake that
        has been thoroughly tested for compatibility with the other
        components.
        For information on how to check out a particular BitBake-based
        build system, consult that build system's supporting documentation.
        </para></listitem>
</itemizedlist>
</para>
</section>
@@ -508,16 +471,14 @@
  -D, --debug           Increase the debug level. You can specify this more
                        than once.
  -n, --dry-run         Don't execute, just go through the motions.
  -S SIGNATURE_HANDLER, --dump-signatures=SIGNATURE_HANDLER
                        Dump out the signature construction information, with
                        no task execution. The SIGNATURE_HANDLER parameter is
                        passed to the handler. Two common values are none and
                        printdiff but the handler may define more/less. none
                        means only dump the signature, printdiff means compare
                        the dumped signature with the cached one.
  -p, --parse-only      Quit after parsing the BB recipes.
  -s, --show-versions   Show current and preferred versions of all recipes.
  -e, --environment     Show the global or per-recipe environment complete
                        with information about where variables were
                        set/changed.
  -g, --graphviz        Save dependency tree information for the specified
@@ -532,8 +493,6 @@
  -u UI, --ui=UI        The user interface to use (e.g. knotty, hob, depexp).
  -t SERVERTYPE, --servertype=SERVERTYPE
                        Choose which server to use, process or xmlrpc.
  --token=XMLRPCTOKEN   Specify the connection token to be used when
                        connecting to a remote server.
  --revisions-changed   Set the exit code depending on whether upstream
                        floating revisions have changed or not.
  --server-only         Run bitbake without a UI, only starting a server
@@ -600,14 +559,14 @@
    when one wants to manage multiple <filename>.bb</filename>
    files.
    Clearly there needs to be a way to tell BitBake what
    files are available and, of those, which you
    want to execute.
    There also needs to be a way for each recipe
    to express its dependencies, both for build-time and
    runtime.
    There must be a way for you to express recipe preferences
    when multiple recipes provide the same functionality, or when
    there are multiple versions of a recipe.
</para>

<para>
@@ -159,10 +159,6 @@
    using the "+=" and "=+" operators.
    These operators insert a space between the current
    value and prepended or appended value.
</para>

<para>
    These operators take immediate effect during parsing.
    Here are some examples:
    <literallayout class='monospaced'>
     B = "bval"
     B += "additionaldata"
     C = "cval"
     C =+ "test"
    </literallayout>
</para>
@@ -182,10 +178,6 @@
<para>
    If you want to append or prepend values without an
    inserted space, use the ".=" and "=." operators.
</para>

<para>
    These operators take immediate effect during parsing.
    Here are some examples:
    <literallayout class='monospaced'>
     B = "bval"
     B .= "additionaldata"
     C = "cval"
     C =. "test"
    </literallayout>
</para>
@@ -206,13 +198,6 @@
<para>
    You can also append and prepend a variable's value
    using an override style syntax.
    When you use this syntax, no spaces are inserted.
</para>

<para>
    These operators differ from the ":=", ".=", "=.", "+=", and "=+"
    operators in that their effects are deferred
    until after parsing completes rather than being immediately
    applied.
    Here are some examples:
    <literallayout class='monospaced'>
     B = "bval"
     B_append = " additional data"
     C = "cval"
     C_prepend = "additional data "
    </literallayout>
</para>
@@ -232,6 +217,13 @@
    override syntax.
    </note>
</para>
</section>

<section id='removing-override-style-syntax'>
@@ -291,18 +283,7 @@
    The variable <filename>FOO</filename> has two flags:
    <filename>a</filename> and <filename>b</filename>.
    The flags are immediately set to "abc" and "123", respectively.
    The <filename>a</filename> flag becomes "abc 456".
</para>
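
<para>
    For reference, the flag assignments that produce those values look like
    the following minimal sketch:
    <literallayout class='monospaced'>
     FOO[a] = "abc"
     FOO[b] = "123"
     FOO[a] += "456"
    </literallayout>
</para>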

<para>
    No need exists to pre-define variable flags.
    You can simply start using them.
    One extremely common application
    is to attach some brief documentation to a BitBake variable as
    follows:
    <literallayout class='monospaced'>
     CACHE[doc] = "The directory holding the cache of the metadata."
    </literallayout>
</para>
</section>

@@ -317,19 +298,7 @@
     DATE = "${@time.strftime('%Y%m%d',time.gmtime())}"
    </literallayout>
    This example results in the <filename>DATE</filename>
    variable being set to the current date.
</para>

<para>
    Probably the most common use of this feature is to extract
    the value of variables from BitBake's internal data dictionary,
    <filename>d</filename>.
    The following lines select the values of a package name
    and its version number, respectively:
    <literallayout class='monospaced'>
     PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[0] or 'defaultpkgname'}"
     PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE'),d)[1] or '1.0'}"
    </literallayout>
</para>
</section>

@@ -403,25 +372,6 @@
    You select the os-specific version of the <filename>TEST</filename>
    variable by appending the "os" override to the variable
    (i.e. <filename>TEST_os</filename>).
    </para>

    <para>
    To better understand this, consider a practical example
    that assumes an OpenEmbedded metadata-based Linux
    kernel recipe file.
    The following lines from the recipe file first set
    the kernel branch variable <filename>KBRANCH</filename>
    to a default value, then conditionally override that
    value based on the architecture of the build:
    <literallayout class='monospaced'>
     KBRANCH = "standard/base"
     KBRANCH_qemuarm = "standard/arm-versatile-926ejs"
     KBRANCH_qemumips = "standard/mti-malta32"
     KBRANCH_qemuppc = "standard/qemuppc"
     KBRANCH_qemux86 = "standard/common-pc/base"
     KBRANCH_qemux86-64 = "standard/common-pc-64/base"
     KBRANCH_qemumips64 = "standard/mti-malta64"
    </literallayout>
    </para></listitem>
<listitem><para><emphasis>Appending and Prepending:</emphasis>
    BitBake also supports append and prepend operations to
@@ -435,19 +385,6 @@
    </literallayout>
    In this example, <filename>DEPENDS</filename> becomes
    "glibc ncurses libmad".
    </para>

    <para>
    Again, using an OpenEmbedded metadata-based
    kernel recipe file as an example, the
    following lines will conditionally append to the
    <filename>KERNEL_FEATURES</filename> variable based
    on the architecture:
    <literallayout class='monospaced'>
     KERNEL_FEATURES_append = " ${KERNEL_EXTRA_FEATURES}"
     KERNEL_FEATURES_append_qemux86=" cfg/sound.scc cfg/paravirt_kvm.scc"
     KERNEL_FEATURES_append_qemux86-64=" cfg/sound.scc cfg/paravirt_kvm.scc"
    </literallayout>
    </para></listitem>
</itemizedlist>
</para>
@@ -1203,6 +1140,12 @@
        Tells BitBake to not generate a stamp file for a task,
        which implies the task should always be executed.
        </para></listitem>
    <listitem><para><emphasis>fakeroot:</emphasis>
        Causes a task to be run in a fakeroot environment,
        obtained by adding the variables in
        <link linkend='var-FAKEROOTENV'><filename>FAKEROOTENV</filename></link>
        to the environment.
        </para></listitem>
    <listitem><para><emphasis>umask:</emphasis>
        The umask to run the task under.
        </para></listitem>
@@ -1523,9 +1466,9 @@
    complete before that task can be executed.
    Here is an example:
    <literallayout class='monospaced'>
     do_configure[deptask] = "do_populate_sysroot"
    </literallayout>
    In this example, the <filename>do_populate_sysroot</filename>
    task of each item in <filename>DEPENDS</filename> must complete before
    <filename>do_configure</filename> can execute.
</para>
@@ -1600,9 +1543,9 @@
    the data in <filename>DEPENDS</filename>.
    Here is an example:
    <literallayout class='monospaced'>
     do_patch[depends] = "quilt-native:do_populate_sysroot"
    </literallayout>
    In this example, the <filename>do_populate_sysroot</filename>
    task of the target <filename>quilt-native</filename>
    must have completed before the
    <filename>do_patch</filename> task can execute.
@@ -1560,16 +1560,8 @@
    BitBake uses <filename>OVERRIDES</filename> to control
    what variables are overridden after BitBake parses
    recipes and configuration files.
</para>

<para>
    Following is a simple example that uses an overrides
    list based on machine architectures:
    <literallayout class='monospaced'>
     OVERRIDES = "arm:x86:mips:powerpc"
    </literallayout>
    You can find information on how to use
    <filename>OVERRIDES</filename> in the
    "<link linkend='conditional-syntax-overrides'>Conditional Syntax (Overrides)</link>"
    section.
</para>
@@ -1752,28 +1744,16 @@
<glossentry id='var-PROVIDES'><glossterm>PROVIDES</glossterm>
    <glossdef>
        <para>
            A list of aliases by which a particular recipe can be
            known.
            By default, a recipe's own
            <filename><link linkend='var-PN'>PN</link></filename>
            is implicitly already in its <filename>PROVIDES</filename>
            list.
            If a recipe uses <filename>PROVIDES</filename>, the
            additional aliases are synonyms for the recipe and can
            be useful for satisfying dependencies of other recipes during
            the build as specified by
            <filename><link linkend='var-DEPENDS'>DEPENDS</link></filename>.
        </para>

        <para>
            Consider the following example
            <filename>PROVIDES</filename> statement from a recipe
            file <filename>libav_0.8.11.bb</filename>:
            <literallayout class='monospaced'>
     PROVIDES += "libpostproc"
            </literallayout>
            The <filename>PROVIDES</filename> statement results in
            the "libav" recipe also being known as "libpostproc".
        </para>
    </glossdef>
</glossentry>

@@ -313,13 +313,6 @@ a:hover {
  /*font-weight: bold;*/
}

/* This style defines how the permalink character
   appears by itself and when hovered over with
   the mouse. */

[alt='Permalink'] { color: #eee; }
[alt='Permalink']:hover { color: black; }


div.informalfigure,
div.informalexample,
@@ -800,6 +793,7 @@ div.sect2 .titlepage .title {

h1.title {
  background-color: transparent;
  background-image: url("figures/yocto-project-bw.png");
  background-repeat: no-repeat;
  height: 256px;
  text-indent: -9000px;
@@ -89,7 +89,7 @@ quit after parsing the BB files (developers only)
show current and preferred versions of all packages
.TP
.B \-e, \-\-environment
show the global or per-recipe environment (this is what used to be bbread)
.TP
.B \-g, \-\-graphviz
emit the dependency trees of the specified packages in the dot syntax
bitbake/doc/template/component.title.xsl (vendored)
@@ -1,39 +0,0 @@
<xsl:stylesheet version="1.0"
        xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
        xmlns:d="http://docbook.org/ns/docbook"
        xmlns="http://www.w3.org/1999/xhtml"
        exclude-result-prefixes="d">

  <xsl:template name="component.title">
    <xsl:param name="node" select="."/>

    <xsl:variable name="level">
      <xsl:choose>
        <xsl:when test="ancestor::d:section">
          <xsl:value-of select="count(ancestor::d:section)+1"/>
        </xsl:when>
        <xsl:when test="ancestor::d:sect5">6</xsl:when>
        <xsl:when test="ancestor::d:sect4">5</xsl:when>
        <xsl:when test="ancestor::d:sect3">4</xsl:when>
        <xsl:when test="ancestor::d:sect2">3</xsl:when>
        <xsl:when test="ancestor::d:sect1">2</xsl:when>
        <xsl:otherwise>1</xsl:otherwise>
      </xsl:choose>
    </xsl:variable>
    <xsl:element name="h{$level+1}" namespace="http://www.w3.org/1999/xhtml">
      <xsl:attribute name="class">title</xsl:attribute>
      <xsl:if test="$generate.id.attributes = 0">
        <xsl:call-template name="anchor">
          <xsl:with-param name="node" select="$node"/>
          <xsl:with-param name="conditional" select="0"/>
        </xsl:call-template>
      </xsl:if>
      <xsl:apply-templates select="$node" mode="object.title.markup">
        <xsl:with-param name="allow-anchors" select="1"/>
      </xsl:apply-templates>
      <xsl:call-template name="permalink">
        <xsl:with-param name="node" select="$node"/>
      </xsl:call-template>
    </xsl:element>
  </xsl:template>
</xsl:stylesheet>
bitbake/doc/template/division.title.xsl (vendored)
@@ -1,25 +0,0 @@
<xsl:stylesheet version="1.0"
        xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
        xmlns:d="http://docbook.org/ns/docbook"
        xmlns="http://www.w3.org/1999/xhtml"
        exclude-result-prefixes="d">

  <xsl:template name="division.title">
    <xsl:param name="node" select="."/>

    <h1>
      <xsl:attribute name="class">title</xsl:attribute>
      <xsl:call-template name="anchor">
        <xsl:with-param name="node" select="$node"/>
        <xsl:with-param name="conditional" select="0"/>
      </xsl:call-template>
      <xsl:apply-templates select="$node" mode="object.title.markup">
        <xsl:with-param name="allow-anchors" select="1"/>
      </xsl:apply-templates>
      <xsl:call-template name="permalink">
        <xsl:with-param name="node" select="$node"/>
      </xsl:call-template>
    </h1>
  </xsl:template>
</xsl:stylesheet>
bitbake/doc/template/formal.object.heading.xsl (vendored)
@@ -1,21 +0,0 @@
<xsl:stylesheet version="1.0"
        xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
        xmlns:d="http://docbook.org/ns/docbook"
        xmlns="http://www.w3.org/1999/xhtml"
        exclude-result-prefixes="d">

  <xsl:template name="formal.object.heading">
    <xsl:param name="object" select="."/>
    <xsl:param name="title">
      <xsl:apply-templates select="$object" mode="object.title.markup">
        <xsl:with-param name="allow-anchors" select="1"/>
      </xsl:apply-templates>
    </xsl:param>
    <p class="title">
      <b><xsl:copy-of select="$title"/></b>
      <xsl:call-template name="permalink">
        <xsl:with-param name="node" select="$object"/>
      </xsl:call-template>
    </p>
  </xsl:template>
</xsl:stylesheet>
bitbake/doc/template/gloss-permalinks.xsl (vendored)
@@ -1,14 +0,0 @@
<xsl:stylesheet version="1.0"
        xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
        xmlns:d="http://docbook.org/ns/docbook"
        xmlns="http://www.w3.org/1999/xhtml">

  <xsl:template match="glossentry/glossterm">
    <xsl:apply-imports/>
    <xsl:if test="$generate.permalink != 0">
      <xsl:call-template name="permalink">
        <xsl:with-param name="node" select=".."/>
      </xsl:call-template>
    </xsl:if>
  </xsl:template>
</xsl:stylesheet>
bitbake/doc/template/permalinks.xsl (vendored)
@@ -1,25 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<xsl:stylesheet version="1.0"
        xmlns="http://www.w3.org/1999/xhtml"
        xmlns:xsl="http://www.w3.org/1999/XSL/Transform">

  <xsl:param name="generate.permalink" select="1"/>
  <xsl:param name="permalink.text">¶</xsl:param>

  <xsl:template name="permalink">
    <xsl:param name="node"/>

    <xsl:if test="$generate.permalink != '0'">
      <span class="permalink">
        <a alt="Permalink" title="Permalink">
          <xsl:attribute name="href">
            <xsl:call-template name="href.target">
              <xsl:with-param name="object" select="$node"/>
            </xsl:call-template>
          </xsl:attribute>
          <xsl:copy-of select="$permalink.text"/>
        </a>
      </span>
    </xsl:if>
  </xsl:template>
</xsl:stylesheet>
bitbake/doc/template/section.title.xsl (vendored)
@@ -1,55 +0,0 @@
<xsl:stylesheet version="1.0"
        xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
        xmlns:d="http://docbook.org/ns/docbook"
        xmlns="http://www.w3.org/1999/xhtml" exclude-result-prefixes="d">

  <xsl:template name="section.title">
    <xsl:variable name="section"
                  select="(ancestor::section |
                           ancestor::simplesect|
                           ancestor::sect1|
                           ancestor::sect2|
                           ancestor::sect3|
                           ancestor::sect4|
                           ancestor::sect5)[last()]"/>

    <xsl:variable name="renderas">
      <xsl:choose>
        <xsl:when test="$section/@renderas = 'sect1'">1</xsl:when>
        <xsl:when test="$section/@renderas = 'sect2'">2</xsl:when>
        <xsl:when test="$section/@renderas = 'sect3'">3</xsl:when>
        <xsl:when test="$section/@renderas = 'sect4'">4</xsl:when>
        <xsl:when test="$section/@renderas = 'sect5'">5</xsl:when>
        <xsl:otherwise><xsl:value-of select="''"/></xsl:otherwise>
      </xsl:choose>
    </xsl:variable>

    <xsl:variable name="level">
      <xsl:choose>
        <xsl:when test="$renderas != ''">
          <xsl:value-of select="$renderas"/>
        </xsl:when>
        <xsl:otherwise>
          <xsl:call-template name="section.level">
            <xsl:with-param name="node" select="$section"/>
          </xsl:call-template>
        </xsl:otherwise>
      </xsl:choose>
    </xsl:variable>

    <xsl:call-template name="section.heading">
      <xsl:with-param name="section" select="$section"/>
      <xsl:with-param name="level" select="$level"/>
      <xsl:with-param name="title">
        <xsl:apply-templates select="$section" mode="object.title.markup">
          <xsl:with-param name="allow-anchors" select="1"/>
        </xsl:apply-templates>
        <xsl:if test="$level > 0">
          <xsl:call-template name="permalink">
            <xsl:with-param name="node" select="$section"/>
          </xsl:call-template>
        </xsl:if>
      </xsl:with-param>
    </xsl:call-template>
  </xsl:template>
</xsl:stylesheet>
@@ -21,7 +21,7 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

__version__ = "1.25.0"

import sys
if sys.version_info < (2, 7, 3):
@@ -99,11 +99,12 @@ def error(*args):

def fatal(*args):
    logger.critical(''.join(args))
    raise BBHandledException()


def deprecated(func, name=None, advice=""):
    """This is a decorator which can be used to mark functions
    as deprecated. It will result in a warning being emitted
    when the function is used."""
    import warnings

@@ -23,7 +23,7 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig

import os
import sys
@@ -42,8 +42,9 @@ logger = logging.getLogger('BitBake.Build')

NULL = open(os.devnull, 'r+')

# When we execute a Python function, we'd like certain things
# in all namespaces, hence we add them to __builtins__.
# If we do not do this and use the exec globals, they will
# not be available to subfunctions.
__builtins__['bb'] = bb
@@ -142,7 +143,7 @@ class LogTee(object):
        self.outfile.flush()

def exec_func(func, d, dirs = None):
    """Execute a BB 'function'"""

    body = d.getVar(func)
    if not body:
@@ -227,7 +228,7 @@ def exec_func_python(func, d, runfile, cwd=None):
    code = _functionfmt.format(function=func, body=d.getVar(func, True))
    bb.utils.mkdirhier(os.path.dirname(runfile))
    with open(runfile, 'w') as script:
        bb.data.emit_func_python(func, script, d)
        script.write(code)

    if cwd:
        try:
@@ -241,9 +242,10 @@ def exec_func_python(func, d, runfile, cwd=None):
    try:
        comp = utils.better_compile(code, func, bbfile)
        utils.better_exec(comp, {"d": d}, code, bbfile)
    except (bb.parse.SkipRecipe, bb.build.FuncFailed):
        raise
    except:
        raise FuncFailed(func, None)
    finally:
        bb.debug(2, "Python function %s finished" % func)
@@ -416,7 +418,7 @@ def _exec_task(fn, task, d, quieterr):
    os.dup2(logfile.fileno(), oso[1])
    os.dup2(logfile.fileno(), ose[1])

    # Ensure Python logging goes to the logfile
    handler = logging.StreamHandler(logfile)
    handler.setFormatter(logformatter)
    # Always enable full debug output into task logfiles
@@ -505,7 +507,7 @@ def exec_task(fn, task, d, profile = False):
    event.fire(failedevent, d)
    return 1

def stamp_internal(taskname, d, file_name, baseonly=False):
    """
    Internal stamp helper function
    Makes sure the stamp directory exists
@@ -526,9 +528,6 @@ def stamp_internal(taskname, d, file_name, baseonly=False):
        file_name = d.getVar('BB_FILENAME', True)
        extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info', True) or ""

    if baseonly:
        return stamp

    if not stamp:
        return
@@ -593,9 +592,8 @@ def make_stamp(task, d, file_name = None):
    # If we're in task context, write out a signature file for each task
    # as it completes
    if not task.endswith("_setscene") and task != "do_setscene" and not file_name:
        stampbase = stamp_internal(task, d, None, True)
        file_name = d.getVar('BB_FILENAME', True)
        bb.parse.siggen.dump_sigtask(file_name, task, stampbase, True)

def del_stamp(task, d, file_name = None):
    """
@@ -225,16 +225,14 @@ class CoreRecipeInfo(RecipeInfoCommon):
        for package in self.packages_dynamic:
            cachedata.packages_dynamic[package].append(fn)

        # Build hash of runtime depends and recommends
        for package in self.packages + [self.pn]:
            cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
            cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]

        # Collect files we may need for possible world-dep
        # calculations
        if self.not_world:
            logger.debug(1, "EXCLUDE FROM WORLD: %s", fn)
        else:
            cachedata.possible_world.append(fn)

        # create a collection of all targets for sanity checking
@@ -261,7 +259,7 @@ class Cache(object):

    def __init__(self, data, data_hash, caches_array):
        # Pass caches_array information into Cache Constructor
        # It will be used later for deciding whether we
        # need extra cache file dump/load support
        self.caches_array = caches_array
        self.cachedir = data.getVar("CACHE", True)
@@ -694,7 +692,7 @@ def init(cooker):

    * Its mtime
    * The mtimes of all its dependencies
    * Whether it caused a parse.SkipRecipe exception

    Files causing parsing errors are evicted from the cache.

@@ -764,6 +762,16 @@ class MultiProcessCache(object):

        self.cachedata = data

    def create_cachedata(self):
        data = [{}]
        return data
@@ -804,7 +812,15 @@ class MultiProcessCache(object):

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        data = self.cachedata

        for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
            f = os.path.join(os.path.dirname(self.cachefile), f)
@@ -813,16 +829,16 @@ class MultiProcessCache(object):
                    p = pickle.Unpickler(fd)
                    extradata, version = p.load()
            except (IOError, EOFError):
                os.unlink(f)
                continue

            if version != self.__class__.CACHE_VERSION:
                os.unlink(f)
                continue

            self.merge_data(extradata, data)
            os.unlink(f)

        with open(self.cachefile, "wb") as f:
            p = pickle.Pickler(f, -1)
            p.dump([data, self.__class__.CACHE_VERSION])

@@ -33,82 +33,9 @@ def check_indent(codestr):
    return codestr


# Basically pickle, in python 2.7.3 at least, does badly with data duplication
# upon pickling and unpickling. Combine this with duplicate objects and things
# are a mess.
#
# When the sets are originally created, python calls intern() on the set keys
# which significantly improves memory usage. Sadly the pickle/unpickle process
# doesn't call intern() on the keys and results in the same strings being duplicated
# in memory. This also means pickle will save the same string multiple times in
# the cache file.
#
# By having shell and python cacheline objects with setstate/getstate, we force
# the object creation through our own routine where we can call intern (via internSet).
#
# We also use hashable frozensets and ensure we use references to these so that
# duplicates can be removed, both in memory and in the resulting pickled data.
#
# By playing these games, the size of the cache file shrinks dramatically
# meaning faster load times and the reloaded cache files also consume much less
# memory. Smaller cache files, faster load times and lower memory usage is good.
#
# A custom getstate/setstate using tuples is actually worth 15% cachesize by
# avoiding duplication of the attribute names!

class SetCache(object):
    def __init__(self):
        self.setcache = {}

    def internSet(self, items):
        new = []
        for i in items:
            new.append(intern(i))
        s = frozenset(new)
        if hash(s) in self.setcache:
            return self.setcache[hash(s)]
        self.setcache[hash(s)] = s
        return s

codecache = SetCache()

class pythonCacheLine(object):
    def __init__(self, refs, execs, contains):
        self.refs = codecache.internSet(refs)
        self.execs = codecache.internSet(execs)
        self.contains = {}
        for c in contains:
            self.contains[c] = codecache.internSet(contains[c])

    def __getstate__(self):
        return (self.refs, self.execs, self.contains)

    def __setstate__(self, state):
        (refs, execs, contains) = state
        self.__init__(refs, execs, contains)

    def __hash__(self):
        l = (hash(self.refs), hash(self.execs))
        for c in sorted(self.contains.keys()):
            l = l + (c, hash(self.contains[c]))
        return hash(l)

class shellCacheLine(object):
    def __init__(self, execs):
        self.execs = codecache.internSet(execs)

    def __getstate__(self):
        return (self.execs)

    def __setstate__(self, state):
        (execs) = state
        self.__init__(execs)

    def __hash__(self):
        return hash(self.execs)

class CodeParserCache(MultiProcessCache):
    cache_file_name = "bb_codeparser.dat"
    CACHE_VERSION = 7

    def __init__(self):
        MultiProcessCache.__init__(self)
@@ -117,27 +44,6 @@ class CodeParserCache(MultiProcessCache):
        self.pythoncacheextras = self.cachedata_extras[0]
        self.shellcacheextras = self.cachedata_extras[1]

        # To avoid duplication in the codeparser cache, keep
        # a lookup of hashes of objects we already have
        self.pythoncachelines = {}
        self.shellcachelines = {}

    def newPythonCacheLine(self, refs, execs, contains):
        cacheline = pythonCacheLine(refs, execs, contains)
        h = hash(cacheline)
        if h in self.pythoncachelines:
            return self.pythoncachelines[h]
        self.pythoncachelines[h] = cacheline
        return cacheline

    def newShellCacheLine(self, execs):
        cacheline = shellCacheLine(execs)
        h = hash(cacheline)
        if h in self.shellcachelines:
            return self.shellcachelines[h]
        self.shellcachelines[h] = cacheline
        return cacheline

    def init_cache(self, d):
        MultiProcessCache.init_cache(self, d)
@@ -145,6 +51,25 @@ class CodeParserCache(MultiProcessCache):
|
||||
self.pythoncache = self.cachedata[0]
|
||||
self.shellcache = self.cachedata[1]
|
||||
|
||||
def compress_keys(self, data):
|
||||
# When the dicts are originally created, python calls intern() on the set keys
|
||||
# which significantly improves memory usage. Sadly the pickle/unpickle process
|
||||
# doesn't call intern() on the keys and results in the same strings being duplicated
|
||||
# in memory. This also means pickle will save the same string multiple times in
|
||||
# the cache file. By interning the data here, the cache file shrinks dramatically
|
||||
# meaning faster load times and the reloaded cache files also consume much less
|
||||
# memory. This is worth any performance hit from this loops and the use of the
|
||||
# intern() data storage.
|
||||
# Python 3.x may behave better in this area
|
||||
for h in data[0]:
|
||||
data[0][h]["refs"] = self.internSet(data[0][h]["refs"])
|
||||
data[0][h]["execs"] = self.internSet(data[0][h]["execs"])
|
||||
for k in data[0][h]["contains"]:
|
||||
data[0][h]["contains"][k] = self.internSet(data[0][h]["contains"][k])
|
||||
for h in data[1]:
|
||||
data[1][h]["execs"] = self.internSet(data[1][h]["execs"])
|
||||
return
|
||||
|
||||
def create_cachedata(self):
|
||||
data = [{}, {}]
|
||||
return data
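
The compress_keys() comment boils down to: pickle round-trips equal strings as separate objects, and re-interning them restores sharing. A quick standalone check of that behaviour (plain Python, my own example strings):

import pickle
import sys

s1 = "".join(["do_", "compile"])
s2 = "".join(["do_comp", "ile"])
assert s1 == s2 and s1 is not s2       # equal value, two objects

# Two distinct-but-equal strings are stored twice in the pickle...
r1, r2 = pickle.loads(pickle.dumps([s1, s2]))
assert r1 == r2 and r1 is not r2       # ...and come back as two objects

# Re-interning after unpickling collapses them to one shared object:
assert sys.intern(r1) is sys.intern(r2)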
@@ -178,7 +103,7 @@ class BufferedLogger(Logger):

class PythonParser():
    getvars = (".getVar", ".appendVar", ".prependVar")
    containsfuncs = ("bb.utils.contains", "base_contains", "bb.utils.contains_any")
    containsfuncs = ("bb.utils.contains", "base_contains", "oe.utils.contains")
    execfuncs = ("bb.build.exec_func", "bb.build.exec_task")

    def warn(self, func, arg):
@@ -243,19 +168,15 @@ class PythonParser():
        h = hash(str(node))

        if h in codeparsercache.pythoncache:
            self.references = set(codeparsercache.pythoncache[h].refs)
            self.execs = set(codeparsercache.pythoncache[h].execs)
            self.contains = {}
            for i in codeparsercache.pythoncache[h].contains:
                self.contains[i] = set(codeparsercache.pythoncache[h].contains[i])
            self.references = codeparsercache.pythoncache[h]["refs"]
            self.execs = codeparsercache.pythoncache[h]["execs"]
            self.contains = codeparsercache.pythoncache[h]["contains"]
            return

        if h in codeparsercache.pythoncacheextras:
            self.references = set(codeparsercache.pythoncacheextras[h].refs)
            self.execs = set(codeparsercache.pythoncacheextras[h].execs)
            self.contains = {}
            for i in codeparsercache.pythoncacheextras[h].contains:
                self.contains[i] = set(codeparsercache.pythoncacheextras[h].contains[i])
            self.references = codeparsercache.pythoncacheextras[h]["refs"]
            self.execs = codeparsercache.pythoncacheextras[h]["execs"]
            self.contains = codeparsercache.pythoncacheextras[h]["contains"]
            return

        code = compile(check_indent(str(node)), "<string>", "exec",
@@ -267,7 +188,10 @@ class PythonParser():

        self.execs.update(self.var_execs)

        codeparsercache.pythoncacheextras[h] = codeparsercache.newPythonCacheLine(self.references, self.execs, self.contains)
        codeparsercache.pythoncacheextras[h] = {}
        codeparsercache.pythoncacheextras[h]["refs"] = self.references
        codeparsercache.pythoncacheextras[h]["execs"] = self.execs
        codeparsercache.pythoncacheextras[h]["contains"] = self.contains

class ShellParser():
    def __init__(self, name, log):
@@ -286,17 +210,18 @@ class ShellParser():
        h = hash(str(value))

        if h in codeparsercache.shellcache:
            self.execs = set(codeparsercache.shellcache[h].execs)
            self.execs = codeparsercache.shellcache[h]["execs"]
            return self.execs

        if h in codeparsercache.shellcacheextras:
            self.execs = set(codeparsercache.shellcacheextras[h].execs)
            self.execs = codeparsercache.shellcacheextras[h]["execs"]
            return self.execs

        self._parse_shell(value)
        self.execs = set(cmd for cmd in self.allexecs if cmd not in self.funcdefs)

        codeparsercache.shellcacheextras[h] = codeparsercache.newShellCacheLine(self.execs)
        codeparsercache.shellcacheextras[h] = {}
        codeparsercache.shellcacheextras[h]["execs"] = self.execs

        return self.execs


@@ -86,10 +86,7 @@ class Command:

    def runAsyncCommand(self):
        try:
            if self.cooker.state in (bb.cooker.state.error, bb.cooker.state.shutdown, bb.cooker.state.forceshutdown):
                # updateCache will trigger a shutdown of the parser
                # and then raise BBHandledException triggering an exit
                self.cooker.updateCache()
            if self.cooker.state == bb.cooker.state.error:
                return False
            if self.currentAsyncCommand is not None:
                (command, options) = self.currentAsyncCommand
@@ -271,10 +268,6 @@ class CommandsSync:
        # we always take and leave the cooker in state.initial
        setFeatures.readonly = True

    def updateConfig(self, command, params):
        options = params[0]
        command.cooker.updateConfigOpts(options)

class CommandsAsync:
    """
    A class of asynchronous commands

@@ -153,25 +153,20 @@ class BBCooker:
        self.parser = None

        signal.signal(signal.SIGTERM, self.sigterm_exception)
        # Let SIGHUP exit as SIGTERM
        signal.signal(signal.SIGHUP, self.sigterm_exception)

    def sigterm_exception(self, signum, stackframe):
        if signum == signal.SIGTERM:
            bb.warn("Cooker recieved SIGTERM, shutting down...")
        elif signum == signal.SIGHUP:
            bb.warn("Cooker recieved SIGHUP, shutting down...")
        bb.warn("Cooker recieved SIGTERM, shutting down...")
        self.state = state.forceshutdown

    def setFeatures(self, features):
        # we only accept a new feature set if we're in state initial, so we can reset without problems
        if self.state != state.initial and self.state != state.error:
        if self.state != state.initial:
            raise Exception("Illegal state for feature set change")
        original_featureset = list(self.featureset)
        for feature in features:
            self.featureset.setFeature(feature)
        bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
        if (original_featureset != list(self.featureset)) and self.state != state.error:
        if (original_featureset != list(self.featureset)):
            self.reset()

    def initConfigurationData(self):
@@ -245,7 +240,7 @@ class BBCooker:
            f.write(total)

        #add to history
        loginfo = {"op":"append", "file":default_file, "line":total.count("\n")}
        loginfo = {"op":append, "file":default_file, "line":total.count("\n")}
        self.data.appendVar(var, val, **loginfo)

    def saveConfigurationVar(self, var, val, default_file, op):
@@ -314,7 +309,7 @@ class BBCooker:
            f.write(total)

        #add to history
        loginfo = {"op":"set", "file":default_file, "line":total.count("\n")}
        loginfo = {"op":set, "file":default_file, "line":total.count("\n")}
        self.data.setVar(var, val, **loginfo)

    def removeConfigurationVar(self, var):
@@ -376,10 +371,6 @@ class BBCooker:

        self.handleCollections( self.data.getVar("BBFILE_COLLECTIONS", True) )

    def updateConfigOpts(self,options):
        for o in options:
            setattr(self.configuration, o, options[o])

    def runCommands(self, server, data, abort):
        """
        Run any queued asynchronous command
@@ -411,7 +402,7 @@ class BBCooker:

    def showEnvironment(self, buildfile = None, pkgs_to_build = []):
        """
        Show the outer or per-recipe environment
        Show the outer or per-package environment
        """
        fn = None
        envdata = None
@@ -827,6 +818,7 @@ class BBCooker:
        or to find all machine configuration files one could call:
        findFilesMatchingInDir(self, 'conf/machines', 'conf')
        """
        import re

        matches = []
        p = re.compile(re.escape(filepattern))
@@ -1236,6 +1228,7 @@ class BBCooker:
        '''
        Create a new image with a "require"/"inherit" base_image statement
        '''
        import re
        if timestamp:
            image_name = os.path.splitext(image)[0]
            timestr = time.strftime("-%Y%m%d-%H%M%S")
@@ -1415,6 +1408,7 @@ class CookerExit(bb.event.Event):
class CookerCollectFiles(object):
    def __init__(self, priorities):
        self.appendlist = {}
        self.bbappends = []
        self.appliedappendlist = []
        self.bbfile_config_priorities = priorities

@@ -1444,7 +1438,7 @@ class CookerCollectFiles(object):
            for ignored in ('SCCS', 'CVS', '.svn'):
                if ignored in dirs:
                    dirs.remove(ignored)
            found += [os.path.join(dir, f) for f in files if (f.endswith(['.bb', '.bbappend']))]
            found += [os.path.join(dir, f) for f in files if (f.endswith('.bb') or f.endswith('.bbappend'))]

        return found

@@ -1509,6 +1503,7 @@ class CookerCollectFiles(object):
        # Build a list of .bbappend files for each .bb file
        for f in bbappend:
            base = os.path.basename(f).replace('.bbappend', '.bb')
            self.bbappends.append((base, f))
            if not base in self.appendlist:
                self.appendlist[base] = []
            if f not in self.appendlist[base]:
@@ -1534,11 +1529,11 @@ class CookerCollectFiles(object):
        """
        filelist = []
        f = os.path.basename(fn)
        for bbappend in self.appendlist:
        for b in self.bbappends:
            (bbappend, filename) = b
            if (bbappend == f) or ('%' in bbappend and bbappend.startswith(f[:bbappend.index('%')])):
                self.appliedappendlist.append(bbappend)
                for filename in self.appendlist[bbappend]:
                    filelist.append(filename)
                filelist.append(filename)
        return filelist

    def collection_priorities(self, pkgfns):
@@ -1558,10 +1553,10 @@ class CookerCollectFiles(object):
                unmatched.add(regex)

        def findmatch(regex):
            for bbfile in self.appendlist:
                for append in self.appendlist[bbfile]:
                    if regex.match(append):
                        return True
            for b in self.bbappends:
                (bbfile, append) = b
                if regex.match(append):
                    return True
            return False

        for unmatch in unmatched.copy():

@@ -69,17 +69,6 @@ class ConfigParameters(object):
        if bbpkgs:
            self.options.pkgs_to_build.extend(bbpkgs.split())

    def updateToServer(self, server):
        options = {}
        for o in ["abort", "tryaltconfigs", "force", "invalidate_stamp",
                  "verbose", "debug", "dry_run", "dump_signatures",
                  "debug_domains", "extra_assume_provided", "profile"]:
            options[o] = getattr(self.options, o)

        ret, error = server.runCommand(["updateConfig", options])
        if error:
            raise Exception("Unable to update the server configuration with local parameters: %s" % error)

    def parseActions(self):
        # Parse any commandline into actions
        action = {'action':None, 'msg':None}
@@ -238,13 +227,10 @@ class CookerDataBuilder(object):
        try:
            self.parseConfigurationFiles(self.prefiles, self.postfiles)
        except SyntaxError:
            raise bb.BBHandledException
        except bb.data_smart.ExpansionError as e:
            logger.error(str(e))
            raise bb.BBHandledException
            sys.exit(1)
        except Exception:
            logger.exception("Error parsing configuration files")
            raise bb.BBHandledException
            sys.exit(1)

    def _findLayerConf(self, data):
        return findConfigFile("bblayers.conf", data)

@@ -1,5 +1,5 @@
"""
Python Daemonizing helper
Python Deamonizing helper

Configurable daemon behaviors:

@@ -12,11 +12,8 @@ A failed call to fork() now raises an exception.

References:
    1) Advanced Programming in the Unix Environment: W. Richard Stevens
        http://www.apuebook.com/apue3e.html
    2) The Linux Programming Interface: Michael Kerrisk
        http://man7.org/tlpi/index.html
    3) Unix Programming Frequently Asked Questions:
        http://www.faqs.org/faqs/unix-faq/programmer/faq/
    2) Unix Programming Frequently Asked Questions:
        http://www.erlenstar.demon.co.uk/unix/faq_toc.html

Modified to allow a function to be daemonized and return for
bitbake use by Richard Purdie
@@ -28,7 +25,7 @@ __version__ = "0.2"

# Standard Python modules.
import os    # Miscellaneous OS interfaces.
import sys   # System-specific parameters and functions.
import sys   # System-specific parameters and functions.

# Default daemon parameters.
# File mode creation mask of the daemon.
@@ -131,7 +128,7 @@ def createDaemon(function, logfile):
    # of methods to accomplish this task. Three are listed below.
    #
    # Try the system configuration variable, SC_OPEN_MAX, to obtain the maximum
    # number of open file descriptors to close. If it doesn't exist, use
    # number of open file descriptors to close. If it doesn't exists, use
    # the default value (configurable).
    #
    # try:
@@ -149,7 +146,7 @@ def createDaemon(function, logfile):
    # OR
    #
    # Use the getrlimit method to retrieve the maximum file descriptor number
    # that can be opened by this process. If there is no limit on the
    # that can be opened by this process. If there is not limit on the
    # resource, use the default value.
    #
    import resource    # Resource usage information.

@@ -6,7 +6,7 @@ BitBake 'Data' implementations
Functions for interacting with the data structure used by the
BitBake build tools.

The expandKeys and update_data are the most expensive
The expandData and update_data are the most expensive
operations. At night the cookie monster came by and
suggested 'give me cookies on setting the variables and
things will work out'. Taking this suggestion into account
@@ -15,7 +15,7 @@ Analyse von Algorithmen' lecture and the cookie
monster seems to be right. We will track setVar more carefully
to have faster update_data and expandKeys operations.

This is a trade-off between speed and memory again but
This is a treade-off between speed and memory again but
the speed is more critical here.
"""

@@ -35,7 +35,7 @@ the speed is more critical here.
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
#Based on functions from the base bb module, Copyright 2003 Holger Schurig

import sys, os, re
if sys.argv[0][-5:] == "pydoc":
@@ -231,6 +231,7 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):
        # to a shell, we need to escape the quotes in the var
        alter = re.sub('"', '\\"', val)
        alter = re.sub('\n', ' \\\n', alter)
        alter = re.sub('\\$', '\\\\$', alter)
        o.write('%s="%s"\n' % (varExpanded, alter))
    return 0

@@ -281,41 +282,6 @@ def emit_func(func, o=sys.__stdout__, d = init()):
        newdeps |= set((d.getVarFlag(dep, "vardeps", True) or "").split())
        newdeps -= seen

_functionfmt = """
def {function}(d):
{body}"""

def emit_func_python(func, o=sys.__stdout__, d = init()):
    """Emits all items in the data store in a format such that it can be sourced by a shell."""

    def write_func(func, o, call = False):
        body = d.getVar(func, True)
        if not body.startswith("def"):
            body = _functionfmt.format(function=func, body=body)

        o.write(body.strip() + "\n\n")
        if call:
            o.write(func + "(d)" + "\n\n")

    write_func(func, o, True)
    pp = bb.codeparser.PythonParser(func, logger)
    pp.parse_python(d.getVar(func, True))
    newdeps = pp.execs
    newdeps |= set((d.getVarFlag(func, "vardeps", True) or "").split())
    seen = set()
    while newdeps:
        deps = newdeps
        seen |= deps
        newdeps = set()
        for dep in deps:
            if d.getVarFlag(dep, "func") and d.getVarFlag(dep, "python"):
                write_func(dep, o)
                pp = bb.codeparser.PythonParser(dep, logger)
                pp.parse_python(d.getVar(dep, True))
                newdeps |= pp.execs
            newdeps |= set((d.getVarFlag(dep, "vardeps", True) or "").split())
        newdeps -= seen

def update_data(d):
    """Performs final steps upon the datastore, including application of overrides"""
    d.finalize(parent = True)

@@ -263,7 +263,7 @@ class VariableHistory(object):
                    flag = ''
                o.write("# %s %s:%s%s\n# %s\"%s\"\n" % (event['op'], event['file'], event['line'], display_func, flag, re.sub('\n', '\n# ', event['detail'])))
            if len(history) > 1:
                o.write("# pre-expansion value:\n")
                o.write("# computed:\n")
                o.write('# "%s"\n' % (commentVal))
        else:
            o.write("#\n# $%s\n# [no history recorded]\n#\n" % var)
@@ -334,7 +334,7 @@ class DataSmart(MutableMapping):
                break
            except ExpansionError:
                raise
            except bb.parse.SkipRecipe:
            except bb.parse.SkipPackage:
                raise
            except Exception as exc:
                raise ExpansionError(varname, s, exc)
@@ -513,15 +513,10 @@ class DataSmart(MutableMapping):
    def _setvar_update_overrides(self, var):
        # aka pay the cookie monster
        override = var[var.rfind('_')+1:]
        shortvar = var[:var.rfind('_')]
        while override:
            if len(override) > 0:
                if override not in self._seen_overrides:
                    self._seen_overrides[override] = set()
                self._seen_overrides[override].add( var )
            override = None
            if "_" in shortvar:
                override = var[shortvar.rfind('_')+1:]
                shortvar = var[:shortvar.rfind('_')]
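
_setvar_update_overrides() peels override suffixes off a variable name from the right, so both the final component and each longer trailing suffix get registered. A standalone restatement of just that loop (my own wrapper, for illustration):

def override_components(var):
    # "FOO_linux_arm" yields "arm" first, then the longer suffix "linux_arm"
    found = []
    override = var[var.rfind('_')+1:]
    shortvar = var[:var.rfind('_')]
    while override:
        found.append(override)
        override = None
        if "_" in shortvar:
            override = var[shortvar.rfind('_')+1:]
            shortvar = var[:shortvar.rfind('_')]
    return found

print(override_components("FOO_linux_arm"))   # ['arm', 'linux_arm']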

    def getVar(self, var, expand=False, noweakdefault=False):
        return self.getVarFlag(var, "_content", expand, noweakdefault)
@@ -589,7 +584,7 @@ class DataSmart(MutableMapping):
            self._makeShadowCopy(var)
        self.dict[var][flag] = value

        if flag == "_defaultval" and '_' in var:
        if flag == "defaultval" and '_' in var:
            self._setvar_update_overrides(var)

        if flag == "unexport" or flag == "export":
@@ -605,8 +600,8 @@ class DataSmart(MutableMapping):
        if local_var is not None:
            if flag in local_var:
                value = copy.copy(local_var[flag])
            elif flag == "_content" and "_defaultval" in local_var and not noweakdefault:
                value = copy.copy(local_var["_defaultval"])
            elif flag == "_content" and "defaultval" in local_var and not noweakdefault:
                value = copy.copy(local_var["defaultval"])
        if expand and value:
            # Only getvar (flag == _content) hits the expand cache
            cachename = None
@@ -616,9 +611,8 @@ class DataSmart(MutableMapping):
                cachename = var + "[" + flag + "]"
            value = self.expand(value, cachename)
        if value and flag == "_content" and local_var is not None and "_removeactive" in local_var:
            removes = [self.expand(r) for r in local_var["_removeactive"]]
            filtered = filter(lambda v: v not in removes,
                              value.split())
            filtered = filter(lambda v: v not in local_var["_removeactive"],
                              value.split(" "))
            value = " ".join(filtered)
        if expand:
            # We need to ensure the expand cache has the correct value

@@ -72,7 +72,7 @@ def execute_handler(name, handler, event, d):
    event.data = d
    try:
        ret = handler(event)
    except (bb.parse.SkipRecipe, bb.BBHandledException):
    except bb.parse.SkipPackage:
        raise
    except Exception:
        etype, value, tb = sys.exc_info()
@@ -94,7 +94,10 @@ def fire_class_handlers(event, d):
    evt_hmap = _event_handler_map.get(eid, {})
    for name, handler in _handlers.iteritems():
        if name in _catchall_handlers or name in evt_hmap:
            execute_handler(name, handler, event, d)
            try:
                execute_handler(name, handler, event, d)
            except Exception:
                continue

ui_queue = []
@atexit.register
@@ -594,11 +597,11 @@ class MetadataEvent(Event):
    def __init__(self, eventtype, eventdata):
        Event.__init__(self)
        self.type = eventtype
        self._localdata = eventdata
        self.data = eventdata

class SanityCheck(Event):
    """
    Event to run sanity checks, either raise errors or generate events as return status.
    Event to runs sanity checks, either raise errors or generate events as return status.
    """
    def __init__(self, generateevents = True):
        Event.__init__(self)
@@ -606,7 +609,7 @@ class SanityCheck(Event):

class SanityCheckPassed(Event):
    """
    Event to indicate sanity check has passed
    Event to indicate sanity check is passed
    """

class SanityCheckFailed(Event):

@@ -56,11 +56,8 @@ class BBFetchException(Exception):

class MalformedUrl(BBFetchException):
    """Exception raised when encountering an invalid url"""
    def __init__(self, url, message=''):
        if message:
            msg = message
        else:
            msg = "The URL: '%s' is invalid and cannot be interpreted" % url
    def __init__(self, url):
        msg = "The URL: '%s' is invalid and cannot be interpreted" % url
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (url,)
@@ -374,11 +371,8 @@ def decodeurl(url):
    p = {}
    if parm:
        for s in parm.split(';'):
            if s:
                if not '=' in s:
                    raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s))
                s1, s2 = s.split('=')
                p[s1] = s2
            s1, s2 = s.split('=')
            p[s1] = s2

    return type, host, urllib.unquote(path), user, pswd, p
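
The new decodeurl() behaviour rejects SRC_URI parameters that lack an '='. A trimmed, standalone sketch of just the parameter loop (ValueError stands in for MalformedUrl here):

def parse_params(parm, url):
    p = {}
    for s in parm.split(';'):
        if not s:
            continue
        if '=' not in s:
            raise ValueError("The URL: '%s' is invalid: parameter %s does not "
                             "specify a value (missing '=')" % (url, s))
        key, value = s.split('=', 1)
        p[key] = value
    return p

print(parse_params("protocol=https;branch=master", "git://example.com/repo.git"))
# {'protocol': 'https', 'branch': 'master'}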

@@ -543,8 +537,8 @@ def verify_checksum(ud, d):

    if ud.method.recommends_checksum(ud):
        # If strict checking enabled and neither sum defined, raise error
        strict = d.getVar("BB_STRICT_CHECKSUM", True) or "0"
        if (strict == "1") and not (ud.md5_expected or ud.sha256_expected):
        strict = d.getVar("BB_STRICT_CHECKSUM", True) or None
        if strict and not (ud.md5_expected or ud.sha256_expected):
            logger.error('No checksum specified for %s, please add at least one to the recipe:\n'
                         'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
                         (ud.localpath, ud.md5_name, md5data,
@@ -981,6 +975,7 @@ def get_file_checksums(filelist, pn):

    checksums = []
    for pth in filelist.split():
        checksum = None
        if '*' in pth:
            # Handle globs
            for f in glob.glob(pth):
@@ -990,12 +985,15 @@ def get_file_checksums(filelist, pn):
                checksum = checksum_file(f)
                if checksum:
                    checksums.append((f, checksum))
            continue
        elif os.path.isdir(pth):
            checksums.extend(checksum_dir(pth))
            continue
        else:
            checksum = checksum_file(pth)
            if checksum:
                checksums.append((pth, checksum))

        if checksum:
            checksums.append((pth, checksum))

    checksums.sort(key=operator.itemgetter(1))
    return checksums
@@ -1198,7 +1196,7 @@ class FetchMethod(object):
                        (file, urldata.parm.get('unpack')))

        dots = file.split(".")
        if dots[-1] in ['gz', 'bz2', 'Z', 'xz', 'lz']:
        if dots[-1] in ['gz', 'bz2', 'Z', 'xz']:
            efile = os.path.join(rootdir, os.path.basename('.'.join(dots[0:-1])))
        else:
            efile = file
@@ -1219,10 +1217,6 @@ class FetchMethod(object):
                cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.xz'):
                cmd = 'xz -dc %s > %s' % (file, efile)
            elif file.endswith('.tar.lz'):
                cmd = 'lzip -dc %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.lz'):
                cmd = 'lzip -dc %s > %s' % (file, efile)
            elif file.endswith('.zip') or file.endswith('.jar'):
                try:
                    dos = bb.utils.to_boolean(urldata.parm.get('dos'), False)
@@ -1268,13 +1262,8 @@ class FetchMethod(object):
                # items. So, only do so for file:// entries.
                if urldata.type == "file" and urldata.path.find("/") != -1:
                    destdir = urldata.path.rsplit("/", 1)[0]
                    if urldata.parm.get('subdir') != None:
                        destdir = urldata.parm.get('subdir') + "/" + destdir
                else:
                    if urldata.parm.get('subdir') != None:
                        destdir = urldata.parm.get('subdir')
                    else:
                        destdir = "."
                    destdir = "."
                bb.utils.mkdirhier("%s/%s" % (rootdir, destdir))
                cmd = 'cp -f %s %s/%s/' % (file, rootdir, destdir)

@@ -1569,7 +1558,6 @@ from . import bzr
from . import hg
from . import osc
from . import repo
from . import clearcase

methods.append(local.Local())
methods.append(wget.Wget())
@@ -1585,4 +1573,3 @@ methods.append(bzr.Bzr())
methods.append(hg.Hg())
methods.append(osc.Osc())
methods.append(repo.Repo())
methods.append(clearcase.ClearCase())

@@ -1,263 +0,0 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' clearcase implementation

The clearcase fetcher is used to retrieve files from a ClearCase repository.

Usage in the recipe:

    SRC_URI = "ccrc://cc.example.org/ccrc;vob=/example_vob;module=/example_module"
    SRCREV = "EXAMPLE_CLEARCASE_TAG"
    PV = "${@d.getVar("SRCREV").replace("/", "+")}"

The fetcher uses the rcleartool or cleartool remote client, depending on which one is available.

Supported SRC_URI options are:

- vob
    (required) The name of the clearcase VOB (with prepending "/")

- module
    The module in the selected VOB (with prepending "/")

    The module and vob parameters are combined to create
    the following load rule in the view config spec:
        load <vob><module>

- proto
    http or https

Related variables:

    CCASE_CUSTOM_CONFIG_SPEC
        Write a config spec to this variable in your recipe to use it instead
        of the default config spec generated by this fetcher.
        Please note that the SRCREV loses its functionality if you specify
        this variable. SRCREV is still used to label the archive after a fetch,
        but it doesn't define what's fetched.

User credentials:
    cleartool:
        The login of cleartool is handled by the system. No special steps needed.

    rcleartool:
        In order to use rcleartool with authenticated users an `rcleartool login` is
        necessary before using the fetcher.
"""
# Copyright (C) 2014 Siemens AG
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#

import os
import sys
import shutil
import bb
from bb import data
from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import runfetchcmd
from bb.fetch2 import logger
from distutils import spawn

class ClearCase(FetchMethod):
    """Class to fetch urls via 'clearcase'"""
    def init(self, d):
        pass

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with Clearcase.
        """
        return ud.type in ['ccrc']

    def debug(self, msg):
        logger.debug(1, "ClearCase: %s", msg)

    def urldata_init(self, ud, d):
        """
        init ClearCase specific variable within url data
        """
        ud.proto = "https"
        if 'protocol' in ud.parm:
            ud.proto = ud.parm['protocol']
        if not ud.proto in ('http', 'https'):
            raise fetch2.ParameterError("Invalid protocol type", ud.url)

        ud.vob = ''
        if 'vob' in ud.parm:
            ud.vob = ud.parm['vob']
        else:
            msg = ud.url+": vob must be defined so the fetcher knows what to get."
            raise MissingParameterError('vob', msg)

        if 'module' in ud.parm:
            ud.module = ud.parm['module']
        else:
            ud.module = ""

        ud.basecmd = d.getVar("FETCHCMD_ccrc", True) or spawn.find_executable("cleartool") or spawn.find_executable("rcleartool")

        if data.getVar("SRCREV", d, True) == "INVALID":
            raise FetchError("Set a valid SRCREV for the clearcase fetcher in your recipe, e.g. SRCREV = \"/main/LATEST\" or any other label of your choice.")

        ud.label = d.getVar("SRCREV")
        ud.customspec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC", True)

        ud.server = "%s://%s%s" % (ud.proto, ud.host, ud.path)

        ud.identifier = "clearcase-%s%s-%s" % ( ud.vob.replace("/", ""),
                                                ud.module.replace("/", "."),
                                                ud.label.replace("/", "."))

        ud.viewname = "%s-view%s" % (ud.identifier, d.getVar("DATETIME", d, True))
        ud.csname = "%s-config-spec" % (ud.identifier)
        ud.ccasedir = os.path.join(data.getVar("DL_DIR", d, True), ud.type)
        ud.viewdir = os.path.join(ud.ccasedir, ud.viewname)
        ud.configspecfile = os.path.join(ud.ccasedir, ud.csname)
        ud.localfile = "%s.tar.gz" % (ud.identifier)

        self.debug("host            = %s" % ud.host)
        self.debug("path            = %s" % ud.path)
        self.debug("server          = %s" % ud.server)
        self.debug("proto           = %s" % ud.proto)
        self.debug("type            = %s" % ud.type)
        self.debug("vob             = %s" % ud.vob)
        self.debug("module          = %s" % ud.module)
        self.debug("basecmd         = %s" % ud.basecmd)
        self.debug("label           = %s" % ud.label)
        self.debug("ccasedir        = %s" % ud.ccasedir)
        self.debug("viewdir         = %s" % ud.viewdir)
        self.debug("viewname        = %s" % ud.viewname)
        self.debug("configspecfile  = %s" % ud.configspecfile)
        self.debug("localfile       = %s" % ud.localfile)

        ud.localfile = os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)

    def _build_ccase_command(self, ud, command):
        """
        Build up a commandline based on ud
        command is: mkview, setcs, rmview
        """
        options = []

        if "rcleartool" in ud.basecmd:
            options.append("-server %s" % ud.server)

        basecmd = "%s %s" % (ud.basecmd, command)

        if command is 'mkview':
            if not "rcleartool" in ud.basecmd:
                # Cleartool needs a -snapshot view
                options.append("-snapshot")
            options.append("-tag %s" % ud.viewname)
            options.append(ud.viewdir)

        elif command is 'rmview':
            options.append("-force")
            options.append("%s" % ud.viewdir)

        elif command is 'setcs':
            options.append("-overwrite")
            options.append(ud.configspecfile)

        else:
            raise FetchError("Invalid ccase command %s" % command)

        ccasecmd = "%s %s" % (basecmd, " ".join(options))
        self.debug("ccasecmd = %s" % ccasecmd)
        return ccasecmd

    def _write_configspec(self, ud, d):
        """
        Create config spec file (ud.configspecfile) for ccase view
        """
        config_spec = ""
        custom_config_spec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC", d)
        if custom_config_spec is not None:
            for line in custom_config_spec.split("\\n"):
                config_spec += line+"\n"
            bb.warn("A custom config spec has been set, SRCREV is only relevant for the tarball name.")
        else:
            config_spec += "element * CHECKEDOUT\n"
            config_spec += "element * %s\n" % ud.label
            config_spec += "load %s%s\n" % (ud.vob, ud.module)

        logger.info("Using config spec: \n%s" % config_spec)

        with open(ud.configspecfile, 'w') as f:
            f.write(config_spec)

    def _remove_view(self, ud, d):
        if os.path.exists(ud.viewdir):
            os.chdir(ud.ccasedir)
            cmd = self._build_ccase_command(ud, 'rmview');
            logger.info("cleaning up [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
            bb.fetch2.check_network_access(d, cmd, ud.url)
            output = runfetchcmd(cmd, d)
            logger.info("rmview output: %s", output)

    def need_update(self, ud, d):
        if ("LATEST" in ud.label) or (ud.customspec and "LATEST" in ud.customspec):
            ud.identifier += "-%s" % d.getVar("DATETIME",d, True)
            return True
        if os.path.exists(ud.localpath):
            return False
        return True

    def supports_srcrev(self):
        return True

    def sortable_revision(self, ud, d, name):
        return False, ud.identifier

    def download(self, ud, d):
        """Fetch url"""

        # Make a fresh view
        bb.utils.mkdirhier(ud.ccasedir)
        self._write_configspec(ud, d)
        cmd = self._build_ccase_command(ud, 'mkview')
        logger.info("creating view [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
        bb.fetch2.check_network_access(d, cmd, ud.url)
        try:
            runfetchcmd(cmd, d)
        except FetchError as e:
            if "CRCLI2008E" in e.msg:
                raise FetchError("%s\n%s\n" % (e.msg, "Call `rcleartool login` in your console to authenticate to the clearcase server before running bitbake."))
            else:
                raise e

        # Set configspec: Setting the configspec effectively fetches the files as defined in the configspec
        os.chdir(ud.viewdir)
        cmd = self._build_ccase_command(ud, 'setcs');
        logger.info("fetching data [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
        bb.fetch2.check_network_access(d, cmd, ud.url)
        output = runfetchcmd(cmd, d)
        logger.info("%s", output)

        # Copy the configspec to the viewdir so we have it in our source tarball later
        shutil.copyfile(ud.configspecfile, os.path.join(ud.viewdir, ud.csname))

        # Clean clearcase meta-data before tar

        runfetchcmd('tar -czf "%s" .' % (ud.localpath), d, cleanup = [ud.localpath])

        # Clean up so we can create a new view next time
        self.clean(ud, d);

    def clean(self, ud, d):
        self._remove_view(ud, d)
        bb.utils.remove(ud.configspecfile)
@@ -67,7 +67,6 @@ Supported SRC_URI options are:
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os
import re
import bb
from bb import data
from bb.fetch2 import FetchMethod
@@ -124,7 +123,7 @@ class Git(FetchMethod):
                ud.branches[name] = branch
                ud.unresolvedrev[name] = branch

        ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git -c core.fsyncobjectfiles=0"
        ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git"

        ud.write_tarballs = ((data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) or "0") != "0") or ud.rebaseable

@@ -246,7 +245,7 @@ class Git(FetchMethod):
        subdir = ud.parm.get("subpath", "")
        if subdir != "":
            readpathspec = ":%s" % (subdir)
            def_destsuffix = "%s/" % os.path.basename(subdir)
            def_destsuffix = "%s/" % os.path.basename(subdir.rstrip('/'))
        else:
            readpathspec = ""
            def_destsuffix = "git/"
@@ -276,7 +275,7 @@ class Git(FetchMethod):
            os.symlink(ud.clonedir, indirectiondir)
            clonedir = indirectiondir

        runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, cloneflags, clonedir, destdir), d)
        runfetchcmd("git clone %s %s/ %s" % (cloneflags, clonedir, destdir), d)
        if not ud.nocheckout:
            os.chdir(destdir)
            if subdir != "":
@@ -340,50 +339,10 @@ class Git(FetchMethod):
        """
        Compute the HEAD revision for the url
        """
        if ud.unresolvedrev[name][:5] == "refs/":
            search = "%s %s^{}" % (ud.unresolvedrev[name], ud.unresolvedrev[name])
        else:
            search = "refs/heads/%s refs/tags/%s^{}" % (ud.unresolvedrev[name], ud.unresolvedrev[name])
        search = "refs/heads/%s refs/tags/%s^{}" % (ud.unresolvedrev[name], ud.unresolvedrev[name])
        output = self._lsremote(ud, d, search)
        return output.split()[0]

    def latest_versionstring(self, ud, d):
        """
        Compute the latest release name like "x.y.x" in "x.y.x+gitHASH"
        by searching through the tags output of ls-remote, comparing
        versions and returning the highest match.
        """
        verstring = ""
        tagregex = re.compile(d.getVar('GITTAGREGEX', True) or "(?P<pver>([0-9][\.|_]?)+)")
        try:
            output = self._lsremote(ud, d, "refs/tags/*^{}")
        except bb.fetch2.FetchError or bb.fetch2.NetworkAccess:
            return ""

        for line in output.split("\n"):
            if not line:
                break

            line = line.split("/")[-1]
            # Ignore non-released branches
            m = re.search("(alpha|beta|rc|final)+", line)
            if m:
                continue

            # search for version in the line
            tag = tagregex.search(line)
            if tag == None:
                continue

            tag = tag.group('pver')
            tag = tag.replace("_", ".")

            if verstring and bb.utils.vercmp(("0", tag, ""), ("0", verstring, "")) < 0:
                continue
            verstring = tag

        return verstring

    def _build_revision(self, ud, d, name):
        return ud.revisions[name]


@@ -2,16 +2,6 @@
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' git submodules implementation

Inherits from and extends the Git fetcher to retrieve submodules of a git repository
after cloning.

SRC_URI = "gitsm://<see Git fetcher for syntax>"

See the Git fetcher, git://, for usage documentation.

NOTE: Switching a SRC_URI from "git://" to "gitsm://" requires a clean of your recipe.

"""

# Copyright (C) 2013 Richard Purdie

@@ -123,10 +123,7 @@ class Hg(FetchMethod):
            else:
                cmd = "%s pull" % (basecmd)
        elif command == "update":
            if ud.user and ud.pswd:
                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" update -C %s" % (basecmd, ud.user, ud.pswd, proto, " ".join(options))
            else:
                cmd = "%s update -C %s" % (basecmd, " ".join(options))
            cmd = "%s update --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" -C %s" % (basecmd, ud.user, ud.pswd, proto, " ".join(options))
        else:
            raise FetchError("Invalid hg command %s" % command, ud.url)


@@ -56,7 +56,7 @@ class Local(FetchMethod):
        if path[0] != "/":
            filespath = data.getVar('FILESPATH', d, True)
            if filespath:
                logger.debug(2, "Searching for %s in paths:\n    %s" % (path, "\n    ".join(filespath.split(":"))))
                logger.debug(2, "Searching for %s in paths: \n%s" % (path, "\n    ".join(filespath.split(":"))))
                newpath = bb.utils.which(filespath, path)
        if not newpath:
            filesdir = data.getVar('FILESDIR', d, True)

@@ -103,15 +103,22 @@ class Perforce(FetchMethod):
    def urldata_init(self, ud, d):
        (host, path, user, pswd, parm) = Perforce.doparse(ud.url, d)

        base_path = path.replace('/...', '')
        base_path = self._strip_leading_slashes(base_path)

        if "label" in parm:
            version = parm["label"]
        else:
            version = Perforce.getcset(d, path, host, user, pswd, parm)
        # If a label is specified, we use that as our filename

        ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base_path.replace('/', '.'), version), d)
        if "label" in parm:
            ud.localfile = "%s.tar.gz" % (parm["label"])
            return

        base = path
        which = path.find('/...')
        if which != -1:
            base = path[:which-1]

        base = self._strip_leading_slashes(base)

        cset = Perforce.getcset(d, path, host, user, pswd, parm)

        ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base.replace('/', '.'), cset), d)

    def download(self, ud, d):
        """

@@ -101,8 +101,7 @@ class Svn(FetchMethod):
            suffix = "@%s" % (ud.revision)

        if command == "fetch":
            transportuser = ud.parm.get("transportuser", "")
            svncmd = "%s co %s %s://%s%s/%s%s %s" % (ud.basecmd, " ".join(options), proto, transportuser, svnroot, ud.module, suffix, ud.module)
            svncmd = "%s co %s %s://%s/%s%s %s" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module, suffix, ud.module)
        elif command == "update":
            svncmd = "%s update %s" % (ud.basecmd, " ".join(options))
        else:

@@ -25,9 +25,6 @@ BitBake build tools.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig

import re
import tempfile
import subprocess
import os
import logging
import bb
@@ -37,7 +34,6 @@ from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import logger
from bb.fetch2 import runfetchcmd
from bs4 import BeautifulSoup

class Wget(FetchMethod):
    """Class to fetch urls via 'wget'"""
@@ -108,270 +104,3 @@ class Wget(FetchMethod):
        self._runwget(ud, d, fetchcmd, True)

        return True


    def _parse_path(self, regex, s):
        """
        Find and group name, version and archive type in the given string s
        """
        bb.debug(3, "parse_path(%s, %s)" % (regex.pattern, s))
        m = regex.search(s)
        if m:
            bb.debug(3, "%s, %s, %s" % (m.group('name'), m.group('ver'), m.group('type')))
            return (m.group('name'), m.group('ver'), m.group('type'))
        return None

    def _modelate_version(self, version):
        if version[0] in ['.', '-']:
            if version[1].isdigit():
                version = version[1] + version[0] + version[2:len(version)]
            else:
                version = version[1:len(version)]

        version = re.sub('\-', '.', version)
        version = re.sub('_', '.', version)
        version = re.sub('(rc)+', '.-1.', version)
        version = re.sub('(alpha)+', '.-3.', version)
        version = re.sub('(beta)+', '.-2.', version)
        if version[0] == 'v':
            version = version[1:len(version)]
        return version

    def _vercmp(self, old, new):
        """
        Check whether 'new' is newer than 'old' version. We use existing vercmp() for the
        purpose. PE is cleared in comparison as it's not for build, and PR is cleared too
        for simplicity as it's somehow difficult to get from various upstream format
        """

        (oldpn, oldpv, oldsuffix) = old
        (newpn, newpv, newsuffix) = new

        """
        Check for a new suffix type that we have never heard of before
        """
        if (newsuffix):
            m = self.suffix_regex_comp.search(newsuffix)
            if not m:
                bb.warn("%s has a possible unknown suffix: %s" % (newpn, newsuffix))
                return False

        """
        Not our package so ignore it
        """
        if oldpn != newpn:
            return False

        oldpv = self._modelate_version(oldpv)
        newpv = self._modelate_version(newpv)

        if bb.utils.vercmp(("0", oldpv, ""), ("0", newpv, "")) < 0:
            return True
        else:
            return False
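
_vercmp() leans on bb.utils.vercmp(), which compares (epoch, version, revision) triples; epoch and revision are cleared as the docstring says. A minimal sketch of that call, assuming bitbake's bb.utils is importable:

import bb.utils

old, new = "1.0", "1.0.1"
if bb.utils.vercmp(("0", old, ""), ("0", new, "")) < 0:
    print("%s is newer than %s" % (new, old))   # 1.0.1 is newer than 1.0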

    def _fetch_index(self, uri, ud, d):
        """
        Run fetch checkstatus to get directory information
        """
        f = tempfile.NamedTemporaryFile(dir="/tmp/s/", delete=False)

        agent = "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12"
        fetchcmd = self.basecmd
        fetchcmd += " -O " + f.name + " --user-agent='" + agent + "' '" + uri + "'"
        try:
            self._runwget(ud, d, fetchcmd, True)
            fetchresult = f.read()
        except bb.fetch2.BBFetchException:
            fetchresult = ""

        f.close()
        # os.unlink(f.name)
        return fetchresult

    def _check_latest_dir(self, url, versionstring, ud, d):
        """
        Return the name of the directory with the greatest package version
        If error or no version, return None
        """
        bb.debug(3, "DirURL: %s, %s" % (url, versionstring))
        soup = BeautifulSoup(self._fetch_index(url, ud, d))
        if not soup:
            return None

        valid = 0
        prefix = ''
        regex = re.compile("(\D*)((\d+[\.\-_])+(\d+))")
        m = regex.search(versionstring)
        if m:
            version = ('', m.group(2), '')
            prefix = m.group(1)
            bb.debug(3, "version: %s, prefix: %s" % (version, prefix))
        else:
            version = ('', versionstring, '')

        for href in soup.find_all('a', href=True):
            bb.debug(3, "href: %s" % (href['href']))
            if href['href'].find(versionstring) >= 0:
                valid = 1
            m = regex.search(href['href'].strip("/"))
            if m:
                thisversion = ('', m.group(2), '')
                if thisversion and self._vercmp(version, thisversion) == True:
                    version = thisversion

        if valid:
            bb.debug(3, "Would return %s" % (prefix+version[1]))
            return prefix+version[1]
        else:
            bb.debug(3, "Not Valid")
            return None

    def _check_latest_version(self, url, package, current_version, ud, d):
        """
        Return the latest version of a package inside a given directory path
        If error or no version, return None
        """
        valid = 0
        version = ('', '', '')

        bb.debug(3, "VersionURL: %s" % (url))
        soup = BeautifulSoup(self._fetch_index(url, ud, d))
        if not soup:
            bb.debug(3, "*** %s NO SOUP" % (url))
            return None

        pn_regex = d.getVar('REGEX', True)
        if pn_regex:
            pn_regex = re.compile(pn_regex)
            bb.debug(3, "pn_regex = '%s'" % (pn_regex.pattern))

        for line in soup.find_all('a', href=True):
            newver = None
            bb.debug(3, "line = '%s'" % (line['href']))
            if pn_regex:
                m = pn_regex.search(line['href'])
                if m:
                    bb.debug(3, "Pver = '%s'" % (m.group('pver')))
                    newver = ('', m.group('pver'), '')
                else:
                    m = pn_regex.search(str(line))
                    if m:
                        bb.debug(3, "Pver = '%s'" % (m.group('pver')))
                        newver = ('', m.group('pver'), '')
            else:
                newver = self._parse_path(self.package_custom_regex_comp, line['href'])
                if not newver:
                    newver = self._parse_path(self.package_custom_regex_comp, str(line))

            if newver:
                bb.debug(3, "Upstream version found: %s" % newver[1])
                if valid == 0:
                    version = newver
                    valid = 1
                elif self._vercmp(version, newver) == True:
                    version = newver

        # check whether a valid package and version were found
        bb.debug(3, "*** %s -> UpstreamVersion = %s (CurrentVersion = %s)" %
                (package, version[1] or "N/A", current_version[1]))

        if valid and version:
            return re.sub('_', '.', version[1])

        return None

    def _init_regexes(self, package):
        """
        Match as many patterns as possible such as:
                gnome-common-2.20.0.tar.gz (most common format)
                gtk+-2.90.1.tar.gz
                xf86-input-synaptics-12.6.9.tar.gz
                dri2proto-2.3.tar.gz
                blktool_4.orig.tar.gz
                libid3tag-0.15.1b.tar.gz
                unzip552.tar.gz
                icu4c-3_6-src.tgz
                genext2fs_1.3.orig.tar.gz
                gst-fluendo-mp3
        """
        # match most patterns which uses "-" as separator to version digits
        pn_prefix1 = "[a-zA-Z][a-zA-Z0-9]*([\-_][a-zA-Z]\w+)*\+?[\-_]"
        # a loose pattern such as for unzip552.tar.gz
        pn_prefix2 = "[a-zA-Z]+"
        # a loose pattern such as for 80325-quicky-0.4.tar.gz
        pn_prefix3 = "[0-9]+[\-]?[a-zA-Z]+"
        # Save the Package Name (pn) Regex for use later
        pn_regex = "(%s|%s|%s)" % (pn_prefix1, pn_prefix2, pn_prefix3)

        # match version
        pver_regex = "(([A-Z]*\d+[a-zA-Z]*[\.\-_]*)+)"

        # match arch
        parch_regex = "\-source|_all_"

        # src.rpm extension was added only for rpm package. Can be removed if the rpm
        # packaged will always be considered as having to be manually upgraded
        psuffix_regex = "(tar\.gz|tgz|tar\.bz2|zip|xz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)"

        # match name, version and archive type of a package
        self.package_regex_comp = re.compile("(?P<name>%s?)\.?v?(?P<ver>%s)(?P<arch>%s)?[\.\-](?P<type>%s$)"
                                             % (pn_regex, pver_regex, parch_regex, psuffix_regex))
        self.suffix_regex_comp = re.compile(psuffix_regex)

        # search for version matches on folders inside the path, like:
        # "5.7" in http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
        self.dirver_regex_comp = re.compile("(?P<dirver>[^/]*(\d+\.)*\d+([\-_]r\d+)*)/")

        # get current version and make custom regex for search in uri's
        version = self._parse_path(self.package_regex_comp, package)
        if version:
            self.package_custom_regex_comp = re.compile(
                "(?P<name>%s)(?P<ver>%s)(?P<arch>%s)?[\.\-](?P<type>%s)$" %
                (version[0], pver_regex, parch_regex, psuffix_regex))
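
Applied to a name like the docstring's gnome-common-2.20.0.tar.gz, the compiled pattern splits out the name, version and archive type. A cut-down version of the same regex (only the common pn/pver/suffix cases, reassembled by hand, so group details may differ from the full pattern above):

import re

package_regex = re.compile(
    r"(?P<name>[a-zA-Z][a-zA-Z0-9]*([\-_][a-zA-Z]\w+)*\+?[\-_])"
    r"v?(?P<ver>(([A-Z]*\d+[a-zA-Z]*[\.\-_]*)+))"
    r"[\.\-](?P<type>tar\.gz|tgz|tar\.bz2|zip)$")

m = package_regex.search("gnome-common-2.20.0.tar.gz")
print(m.group("name"), m.group("ver"), m.group("type"))
# gnome-common- 2.20.0 tar.gz   (the name keeps its trailing separator)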
|
||||
|
||||
def latest_versionstring(self, ud, d):
|
||||
"""
|
||||
Manipulate the URL and try to obtain the latest package version
|
||||
|
||||
sanity check to ensure same name and type.
|
||||
"""
|
||||
package = ud.path.split("/")[-1]
|
||||
regex_uri = d.getVar("REGEX_URI", True)
|
||||
newpath = regex_uri or ud.path
|
||||
pupver = ""
|
||||
|
||||
self._init_regexes(package)
|
||||
current_version = ('', d.getVar('PV', True), '')
|
||||
|
||||
"""possible to have no version in pkg name, such as spectrum-fw"""
|
||||
if not re.search("\d+", package):
|
||||
return re.sub('_', '.', current_version[1])
|
||||
|
||||
if not regex_uri:
|
||||
# generate the new uri with the appropriate latest directory
|
||||
m = self.dirver_regex_comp.search(ud.path)
|
||||
if m:
|
||||
dirver = m.group('dirver')
|
||||
newuri = bb.fetch.encodeurl([ud.type, ud.host,
|
||||
ud.path.split(dirver)[0], ud.user, ud.pswd, {}])
|
||||
new_dirver = self._check_latest_dir(newuri, dirver, ud, d)
|
||||
if new_dirver and dirver != new_dirver:
|
||||
newpath = ud.path.replace(dirver, new_dirver, True)
|
||||
|
||||
newpath = newpath.split(package)[0] or "/" # path to directory
|
||||
newuri = bb.fetch.encodeurl([ud.type, ud.host, newpath, ud.user, ud.pswd, {}])
|
||||
else:
|
||||
newuri = newpath
|
||||
|
||||
newversion = self._check_latest_version(newuri, package,
|
||||
current_version, ud, d)
|
||||
while not newversion:
|
||||
# maybe it's hiding in a download directory so try there
|
||||
newuri = "/".join(newuri.split("/")[0:-2]) + "/download"
|
||||
if newuri == "/download" or newuri == "http://download":
|
||||
break
|
||||
newversion = self._check_latest_version(newuri, package,
|
||||
current_version, ud, d)
|
||||
|
||||
return newversion or ""
|
||||
|
||||
@@ -52,10 +52,10 @@ def getMountedDev(path):
|
||||
parentDev = os.stat(path).st_dev
|
||||
currentDev = parentDev
|
||||
# When the current directory's device is different from the
|
||||
# parent's, then the current directory is a mount point
|
||||
# parrent's, then the current directory is a mount point
|
||||
while parentDev == currentDev:
|
||||
mountPoint = path
|
||||
# Use dirname to get the parent's directory
|
||||
# Use dirname to get the parrent's directory
|
||||
path = os.path.dirname(path)
|
||||
# Reach the "/"
|
||||
if path == mountPoint:
|
||||
@@ -77,7 +77,7 @@ def getDiskData(BBDirs, configuration):
|
||||
"""Prepare disk data for disk space monitor"""
|
||||
|
||||
# Save the device IDs, need the ID to be unique (the dictionary's key is
|
||||
# unique), so that when more than one directory is located on the same
|
||||
# unique), so that when more than one directories are located in the same
|
||||
# device, we just monitor it once
|
||||
devDict = {}
|
||||
for pathSpaceInode in BBDirs.split():
|
||||
@@ -187,11 +187,11 @@ class diskMonitor:
|
||||
if self.spaceInterval and self.inodeInterval:
|
||||
self.enableMonitor = True
|
||||
# These are for saving the previous disk free space and inode, we
|
||||
# use them to avoid printing too many warning messages
|
||||
# use them to avoid print too many warning messages
|
||||
self.preFreeS = {}
|
||||
self.preFreeI = {}
|
||||
# This is for STOPTASKS and ABORT, to avoid printing the message
|
||||
# repeatedly while waiting for the tasks to finish
|
||||
# This is for STOPTASKS and ABORT, to avoid print the message repeatly
|
||||
# during waiting the tasks to finish
|
||||
self.checked = {}
|
||||
for k in self.devDict:
|
||||
self.preFreeS[k] = 0
|
||||
@@ -239,9 +239,11 @@ class diskMonitor:
|
||||
freeInode = st.f_favail
|
||||
|
||||
if minInode and freeInode < minInode:
|
||||
# Some filesystems use dynamic inodes so can't run out
|
||||
# (e.g. btrfs). This is reported by the inode count being 0.
|
||||
# Some fs formats' (e.g., btrfs) statvfs.f_files (inodes) is
|
||||
# zero, this is a feature of the fs, we disable the inode
|
||||
# checking for such a fs.
|
||||
if st.f_files == 0:
|
||||
logger.info("Inode check for %s is unavaliable, will remove it from disk monitor" % path)
|
||||
self.devDict[k][2] = None
|
||||
continue
|
||||
# Always show warning, the self.checked would always be False if the action is WARN
|
||||
|
||||
@@ -202,8 +202,8 @@ if __name__ == '__main__':
|
||||
print(rec5._replace(k=222)._my_custom_method()) # MyMixIn's
|
||||
print(rec5._replace(k=222).count(2)) # MyMixIn's
|
||||
|
||||
# Note that behavior: the standard namedtuple methods cannot be
|
||||
# overridden by a foreign mix-in -- even if the mix-in is declared
|
||||
# None that behavior: the standard namedtuple methods cannot be
|
||||
# overriden by a foreign mix-in -- even if the mix-in is declared
|
||||
# as the leftmost base class (but, obviously, you can override them
|
||||
# in the defined class or its subclasses):
|
||||
|
||||
|
||||
@@ -49,11 +49,8 @@ class ParseError(Exception):
|
||||
else:
|
||||
return "ParseError in %s: %s" % (self.filename, self.msg)
|
||||
|
||||
class SkipRecipe(Exception):
|
||||
"""Exception raised to skip this recipe"""
|
||||
|
||||
class SkipPackage(SkipRecipe):
|
||||
"""Exception raised to skip this recipe (use SkipRecipe in new code)"""
|
||||
class SkipPackage(Exception):
|
||||
"""Exception raised to skip this package"""
|
||||
|
||||
__mtime_cache = {}
|
||||
def cached_mtime(f):
|
||||
@@ -124,6 +121,7 @@ def resolve_file(fn, d):
|
||||
if not os.path.isfile(fn):
|
||||
raise IOError("file %s not found" % fn)
|
||||
|
||||
logger.debug(2, "LOAD %s", fn)
|
||||
return fn
|
||||
|
||||
# Used by OpenEmbedded metadata
|
||||
|
||||
@@ -128,7 +128,7 @@ class DataNode(AstNode):
if 'flag' in groupd and groupd['flag'] != None:
flag = groupd['flag']
elif groupd["lazyques"]:
flag = "_defaultval"
flag = "defaultval"

loginfo['op'] = op
loginfo['detail'] = groupd["value"]
@@ -337,10 +337,8 @@ def finalize(fn, d, variant = None):

bb.event.fire(bb.event.RecipeParsed(fn), d)

def _create_variants(datastores, names, function, onlyfinalise):
def _create_variants(datastores, names, function):
def create_variant(name, orig_d, arg = None):
if onlyfinalise and name not in onlyfinalise:
return
new_d = bb.data.createCopy(orig_d)
function(arg or name, new_d)
datastores[name] = new_d
@@ -377,7 +375,7 @@ def _expand_versions(versions):
def multi_finalize(fn, d):
appends = (d.getVar("__BBAPPEND", True) or "").split()
for append in appends:
logger.debug(1, "Appending .bbappend file %s to %s", append, fn)
logger.debug(2, "Appending .bbappend file %s to %s", append, fn)
bb.parse.BBHandler.handle(append, d, True)

onlyfinalise = d.getVar("__ONLYFINALISE", False)
@@ -386,7 +384,7 @@ def multi_finalize(fn, d):
d = bb.data.createCopy(safe_d)
try:
finalize(fn, d)
except bb.parse.SkipRecipe as e:
except bb.parse.SkipPackage as e:
d.setVar("__SKIPPED", e.args[0])
datastores = {"": safe_d}

@@ -429,10 +427,10 @@ def multi_finalize(fn, d):
verfunc(pv, d, safe_d)
try:
finalize(fn, d)
except bb.parse.SkipRecipe as e:
except bb.parse.SkipPackage as e:
d.setVar("__SKIPPED", e.args[0])

_create_variants(datastores, versions, verfunc, onlyfinalise)
_create_variants(datastores, versions, verfunc)

extended = d.getVar("BBCLASSEXTEND", True) or ""
if extended:
@@ -462,14 +460,14 @@ def multi_finalize(fn, d):
bb.parse.BBHandler.inherit(extendedmap[name], fn, 0, d)

safe_d.setVar("BBCLASSEXTEND", extended)
_create_variants(datastores, extendedmap.keys(), extendfunc, onlyfinalise)
_create_variants(datastores, extendedmap.keys(), extendfunc)

for variant, variant_d in datastores.iteritems():
if variant:
try:
if not onlyfinalise or variant in onlyfinalise:
finalize(fn, variant_d, variant)
except bb.parse.SkipRecipe as e:
except bb.parse.SkipPackage as e:
variant_d.setVar("__SKIPPED", e.args[0])

if len(datastores) > 1:

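The _create_variants change above threads an onlyfinalise filter through the variant machinery. The underlying pattern, copy a base store, let a callback specialise it, file it under the variant name, can be sketched independently of BitBake (plain dicts stand in for the real datastore objects; all names here are illustrative):

import copy

def create_variants(datastores, names, specialise, onlyfinalise=None):
    """Build one specialised copy of the base store per variant name.
    Plain-dict sketch of the pattern; BitBake uses bb.data datastores."""
    base = datastores[""]
    for name in names:
        if onlyfinalise and name not in onlyfinalise:
            continue                      # the filter mirrors the diff above
        new_d = copy.deepcopy(base)
        specialise(name, new_d)
        datastores[name] = new_d

stores = {"": {"PN": "example"}}
create_variants(stores, ["native"], lambda n, d: d.update(CLASS=n))
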
@@ -86,7 +86,7 @@ def inherit(files, fn, lineno, d):
file = abs_fn

if not file in __inherit_cache:
logger.debug(1, "Inheriting %s (from %s:%d)" % (file, fn, lineno))
logger.log(logging.DEBUG -1, "BB %s:%d: inheriting %s", fn, lineno, file)
__inherit_cache.append( file )
d.setVar('__inherit_cache', __inherit_cache)
include(fn, file, lineno, d, "inherit")
@@ -124,6 +124,12 @@ def handle(fn, d, include):
__classname__ = ""
__residue__ = []


if include == 0:
logger.debug(2, "BB %s: handle(data)", fn)
else:
logger.debug(2, "BB %s: handle(data, include)", fn)

base_name = os.path.basename(fn)
(root, ext) = os.path.splitext(base_name)
init(d)
@@ -154,7 +160,7 @@ def handle(fn, d, include):

try:
statements.eval(d)
except bb.parse.SkipRecipe:
except bb.parse.SkipPackage:
bb.data.setVar("__SKIPPED", True, d)
if include == 0:
return { "" : d }

@@ -199,9 +199,7 @@ class PersistData(object):
del self.data[domain][key]

def connect(database):
connection = sqlite3.connect(database, timeout=5, isolation_level=None)
connection.execute("pragma synchronous = off;")
return connection
return sqlite3.connect(database, timeout=5, isolation_level=None)

def persist(domain, d):
"""Convenience factory for SQLTable objects based upon metadata"""

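The connect() hunk above toggles "pragma synchronous = off", which trades crash-durability for write speed. A self-contained sketch of that trade-off using the standard sqlite3 API (in-memory database used purely for illustration):

import sqlite3

def connect(database):
    # timeout guards against "database is locked" under concurrent writers;
    # isolation_level=None means autocommit.
    conn = sqlite3.connect(database, timeout=5, isolation_level=None)
    # synchronous=off skips fsync on commit: much faster for cache-style
    # data, at the cost of possible corruption on power loss.
    conn.execute("pragma synchronous = off;")
    return conn

conn = connect(":memory:")
conn.execute("create table kv (k text primary key, v text)")
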
@@ -38,4 +38,4 @@ class ExitSignal(ShellError):

class ReturnSignal(ShellError):
"""Exit signal."""
pass
pass
@@ -97,7 +97,7 @@ class RunQueueScheduler(object):
def __init__(self, runqueue, rqdata):
"""
The default scheduler just returns the first buildable task (the
priority map is sorted by task number)
priority map is sorted by task numer)
"""
self.rq = runqueue
self.rqdata = rqdata
@@ -186,7 +186,7 @@ class RunQueueSchedulerCompletion(RunQueueSchedulerSpeed):
"""
A scheduler optimised to complete .bb files are quickly as possible. The
priority map is sorted by task weight, but then reordered so once a given
.bb file starts to build, it's completed as quickly as possible. This works
.bb file starts to build, its completed as quickly as possible. This works
well where disk space is at a premium and classes like OE's rm_work are in
force.
"""
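A toy model of the two scheduling policies described in the docstrings above (hypothetical task tuples, not BitBake's real structures): the default picks the first buildable task in priority order, while a completion-oriented order keeps one recipe's tasks adjacent so that recipe finishes, and can be rm_work'd, before the next starts:

def first_buildable(priorities, buildable):
    """Default policy: scan the priority map, return the first runnable task."""
    for task in priorities:
        if task in buildable:
            return task
    return None

# completion-style ordering: sort by (recipe, weight) so a recipe's tasks
# stay adjacent in the priority map
tasks = [("a.bb", "do_compile", 5), ("b.bb", "do_fetch", 1), ("a.bb", "do_fetch", 2)]
completion_order = sorted(tasks, key=lambda t: (t[0], t[2]))
print(first_buildable([t[:2] for t in completion_order], {("a.bb", "do_fetch")}))
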
@@ -430,7 +430,7 @@ class RunQueueData:
# Nothing to do
return 0

logger.info("Preparing RunQueue")
logger.info("Preparing runqueue")

# Step A - Work out a list of tasks to run
#
@@ -795,7 +795,7 @@ class RunQueueData:
for st in self.cooker.configuration.invalidate_stamp.split(','):
invalidate_task(fn, "do_%s" % st, True)

# Iterate over the task list and call into the siggen code
# Interate over the task list and call into the siggen code
dealtwith = set()
todeal = set(range(len(self.runq_fnid)))
while len(todeal) > 0:
@@ -859,18 +859,15 @@ class RunQueue:

def _start_worker(self, fakeroot = False, rqexec = None):
logger.debug(1, "Starting bitbake-worker")
magic = "decafbad"
if self.cooker.configuration.profile:
magic = "decafbadbad"
if fakeroot:
fakerootcmd = self.cfgData.getVar("FAKEROOTCMD", True)
fakerootenv = (self.cfgData.getVar("FAKEROOTBASEENV", True) or "").split()
env = os.environ.copy()
for key, value in (var.split('=') for var in fakerootenv):
env[key] = value
worker = subprocess.Popen([fakerootcmd, "bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE, env=env)
worker = subprocess.Popen([fakerootcmd, "bitbake-worker", "decafbad"], stdout=subprocess.PIPE, stdin=subprocess.PIPE, env=env)
else:
worker = subprocess.Popen(["bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
worker = subprocess.Popen(["bitbake-worker", "decafbad"], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
bb.utils.nonblockingfd(worker.stdout)
workerpipe = runQueuePipe(worker.stdout, None, self.cfgData, self, rqexec)

@@ -879,7 +876,9 @@ class RunQueue:
"fakerootenv" : self.rqdata.dataCache.fakerootenv,
"fakerootdirs" : self.rqdata.dataCache.fakerootdirs,
"fakerootnoenv" : self.rqdata.dataCache.fakerootnoenv,
"sigdata" : bb.parse.siggen.get_taskdata(),
"hashes" : bb.parse.siggen.taskhash,
"hash_deps" : bb.parse.siggen.runtaskdeps,
"sigchecksums" : bb.parse.siggen.file_checksum_values,
"runq_hash" : self.rqdata.runq_hash,
"logdefaultdebug" : bb.msg.loggerDefaultDebugLevel,
"logdefaultverbose" : bb.msg.loggerDefaultVerbose,
@@ -968,11 +967,11 @@ class RunQueue:

stampfile = bb.build.stampfile(taskname, self.rqdata.dataCache, fn)

# If the stamp is missing, it's not current
# If the stamp is missing its not current
if not os.access(stampfile, os.F_OK):
logger.debug(2, "Stampfile %s not available", stampfile)
return False
# If it's a 'nostamp' task, it's not current
# If its a 'nostamp' task, it's not current
taskdep = self.rqdata.dataCache.task_deps[fn]
if 'nostamp' in taskdep and taskname in taskdep['nostamp']:
logger.debug(2, "%s.%s is nostamp\n", fn, taskname)
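A reduced sketch of the stamp-currency logic in the hunk above (the stamp naming scheme here is hypothetical; real BitBake derives it via bb.build.stampfile):

import os

def stamp_is_current(stampdir, recipe, taskname, nostamp_tasks=()):
    """True when a stamp exists for recipe.taskname; 'nostamp' tasks
    are never considered current. Illustrative naming scheme."""
    if taskname in nostamp_tasks:
        return False                      # nostamp: always rerun
    stampfile = os.path.join(stampdir, "%s.%s" % (recipe, taskname))
    return os.access(stampfile, os.F_OK)  # missing stamp -> not current
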
@@ -1066,7 +1065,7 @@ class RunQueue:
if self.state is runQueueCleanUp:
self.rqexe.finish()

if (self.state is runQueueComplete or self.state is runQueueFailed) and self.rqexe:
if self.state is runQueueComplete or self.state is runQueueFailed:
self.teardown_workers()
if self.rqexe.stats.failed:
logger.info("Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and %d failed.", self.rqexe.stats.completed + self.rqexe.stats.failed, self.rqexe.stats.skipped, self.rqexe.stats.failed)
@@ -1107,7 +1106,6 @@ class RunQueue:

def finish_runqueue(self, now = False):
if not self.rqexe:
self.state = runQueueComplete
return

if now:
@@ -1553,8 +1551,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
bb.event.fire(startevent, self.cfgData)
self.runq_running[task] = 1
self.stats.taskActive()
if not self.cooker.configuration.dry_run:
bb.build.make_stamp(taskname, self.rqdata.dataCache, fn)
bb.build.make_stamp(taskname, self.rqdata.dataCache, fn)
self.task_complete(task)
return True
else:
@@ -1689,7 +1686,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute):

process_endpoints(endpoints)

# Build a list of setscene tasks which are "unskippable"
# Build a list of setscene tasks which as "unskippable"
# These are direct endpoints referenced by the build
endpoints2 = {}
sq_revdeps2 = []
@@ -1845,10 +1842,6 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
realtask = self.rqdata.runq_setscene[task]
realdep = self.rqdata.runq_setscene[dep]
logger.debug(2, "%s was unavailable and is a hard dependency of %s so skipping" % (self.rqdata.get_user_idstring(realtask), self.rqdata.get_user_idstring(realdep)))
self.scenequeue_updatecounters(dep, fail)
continue
if task not in self.sq_revdeps2[dep]:
# May already have been removed by the fail case above
continue
self.sq_revdeps2[dep].remove(task)
if len(self.sq_revdeps2[dep]) == 0:
@@ -1990,10 +1983,6 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
logger.debug(1, 'We can skip tasks %s', sorted(self.rq.scenequeue_covered))

self.rq.state = runQueueRunInit

completeevent = sceneQueueComplete(self.stats, self.rq)
bb.event.fire(completeevent, self.cfgData)

return True

def runqueue_process_waitpid(self, task, status):
@@ -2046,7 +2035,7 @@ class sceneQueueEvent(runQueueEvent):

class runQueueTaskStarted(runQueueEvent):
"""
Event notifying a task was started
Event notifing a task was started
"""
def __init__(self, task, stats, rq, noexec=False):
runQueueEvent.__init__(self, task, stats, rq)
@@ -2054,7 +2043,7 @@ class runQueueTaskStarted(runQueueEvent):

class sceneQueueTaskStarted(sceneQueueEvent):
"""
Event notifying a setscene task was started
Event notifing a setscene task was started
"""
def __init__(self, task, stats, rq, noexec=False):
sceneQueueEvent.__init__(self, task, stats, rq)
@@ -2062,7 +2051,7 @@ class sceneQueueTaskStarted(sceneQueueEvent):

class runQueueTaskFailed(runQueueEvent):
"""
Event notifying a task failed
Event notifing a task failed
"""
def __init__(self, task, stats, exitcode, rq):
runQueueEvent.__init__(self, task, stats, rq)
@@ -2070,33 +2059,25 @@ class runQueueTaskFailed(runQueueEvent):

class sceneQueueTaskFailed(sceneQueueEvent):
"""
Event notifying a setscene task failed
Event notifing a setscene task failed
"""
def __init__(self, task, stats, exitcode, rq):
sceneQueueEvent.__init__(self, task, stats, rq)
self.exitcode = exitcode

class sceneQueueComplete(sceneQueueEvent):
"""
Event when all the sceneQueue tasks are complete
"""
def __init__(self, stats, rq):
self.stats = stats.copy()
bb.event.Event.__init__(self)

class runQueueTaskCompleted(runQueueEvent):
"""
Event notifying a task completed
Event notifing a task completed
"""

class sceneQueueTaskCompleted(sceneQueueEvent):
"""
Event notifying a setscene task completed
Event notifing a setscene task completed
"""

class runQueueTaskSkipped(runQueueEvent):
"""
Event notifying a task was skipped
Event notifing a task was skipped
"""
def __init__(self, task, stats, rq, reason):
runQueueEvent.__init__(self, task, stats, rq)

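The event classes above follow BitBake's fire-and-observe pattern. A minimal, library-free sketch of the same idea (all names here are invented for illustration; real code fires through bb.event.fire to registered handlers):

class TaskEvent(object):
    def __init__(self, task):
        self.task = task

class TaskStarted(TaskEvent): pass
class TaskCompleted(TaskEvent): pass

_handlers = []

def fire(event):
    # deliver the event to every registered observer
    for handler in _handlers:
        handler(event)

def log_handler(event):
    print("%s: %s" % (type(event).__name__, event.task))

_handlers.append(log_handler)
fire(TaskStarted("quilt-native.do_fetch"))
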
@@ -38,18 +38,14 @@ from . import BitBakeBaseServer, BitBakeBaseServerConnection, BaseImplServer
logger = logging.getLogger('BitBake')

class ServerCommunicator():
def __init__(self, connection, event_handle, server):
def __init__(self, connection, event_handle):
self.connection = connection
self.event_handle = event_handle
self.server = server

def runCommand(self, command):
# @todo try/except
self.connection.send(command)

if not self.server.is_alive():
raise SystemExit

while True:
# don't let the user ctrl-c while we're waiting for a response
try:
@@ -143,8 +139,6 @@ class ProcessServer(Process, BaseImplServer):
raise
except Exception:
logger.exception('Running idle function')
del self._idlefuns[function]
self.quit = True

if nextsleep is not None:
select.select(fds,[],[],nextsleep)
@@ -164,7 +158,7 @@ class BitBakeProcessServerConnection(BitBakeBaseServerConnection):
self.procserver = serverImpl
self.ui_channel = ui_channel
self.event_queue = event_queue
self.connection = ServerCommunicator(self.ui_channel, self.procserver.event_handle, self.procserver)
self.connection = ServerCommunicator(self.ui_channel, self.procserver.event_handle)
self.events = self.event_queue

def sigterm_terminate(self):
@@ -203,20 +197,14 @@ class ProcessEventQueue(multiprocessing.queues.Queue):

def waitEvent(self, timeout):
if self.exit:
sys.exit(1)
raise KeyboardInterrupt()
try:
if not self.server.is_alive():
self.setexit()
return None
return self.get(True, timeout)
except Empty:
return None

def getEvent(self):
try:
if not self.server.is_alive():
self.setexit()
return None
return self.get(False)
except Empty:
return None
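waitEvent/getEvent above poll a multiprocessing queue and translate Empty into None, so callers can treat "no event yet" uniformly. A standalone sketch of that pattern (Python 3 module names):

import queue

def wait_event(q, timeout):
    """Blocking poll: None means no event arrived within the timeout."""
    try:
        return q.get(True, timeout)
    except queue.Empty:
        return None

q = queue.Queue()
q.put("TaskStarted")
print(wait_event(q, 0.1))   # "TaskStarted"
print(wait_event(q, 0.1))   # None
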
@@ -231,7 +219,6 @@ class BitBakeServer(BitBakeBaseServer):
self.ui_channel, self.server_channel = Pipe()
self.event_queue = ProcessEventQueue(0)
self.serverImpl = ProcessServer(self.server_channel, self.event_queue, None)
self.event_queue.server = self.serverImpl

def detach(self):
self.serverImpl.start()

@@ -80,7 +80,7 @@ class BBTransport(xmlrpclib.Transport):

def _create_server(host, port, timeout = 60):
t = BBTransport(timeout)
s = xmlrpclib.ServerProxy("http://%s:%d/" % (host, port), transport=t, allow_none=True)
s = xmlrpclib.Server("http://%s:%d/" % (host, port), transport=t, allow_none=True)
return s, t

class BitBakeServerCommands():
@@ -253,13 +253,9 @@ class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer):
socktimeout = self.socket.gettimeout() or nextsleep
socktimeout = min(socktimeout, nextsleep)
# Mirror what BaseServer handle_request would do
try:
fd_sets = select.select(fds, [], [], socktimeout)
if fd_sets[0] and self in fd_sets[0]:
self._handle_request_noblock()
except IOError:
# we ignore interrupted calls
pass
fd_sets = select.select(fds, [], [], socktimeout)
if fd_sets[0] and self in fd_sets[0]:
self._handle_request_noblock()

# Tell idle functions we're exiting
for function, data in self._idlefuns.items():
@@ -281,16 +277,13 @@ class BitBakeXMLRPCServerConnection(BitBakeBaseServerConnection):
self.observer_only = observer_only
self.featureset = featureset

def connect(self, token = None):
if token is None:
if self.observer_only:
token = "observer"
else:
token = self.connection.addClient()

def connect(self):
if not self.observer_only:
token = self.connection.addClient()
else:
token = "observer"
if token is None:
return None

self.transport.set_connection_token(token)

self.events = uievent.BBUIEventQueue(self.connection, self.clientinfo)
@@ -339,9 +332,7 @@ class BitBakeServer(BitBakeBaseServer):

class BitBakeXMLRPCClient(BitBakeBaseServer):

def __init__(self, observer_only = False, token = None):
self.token = token

def __init__(self, observer_only = False):
self.observer_only = observer_only
# if we need extra caches, just tell the server to load them all
pass
@@ -349,14 +340,37 @@ class BitBakeXMLRPCClient(BitBakeBaseServer):
def saveConnectionDetails(self, remote):
self.remote = remote

def saveConnectionConfigParams(self, configParams):
self.configParams = configParams

def establishConnection(self, featureset):
# The format of "remote" must be "server:port"
try:
[host, port] = self.remote.split(":")
port = int(port)
except Exception as e:
bb.warn("Failed to read remote definition (%s)" % str(e))
raise e
bb.fatal("Failed to read remote definition (%s)" % str(e))

# use automatic port if port set to -1, meaning read it from
# the bitbake.lock file
if port == -1:
lock_location = "%s/bitbake.lock" % self.configParams.environment.get('BUILDDIR')
lock = bb.utils.lockfile(lock_location, False, False)
if lock:
# This means there is no server running which we can
# connect to on the local system.
bb.utils.unlockfile(lock)
return None

try:
lf = open(lock_location, 'r')
remotedef = lf.readline()
[host, port] = remotedef.split(":")
port = int(port)
lf.close()
self.remote = remotedef
except Exception as e:
bb.fatal("Failed to read bitbake.lock (%s)" % str(e))

# We need our IP for the server connection. We get the IP
# by trying to connect with the server
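The comment above refers to a common trick: learn the local address that routes to a given host by opening a UDP socket toward it and reading getsockname(), exactly as the following hunk does. A standalone sketch:

import socket

def local_ip_for(host, port):
    """Return the local address the kernel would use to reach host:port.
    UDP connect() sends no packets; it only selects a route."""
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        s.connect((host, port))
        return s.getsockname()[0]
    finally:
        s.close()
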
@@ -366,15 +380,13 @@ class BitBakeXMLRPCClient(BitBakeBaseServer):
ip = s.getsockname()[0]
s.close()
except Exception as e:
bb.warn("Could not create socket for %s:%s (%s)" % (host, port, str(e)))
raise e
bb.fatal("Could not create socket for %s:%s (%s)" % (host, port, str(e)))
try:
self.serverImpl = XMLRPCProxyServer(host, port)
self.connection = BitBakeXMLRPCServerConnection(self.serverImpl, (ip, 0), self.observer_only, featureset)
return self.connection.connect(self.token)
return self.connection.connect()
except Exception as e:
bb.warn("Could not connect to server at %s:%s (%s)" % (host, port, str(e)))
raise e
bb.fatal("Could not connect to server at %s:%s (%s)" % (host, port, str(e)))

def endSession(self):
self.connection.removeClient()

@@ -190,18 +190,16 @@ class SignatureGeneratorBasic(SignatureGenerator):
taint = self.read_taint(fn, task, dataCache.stamp[fn])
if taint:
data = data + taint
logger.warn("%s is tainted from a forced run" % k)

h = hashlib.md5(data).hexdigest()
self.taskhash[k] = h
#d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task])
return h

def get_taskdata(self):
return (self.runtaskdeps, self.taskhash, self.file_checksum_values)

def set_taskdata(self, data):
self.runtaskdeps, self.taskhash, self.file_checksum_values = data
def set_taskdata(self, hashes, deps, checksums):
self.runtaskdeps = deps
self.taskhash = hashes
self.file_checksum_values = checksums

def dump_sigtask(self, fn, task, stampbase, runtime):
k = fn + "." + task
@@ -295,9 +293,10 @@ def dump_this_task(outfile, d):
bb.parse.siggen.dump_sigtask(fn, task, outfile, "customfile")

def clean_basepath(a):
b = a.rsplit("/", 2)[1] + a.rsplit("/", 2)[2]
if a.startswith("virtual:"):
b = b + ":" + a.rsplit(":", 1)[0]
b = a.rsplit(":", 1)[0] + ":" + a.rsplit("/", 1)[1]
else:
b = a.rsplit("/", 1)[1]
return b

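clean_basepath above shortens recipe identifiers for display. Worked examples of the mapping, mirroring one side of the hunk (the input paths are illustrative):

def clean_basepath(a):
    # strip directories; keep "<class>:...:<recipe file>" for class-extended
    # ("virtual:") identifiers and the bare file name otherwise
    if a.startswith("virtual:"):
        return a.rsplit(":", 1)[0] + ":" + a.rsplit("/", 1)[1]
    return a.rsplit("/", 1)[1]

print(clean_basepath("/meta/recipes/zlib/zlib_1.2.8.bb"))
# -> zlib_1.2.8.bb
print(clean_basepath("virtual:native:/meta/recipes/zlib/zlib_1.2.8.bb"))
# -> virtual:native:zlib_1.2.8.bb
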
def clean_basepaths(a):
@@ -306,12 +305,6 @@ def clean_basepaths(a):
b[clean_basepath(x)] = a[x]
return b

def clean_basepaths_list(a):
b = []
for x in a:
b.append(clean_basepath(x))
return b

def compare_sigfiles(a, b, recursecb = None):
output = []

@@ -411,17 +404,6 @@ def compare_sigfiles(a, b, recursecb = None):
for f in removed:
output.append("Dependency on checksum of file %s was removed" % (f))

changed = []
for idx, task in enumerate(a_data['runtaskdeps']):
a = a_data['runtaskdeps'][idx]
b = b_data['runtaskdeps'][idx]
if a_data['runtaskhashes'][a] != b_data['runtaskhashes'][b]:
changed.append("%s with hash %s\n changed to\n%s with hash %s" % (a, a_data['runtaskhashes'][a], b, b_data['runtaskhashes'][b]))

if changed:
output.append("runtaskdeps changed from %s to %s" % (clean_basepaths_list(a_data['runtaskdeps']), clean_basepaths_list(b_data['runtaskdeps'])))
output.append("\n".join(changed))


if 'runtaskhashes' in a_data and 'runtaskhashes' in b_data:
a = a_data['runtaskhashes']
@@ -498,17 +480,4 @@ def dump_sigfile(a):
if 'taint' in a_data:
output.append("Tainted (by forced/invalidated task): %s" % a_data['taint'])

data = a_data['basehash']
for dep in a_data['runtaskdeps']:
data = data + a_data['runtaskhashes'][dep]

for c in a_data['file_checksum_values']:
data = data + c[1]

if 'taint' in a_data:
data = data + a_data['taint']

h = hashlib.md5(data).hexdigest()
output.append("Computed Hash is %s" % h)

return output

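The tail of dump_sigfile above recomputes a task hash from its ingredients: base hash, dependency hashes, file checksums, and an optional taint. A condensed sketch of that combination (Python 3 spelling; the real code concatenates dependencies in runtaskdeps order rather than sorted order):

import hashlib

def compute_taskhash(basehash, dep_hashes, file_checksums, taint=None):
    """Concatenate the signature ingredients and md5 them, mirroring
    the recomputation shown in the hunk above."""
    data = basehash
    for dep in sorted(dep_hashes):
        data += dep_hashes[dep]
    for _, checksum in file_checksums:
        data += checksum
    if taint:
        data += taint
    return hashlib.md5(data.encode("utf-8")).hexdigest()

print(compute_taskhash("aa11", {"zlib.do_compile": "bb22"}, [("a.c", "cc33")]))
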
@@ -259,13 +259,6 @@ class TestConcatOverride(unittest.TestCase):
bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "")

def test_remove_expansion(self):
self.d.setVar("BAR", "Z")
self.d.setVar("TEST", "${BAR}/X Y")
self.d.setVar("TEST_remove", "${BAR}/X")
bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "Y")

class TestOverrides(unittest.TestCase):
def setUp(self):
self.d = bb.data.init()
@@ -306,39 +299,3 @@ class TestFlags(unittest.TestCase):
self.assertEqual(self.d.getVarFlag("foo", "flag2"), None)


class Contains(unittest.TestCase):
def setUp(self):
self.d = bb.data.init()
self.d.setVar("SOMEFLAG", "a b c")

def test_contains(self):
self.assertTrue(bb.utils.contains("SOMEFLAG", "a", True, False, self.d))
self.assertTrue(bb.utils.contains("SOMEFLAG", "b", True, False, self.d))
self.assertTrue(bb.utils.contains("SOMEFLAG", "c", True, False, self.d))

self.assertTrue(bb.utils.contains("SOMEFLAG", "a b", True, False, self.d))
self.assertTrue(bb.utils.contains("SOMEFLAG", "b c", True, False, self.d))
self.assertTrue(bb.utils.contains("SOMEFLAG", "c a", True, False, self.d))

self.assertTrue(bb.utils.contains("SOMEFLAG", "a b c", True, False, self.d))
self.assertTrue(bb.utils.contains("SOMEFLAG", "c b a", True, False, self.d))

self.assertFalse(bb.utils.contains("SOMEFLAG", "x", True, False, self.d))
self.assertFalse(bb.utils.contains("SOMEFLAG", "a x", True, False, self.d))
self.assertFalse(bb.utils.contains("SOMEFLAG", "x c b", True, False, self.d))
self.assertFalse(bb.utils.contains("SOMEFLAG", "x c b a", True, False, self.d))

def test_contains_any(self):
self.assertTrue(bb.utils.contains_any("SOMEFLAG", "a", True, False, self.d))
self.assertTrue(bb.utils.contains_any("SOMEFLAG", "b", True, False, self.d))
self.assertTrue(bb.utils.contains_any("SOMEFLAG", "c", True, False, self.d))

self.assertTrue(bb.utils.contains_any("SOMEFLAG", "a b", True, False, self.d))
self.assertTrue(bb.utils.contains_any("SOMEFLAG", "b c", True, False, self.d))
self.assertTrue(bb.utils.contains_any("SOMEFLAG", "c a", True, False, self.d))

self.assertTrue(bb.utils.contains_any("SOMEFLAG", "a x", True, False, self.d))
self.assertTrue(bb.utils.contains_any("SOMEFLAG", "x c", True, False, self.d))

self.assertFalse(bb.utils.contains_any("SOMEFLAG", "x", True, False, self.d))
self.assertFalse(bb.utils.contains_any("SOMEFLAG", "x y z", True, False, self.d))

@@ -24,7 +24,6 @@ import tempfile
import subprocess
import os
from bb.fetch2 import URI
from bb.fetch2 import FetchMethod
import bb

class URITest(unittest.TestCase):
@@ -445,13 +444,6 @@ class FetcherLocalTest(FetcherTest):
tree = self.fetchUnpack(['file://dir/subdir/e'])
self.assertEqual(tree, ['dir/subdir/e'])

def test_local_subdirparam(self):
tree = self.fetchUnpack(['file://a;subdir=bar'])
self.assertEqual(tree, ['bar/a'])

def test_local_deepsubdirparam(self):
tree = self.fetchUnpack(['file://dir/subdir/e;subdir=bar'])
self.assertEqual(tree, ['bar/dir/subdir/e'])

class FetcherNetworkTest(FetcherTest):

@@ -566,81 +558,5 @@ class URLHandle(unittest.TestCase):
result = bb.fetch.encodeurl(v)
self.assertEqual(result, k)

class FetchMethodTest(FetcherTest):

test_git_uris = {
# version pattern "X.Y.Z"
("mx-1.0", "git://github.com/clutter-project/mx.git;branch=mx-1.4", "9b1db6b8060bd00b121a692f942404a24ae2960f", "")
: "1.99.4",
# version pattern "vX.Y"
("mtd-utils", "git://git.infradead.org/mtd-utils.git", "ca39eb1d98e736109c64ff9c1aa2a6ecca222d8f", "")
: "1.5.0",
# version pattern "pkg_name-X.Y"
("presentproto", "git://anongit.freedesktop.org/git/xorg/proto/presentproto", "24f3a56e541b0a9e6c6ee76081f441221a120ef9", "")
: "1.0",
# version pattern "pkg_name-vX.Y.Z"
("dtc", "git://git.qemu.org/dtc.git", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "")
: "1.4.0",
# combination version pattern
("sysprof", "git://git.gnome.org/sysprof", "cd44ee6644c3641507fb53b8a2a69137f2971219", "")
: "1.2.0",
("u-boot-mkimage", "git://git.denx.de/u-boot.git;branch=master;protocol=git", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "")
: "2014.01",
# version pattern "yyyymmdd"
("mobile-broadband-provider-info", "git://git.gnome.org/mobile-broadband-provider-info", "4ed19e11c2975105b71b956440acdb25d46a347d", "")
: "20120614",
# packages with a valid GITTAGREGEX
("xf86-video-omap", "git://anongit.freedesktop.org/xorg/driver/xf86-video-omap", "ae0394e687f1a77e966cf72f895da91840dffb8f", "(?P<pver>(\d+\.(\d\.?)*))")
: "0.4.3",
("build-appliance-image", "git://git.yoctoproject.org/poky", "b37dd451a52622d5b570183a81583cc34c2ff555", "(?P<pver>(([0-9][\.|_]?)+[0-9]))")
: "11.0.0",
("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot", "cd437ecbd8986c894442f8fce1e0061e20f04dee", "chkconfig\-(?P<pver>((\d+[\.\-_]*)+))")
: "1.3.59",
("remake", "git://github.com/rocky/remake.git", "f05508e521987c8494c92d9c2871aec46307d51d", "(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))")
: "3.82+dbg0.9",
}

test_wget_uris = {
# packages with versions inside directory name
("util-linux", "http://kernel.org/pub/linux/utils/util-linux/v2.23/util-linux-2.24.2.tar.bz2", "", "")
: "2.24.2",
("enchant", "http://www.abisource.com/downloads/enchant/1.6.0/enchant-1.6.0.tar.gz", "", "")
: "1.6.0",
("cmake", "http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz", "", "")
: "2.8.12.1",
# packages with versions only in current directory
("eglic", "http://downloads.yoctoproject.org/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2", "", "")
: "2.19",
("gnu-config", "http://downloads.yoctoproject.org/releases/gnu-config/gnu-config-20120814.tar.bz2", "", "")
: "20120814",
# packages with "99" in the name of possible version
("pulseaudio", "http://freedesktop.org/software/pulseaudio/releases/pulseaudio-4.0.tar.xz", "", "")
: "5.0",
("xserver-xorg", "http://xorg.freedesktop.org/releases/individual/xserver/xorg-server-1.15.1.tar.bz2", "", "")
: "1.15.1",
# packages with valid REGEX_URI and REGEX
("cups", "http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2", "http://www.cups.org/software.php", "(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz")
: "2.0.0",
("db", "http://download.oracle.com/berkeley-db/db-5.3.21.tar.gz", "http://www.oracle.com/technetwork/products/berkeleydb/downloads/index-082944.html", "http://download.oracle.com/otn/berkeley-db/(?P<name>db-)(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz")
: "6.1.19",
}

def test_git_latest_versionstring(self):
for k, v in self.test_git_uris.items():
self.d.setVar("SRCREV", k[2])
self.d.setVar("GITTAGREGEX", k[3])
ud = bb.fetch2.FetchData(k[1], self.d)
verstring = ud.method.latest_versionstring(ud, self.d)
print("Package %s, version: %s <= %s" % (k[0], v, verstring))
r = bb.utils.vercmp_string(v, verstring)
self.assertTrue(r == -1 or r == 0)

def test_wget_latest_versionstring(self):
for k, v in self.test_wget_uris.items():
self.d.setVar("REGEX_URI", k[2])
self.d.setVar("REGEX", k[3])
ud = bb.fetch2.FetchData(k[1], self.d)
verstring = ud.method.latest_versionstring(ud, self.d)
print("Package %s, version: %s <= %s" % (k[0], v, verstring))
r = bb.utils.vercmp_string(v, verstring)
self.assertTrue(r == -1 or r == 0)

@@ -21,6 +21,7 @@

import unittest
import bb
import os

class VerCmpString(unittest.TestCase):

@@ -51,3 +52,52 @@ class VerCmpString(unittest.TestCase):
result = bb.utils.explode_dep_versions2("foo ( =1.10 )")
self.assertEqual(result, correctresult)

def test_vercmp_string_op(self):
compareops = [('1', '1', '=', True),
('1', '1', '==', True),
('1', '1', '!=', False),
('1', '1', '>', False),
('1', '1', '<', False),
('1', '1', '>=', True),
('1', '1', '<=', True),
('1', '0', '=', False),
('1', '0', '==', False),
('1', '0', '!=', True),
('1', '0', '>', True),
('1', '0', '<', False),
('1', '0', '>>', True),
('1', '0', '<<', False),
('1', '0', '>=', True),
('1', '0', '<=', False),
('0', '1', '=', False),
('0', '1', '==', False),
('0', '1', '!=', True),
('0', '1', '>', False),
('0', '1', '<', True),
('0', '1', '>>', False),
('0', '1', '<<', True),
('0', '1', '>=', False),
('0', '1', '<=', True)]

for arg1, arg2, op, correctresult in compareops:
result = bb.utils.vercmp_string_op(arg1, arg2, op)
self.assertEqual(result, correctresult, 'vercmp_string_op("%s", "%s", "%s") != %s' % (arg1, arg2, op, correctresult))

# Check that clearly invalid operator raises an exception
self.assertRaises(bb.utils.VersionStringException, bb.utils.vercmp_string_op, '0', '0', '$')


class Path(unittest.TestCase):
def test_unsafe_delete_path(self):
checkitems = [('/', True),
('//', True),
('///', True),
(os.getcwd().count(os.sep) * ('..' + os.sep), True),
(os.environ.get('HOME', '/home/test'), True),
('/home/someone', True),
('/home/other/', True),
('/home/other/subdir', False),
('', False)]
for arg1, correctresult in checkitems:
result = bb.utils._check_unsafe_delete_path(arg1)
self.assertEqual(result, correctresult, '_check_unsafe_delete_path("%s") != %s' % (arg1, correctresult))

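The new test above exercises bb.utils.vercmp_string_op, where ">>" and "<<" are the Debian-style spellings of strict greater/less. A hedged usage sketch (assumes a bitbake checkout on sys.path):

import bb.utils

# vercmp_string_op applies a comparison operator to two version strings;
# ">>"/"<<" behave like ">"/"<"
assert bb.utils.vercmp_string_op("1.2.10", "1.2.9", ">")
assert bb.utils.vercmp_string_op("1.0", "1.0", "<=")
assert not bb.utils.vercmp_string_op("0.9", "1.0", ">>")
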
@@ -25,12 +25,12 @@ import bb.cache
import bb.cooker
import bb.providers
import bb.utils
from bb.cooker import state, BBCooker, CookerFeatures
from bb.cooker import state, BBCooker
from bb.cookerdata import CookerConfiguration, ConfigParameters
import bb.fetch2

class Tinfoil:
def __init__(self, output=sys.stdout, tracking=False):
def __init__(self, output=sys.stdout):
# Needed to avoid deprecation warnings with python 2.6
warnings.filterwarnings("ignore", category=DeprecationWarning)

@@ -48,10 +48,7 @@ class Tinfoil:
configparams = TinfoilConfigParameters(parse_only=True)
self.config.setConfigParameters(configparams)
self.config.setServerRegIdleCallback(self.register_idle_function)
features = []
if tracking:
features.append(CookerFeatures.BASEDATASTORE_TRACKING)
self.cooker = BBCooker(self.config, features)
self.cooker = BBCooker(self.config)
self.config_data = self.cooker.data
bb.providers.logger.setLevel(logging.ERROR)
self.cooker_data = None

@@ -22,7 +22,7 @@ import bb
import re
import ast

os.environ["DJANGO_SETTINGS_MODULE"] = "toaster.toastermain.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "toaster.toastermain.settings")

import toaster.toastermain.settings as toaster_django_settings
from toaster.orm.models import Build, Task, Recipe, Layer_Version, Layer, Target, LogMessage, HelpText
@@ -32,7 +32,6 @@ from toaster.orm.models import Package, Package_File, Target_Installed_Package,
from toaster.orm.models import Task_Dependency, Package_Dependency
from toaster.orm.models import Recipe_Dependency
from bb.msg import BBLogFormatter as format
from django.db import models

class NotExisting(Exception):
pass
@@ -44,59 +43,10 @@ class ORMWrapper(object):
"""

def __init__(self):
self.layer_version_objects = []
self.task_objects = {}
self.recipe_objects = {}
pass

@staticmethod
def _build_key(**kwargs):
key = "0"
for k in sorted(kwargs.keys()):
if isinstance(kwargs[k], models.Model):
key += "-%d" % kwargs[k].id
else:
key += "-%s" % str(kwargs[k])
return key


def _cached_get_or_create(self, clazz, **kwargs):
""" This is a memory-cached get_or_create. We assume that the objects will not be created in the
database through any other means.
"""

assert issubclass(clazz, models.Model), "_cached_get_or_create needs to get the class as first argument"

key = ORMWrapper._build_key(**kwargs)
dictname = "objects_%s" % clazz.__name__
if not dictname in vars(self).keys():
vars(self)[dictname] = {}

created = False
if not key in vars(self)[dictname].keys():
vars(self)[dictname][key] = clazz.objects.create(**kwargs)
created = True

return (vars(self)[dictname][key], created)


def _cached_get(self, clazz, **kwargs):
""" This is a memory-cached get. We assume that the objects will not change in the database between gets.
"""
assert issubclass(clazz, models.Model), "_cached_get needs to get the class as first argument"

key = ORMWrapper._build_key(**kwargs)
dictname = "objects_%s" % clazz.__name__

if not dictname in vars(self).keys():
vars(self)[dictname] = {}

if not key in vars(self)[dictname].keys():
vars(self)[dictname][key] = clazz.objects.get(**kwargs)

return vars(self)[dictname][key]

def create_build_object(self, build_info, brbe):
def create_build_object(self, build_info):
assert 'machine' in build_info
assert 'distro' in build_info
assert 'distro_version' in build_info
@@ -115,16 +65,6 @@ class ORMWrapper(object):
build_name=build_info['build_name'],
bitbake_version=build_info['bitbake_version'])

if brbe is not None:
from bldcontrol.models import BuildEnvironment, BuildRequest
br, be = brbe.split(":")

buildrequest = BuildRequest.objects.get(pk = br)
buildrequest.build = build
buildrequest.save()

build.project_id = buildrequest.project_id
build.save()
return build

def create_target_objects(self, target_info):
@@ -136,7 +76,7 @@ class ORMWrapper(object):
tgt_object = Target.objects.create( build = target_info['build'],
target = tgt_name,
is_image = False,
)
);
targets.append(tgt_object)
return targets

@@ -166,47 +106,39 @@ class ORMWrapper(object):
assert 'recipe' in task_information
assert 'task_name' in task_information

# we use must_exist info for database look-up optimization
task_object, created = self._cached_get_or_create(Task,
build=task_information['build'],
recipe=task_information['recipe'],
task_name=task_information['task_name']
)
if created and must_exist:
task_information['debug'] = "build id %d, recipe id %d" % (task_information['build'].pk, task_information['recipe'].pk)
raise NotExisting("Task object created when expected to exist", task_information)
task_object, created = Task.objects.get_or_create(
build=task_information['build'],
recipe=task_information['recipe'],
task_name=task_information['task_name'],
)

if must_exist and created:
task_information['debug'] = "build id %d, recipe id %d" % (task_information['build'].pk, task_information['recipe'].pk)
task_object.delete()
raise NotExisting("Task object created when expected to exist", task_information)

object_changed = False
for v in vars(task_object):
if v in task_information.keys():
if vars(task_object)[v] != task_information[v]:
vars(task_object)[v] = task_information[v]
object_changed = True
vars(task_object)[v] = task_information[v]

# update setscene-related information if the task was just created
if created and task_object.outcome == Task.OUTCOME_COVERED and 1 == Task.objects.related_setscene(task_object).count():
task_object.outcome = Task.OUTCOME_CACHED
object_changed = True
# update setscene-related information
if 1 == Task.objects.related_setscene(task_object).count():
if task_object.outcome == Task.OUTCOME_COVERED:
task_object.outcome = Task.OUTCOME_CACHED

outcome_task_setscene = Task.objects.get(task_executed=True, build = task_object.build,
recipe = task_object.recipe, task_name=task_object.task_name+"_setscene").outcome
if outcome_task_setscene == Task.OUTCOME_SUCCESS:
task_object.sstate_result = Task.SSTATE_RESTORED
object_changed = True
elif outcome_task_setscene == Task.OUTCOME_FAILED:
task_object.sstate_result = Task.SSTATE_FAILED
object_changed = True

# mark down duration if we have a start time and a current time
if 'start_time' in task_information.keys() and 'end_time' in task_information.keys():
duration = task_information['end_time'] - task_information['start_time']
task_object.elapsed_time = duration
object_changed = True
del task_information['start_time']
del task_information['end_time']

if object_changed:
task_object.save()
task_object.save()
return task_object

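_cached_get_or_create above avoids a database round-trip per lookup by keying a per-class dict on the kwargs. A stripped-down version of the memoisation idea without Django (the class and its factory argument are inventions for illustration; a plain dict stands in for the ORM row):

class CachedFactory(object):
    """Memoised get_or_create: each unique kwargs key is built once.
    Assumes nothing else creates the same rows behind our back."""
    def __init__(self, make):
        self.make = make
        self.cache = {}

    def get_or_create(self, **kwargs):
        key = tuple(sorted(kwargs.items()))
        created = key not in self.cache
        if created:
            self.cache[key] = self.make(**kwargs)
        return self.cache[key], created

factory = CachedFactory(dict)
obj, created = factory.get_or_create(build=1, task_name="do_compile")
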
@@ -214,19 +146,20 @@ class ORMWrapper(object):
assert 'layer_version' in recipe_information
assert 'file_path' in recipe_information

recipe_object, created = self._cached_get_or_create(Recipe, layer_version=recipe_information['layer_version'],
file_path=recipe_information['file_path'])
if created and must_exist:

recipe_object, created = Recipe.objects.get_or_create(
layer_version=recipe_information['layer_version'],
file_path=recipe_information['file_path'])

if must_exist and created:
recipe_object.delete()
raise NotExisting("Recipe object created when expected to exist", recipe_information)

object_changed = False
for v in vars(recipe_object):
if v in recipe_information.keys():
object_changed = True
vars(recipe_object)[v] = recipe_information[v]

if object_changed:
recipe_object.save()
recipe_object.save()

return recipe_object

@@ -245,34 +178,19 @@ class ORMWrapper(object):
priority = layer_version_information['priority']
)

self.layer_version_objects.append(layer_version_object)

return layer_version_object

def get_update_layer_object(self, layer_information, brbe):
def get_update_layer_object(self, layer_information):
assert 'name' in layer_information
assert 'local_path' in layer_information
assert 'layer_index_url' in layer_information

if brbe is None:
layer_object, created = Layer.objects.get_or_create(
layer_object, created = Layer.objects.get_or_create(
name=layer_information['name'],
local_path=layer_information['local_path'],
layer_index_url=layer_information['layer_index_url'])
return layer_object
else:
# we are under managed mode; we must match the layer used in the Project Layer
from bldcontrol.models import BuildEnvironment, BuildRequest
br, be = brbe.split(":")

buildrequest = BuildRequest.objects.get(pk = br)

# we might have a race condition here, as the project layers may change between the build trigger and the actual build execution
# but we can only match on the layer name, so the worst thing can happen is a mis-identification of the layer, not a total failure
layer_object = buildrequest.project.projectlayer_set.get(layercommit__layer__name=layer_information['name']).layercommit.layer

return layer_object

return layer_object

def save_target_file_information(self, build_obj, target_obj, filedata):
assert isinstance(build_obj, Build)
@@ -304,7 +222,7 @@ class ORMWrapper(object):
parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
if len(parent_path) == 0:
parent_path = "/"
parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
tf_obj = Target_File.objects.create(
target = target_obj,
path = path,
@@ -338,7 +256,7 @@ class ORMWrapper(object):
permission = permission,
owner = user,
group = group)
parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
tf_obj.directory = parent_obj
tf_obj.save()

@@ -393,7 +311,8 @@ class ORMWrapper(object):
searchname = pkgpnmap[p]['OPKGN']

packagedict[p]['object'], created = Package.objects.get_or_create( build = build_obj, name = searchname )
if created or packagedict[p]['object'].size == -1: # save the data anyway we can, not just if it was not created here; bug [YOCTO #6887]
if created:
# package was not build in the current build, but
# fill in everything we can from the runtime-reverse package data
try:
packagedict[p]['object'].recipe = recipes[pkgpnmap[p]['PN']]
@@ -407,14 +326,11 @@ class ORMWrapper(object):
packagedict[p]['object'].size = int(pkgpnmap[p]['PKGSIZE'])

# no files recorded for this package, so save files info
packagefile_objects = []
for targetpath in pkgpnmap[p]['FILES_INFO']:
targetfilesize = pkgpnmap[p]['FILES_INFO'][targetpath]
packagefile_objects.append(Package_File( package = packagedict[p]['object'],
Package_File.objects.create( package = packagedict[p]['object'],
path = targetpath,
size = targetfilesize))
if len(packagefile_objects):
Package_File.objects.bulk_create(packagefile_objects)
size = targetfilesize)
except KeyError as e:
errormsg += " stpi: Key error, package %s key %s \n" % ( p, e )

@@ -424,7 +340,6 @@ class ORMWrapper(object):

Target_Installed_Package.objects.create(target = target_obj, package = packagedict[p]['object'])

packagedeps_objs = []
for p in packagedict:
for (px,deptype) in packagedict[p]['depends']:
if deptype == 'depends':
@@ -432,13 +347,10 @@ class ORMWrapper(object):
elif deptype == 'recommends':
tdeptype = Package_Dependency.TYPE_TRECOMMENDS

packagedeps_objs.append(Package_Dependency( package = packagedict[p]['object'],
Package_Dependency.objects.create( package = packagedict[p]['object'],
depends_on = packagedict[px]['object'],
dep_type = tdeptype,
target = target_obj))

if len(packagedeps_objs) > 0:
Package_Dependency.objects.bulk_create(packagedeps_objs)
target = target_obj);

if (len(errormsg) > 0):
raise Exception(errormsg)
@@ -489,13 +401,10 @@ class ORMWrapper(object):
bp_object.save()

# save any attached file information
packagefile_objects = []
for path in package_info['FILES_INFO']:
packagefile_objects.append(Package_File( package = bp_object,
fo = Package_File.objects.create( package = bp_object,
path = path,
size = package_info['FILES_INFO'][path] ))
if len(packagefile_objects):
Package_File.objects.bulk_create(packagefile_objects)
size = package_info['FILES_INFO'][path] )

def _po_byname(p):
pkg, created = Package.objects.get_or_create(build = build_obj, name = p)
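Several hunks above replace per-row Package_File and Package_Dependency creation with collect-then-bulk_create, which issues one INSERT for the whole batch instead of one per row. The shape of that Django pattern as a fragment (the model class is passed in, since this sketch carries no Django setup of its own):

def bulk_save_package_files(Package_File, package, files_info):
    """Collect unsaved rows, then let Django write them in one query.
    Fragment: Package_File is the toaster model, supplied by the caller."""
    rows = [Package_File(package=package, path=path, size=size)
            for path, size in files_info.items()]
    if rows:
        Package_File.objects.bulk_create(rows)
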
@@ -504,45 +413,39 @@ class ORMWrapper(object):
pkg.save()
return pkg

packagedeps_objs = []
# save soft dependency information
if 'RDEPENDS' in package_info and package_info['RDEPENDS']:
for p in bb.utils.explode_deps(package_info['RDEPENDS']):
packagedeps_objs.append(Package_Dependency( package = bp_object,
depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RDEPENDS))
Package_Dependency.objects.get_or_create( package = bp_object,
depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RDEPENDS)
if 'RPROVIDES' in package_info and package_info['RPROVIDES']:
for p in bb.utils.explode_deps(package_info['RPROVIDES']):
packagedeps_objs.append(Package_Dependency( package = bp_object,
depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RPROVIDES))
Package_Dependency.objects.get_or_create( package = bp_object,
depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RPROVIDES)
if 'RRECOMMENDS' in package_info and package_info['RRECOMMENDS']:
for p in bb.utils.explode_deps(package_info['RRECOMMENDS']):
packagedeps_objs.append(Package_Dependency( package = bp_object,
depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RRECOMMENDS))
Package_Dependency.objects.get_or_create( package = bp_object,
depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RRECOMMENDS)
if 'RSUGGESTS' in package_info and package_info['RSUGGESTS']:
for p in bb.utils.explode_deps(package_info['RSUGGESTS']):
packagedeps_objs.append(Package_Dependency( package = bp_object,
depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RSUGGESTS))
Package_Dependency.objects.get_or_create( package = bp_object,
depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RSUGGESTS)
if 'RREPLACES' in package_info and package_info['RREPLACES']:
for p in bb.utils.explode_deps(package_info['RREPLACES']):
packagedeps_objs.append(Package_Dependency( package = bp_object,
depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RREPLACES))
Package_Dependency.objects.get_or_create( package = bp_object,
depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RREPLACES)
if 'RCONFLICTS' in package_info and package_info['RCONFLICTS']:
for p in bb.utils.explode_deps(package_info['RCONFLICTS']):
packagedeps_objs.append(Package_Dependency( package = bp_object,
depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS))

if len(packagedeps_objs) > 0:
Package_Dependency.objects.bulk_create(packagedeps_objs)
Package_Dependency.objects.get_or_create( package = bp_object,
depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS)

return bp_object

def save_build_variables(self, build_obj, vardump):
assert isinstance(build_obj, Build)

helptext_objects = []

for k in vardump:
desc = vardump[k]['doc']
desc = vardump[k]['doc'];
if desc is None:
var_words = [word for word in k.split('_')]
root_var = "_".join([word for word in var_words if word.isupper()])
@@ -550,33 +453,25 @@ class ORMWrapper(object):
desc = vardump[root_var]['doc']
if desc is None:
desc = ''
if len(desc):
helptext_objects.append(HelpText(build=build_obj,
if desc:
helptext_obj = HelpText.objects.create(build=build_obj,
area=HelpText.VARIABLE,
key=k,
text=desc))
text=desc)
if not bool(vardump[k]['func']):
value = vardump[k]['v']
value = vardump[k]['v'];
if value is None:
value = ''
variable_obj = Variable.objects.create( build = build_obj,
variable_name = k,
variable_value = value,
description = desc)

varhist_objects = []
for vh in vardump[k]['history']:
if not 'documentation.conf' in vh['file']:
varhist_objects.append(VariableHistory( variable = variable_obj,
VariableHistory.objects.create( variable = variable_obj,
file_name = vh['file'],
line_number = vh['line'],
operation = vh['op']))
if len(varhist_objects):
VariableHistory.objects.bulk_create(varhist_objects)

HelpText.objects.bulk_create(helptext_objects)

class MockEvent: pass # sometimes we mock an event, declare it here
operation = vh['op'])

class BuildInfoHelper(object):
""" This class gathers the build information from the server and sends it
@@ -585,7 +480,6 @@ class BuildInfoHelper(object):
Keeps in memory all data that needs matching before writing it to the database
"""


def __init__(self, server, has_build_history = False):
self._configure_django()
self.internal_state = {}
@@ -595,8 +489,6 @@ class BuildInfoHelper(object):
self.orm_wrapper = ORMWrapper()
self.has_build_history = has_build_history
self.tmp_dir = self.server.runCommand(["getVariable", "TMPDIR"])[0]
self.brbe = self.server.runCommand(["getVariable", "TOASTER_BRBE"])[0]


def _configure_django(self):
# Add toaster to sys path for importing modules
@@ -646,7 +538,7 @@ class BuildInfoHelper(object):

# Heuristics: we always match recipe to the deepest layer path that
# we can match to the recipe file path
for bl in sorted(self.orm_wrapper.layer_version_objects, reverse=True, key=_slkey):
for bl in sorted(Layer_Version.objects.filter(build = self.internal_state['build']), reverse=True, key=_slkey):
if (path.startswith(bl.layer.local_path)):
return bl

@@ -695,31 +587,20 @@ class BuildInfoHelper(object):

################################
## external available methods to store information
@staticmethod
def _get_data_from_event(event):
evdata = None
if '_localdata' in vars(event):
evdata = event._localdata
elif 'data' in vars(event):
evdata = event.data
else:
raise Exception("Event with neither _localdata or data properties")
return evdata

def store_layer_info(self, event):
layerinfos = BuildInfoHelper._get_data_from_event(event)
assert 'data' in vars(event)
layerinfos = event.data
self.internal_state['lvs'] = {}
for layer in layerinfos:
self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)] = layerinfos[layer]['version']
self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer])] = layerinfos[layer]['version']


def store_started_build(self, event):
assert '_pkgs' in vars(event)
assert 'lvs' in self.internal_state, "Layer version information not found; Check if the bitbake server was configured to inherit toaster.bbclass."
build_information = self._get_build_information()

build_obj = self.orm_wrapper.create_build_object(build_information, self.brbe)

build_obj = self.orm_wrapper.create_build_object(build_information)
self.internal_state['build'] = build_obj

# save layer version information for this build
@@ -738,19 +619,14 @@ class BuildInfoHelper(object):
# Save build configuration
self.orm_wrapper.save_build_variables(build_obj, self.server.runCommand(["getAllKeysWithFlags", ["doc", "func"]])[0])

return self.brbe

def update_target_image_file(self, event):
image_fstypes = self.server.runCommand(["getVariable", "IMAGE_FSTYPES"])[0]
evdata = BuildInfoHelper._get_data_from_event(event)

for t in self.internal_state['targets']:
if t.is_image == True:
output_files = list(evdata.viewkeys())
output_files = list(event.data.viewkeys())
for output in output_files:
if t.target in output and output.split('.rootfs.')[1] in image_fstypes:
self.orm_wrapper.save_target_image_file_information(t, output, evdata[output])
self.orm_wrapper.save_target_image_file_information(t, output, event.data[output])

def update_build_information(self, event, errors, warnings, taskfailures):
if 'build' in self.internal_state:
@@ -758,8 +634,8 @@ class BuildInfoHelper(object):

def store_license_manifest_path(self, event):
deploy_dir = BuildInfoHelper._get_data_from_event(event)['deploy_dir']
image_name = BuildInfoHelper._get_data_from_event(event)['image_name']
deploy_dir = event.data['deploy_dir']
image_name = event.data['image_name']
path = deploy_dir + "/licenses/" + image_name + "/"
for target in self.internal_state['targets']:
if target.target in image_name:
@@ -807,7 +683,7 @@ class BuildInfoHelper(object):

def store_tasks_stats(self, event):
for (taskfile, taskname, taskstats, recipename) in BuildInfoHelper._get_data_from_event(event):
for (taskfile, taskname, taskstats, recipename) in event.data:
localfilepath = taskfile.split(":")[-1]
assert localfilepath.startswith("/")

@@ -822,8 +698,6 @@ class BuildInfoHelper(object):
task_information['task_name'] = taskname
task_information['cpu_usage'] = taskstats['cpu_usage']
task_information['disk_io'] = taskstats['disk_io']
if 'elapsed_time' in taskstats:
task_information['elapsed_time'] = taskstats['elapsed_time']
task_obj = self.orm_wrapper.get_update_task_object(task_information, True) # must exist

def update_and_store_task(self, event):
@@ -881,11 +755,12 @@ class BuildInfoHelper(object):

def store_missed_state_tasks(self, event):
for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['missed']:
for (fn, taskname, taskhash, sstatefile) in event.data['missed']:

identifier = fn + taskname + "_setscene"
recipe_information = self._get_recipe_information_from_taskfile(fn)
recipe = self.orm_wrapper.get_update_recipe_object(recipe_information)
class MockEvent: pass
mevent = MockEvent()
mevent.taskname = taskname
mevent.taskhash = taskhash
@@ -899,11 +774,12 @@ class BuildInfoHelper(object):

self.orm_wrapper.get_update_task_object(task_information)

for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['found']:
for (fn, taskname, taskhash, sstatefile) in event.data['found']:

identifier = fn + taskname + "_setscene"
recipe_information = self._get_recipe_information_from_taskfile(fn)
recipe = self.orm_wrapper.get_update_recipe_object(recipe_information)
class MockEvent: pass
mevent = MockEvent()
mevent.taskname = taskname
mevent.taskhash = taskhash
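The MockEvent trick above is plain duck typing: build a bare object carrying just the attributes the task-update path reads, instead of a real bitbake event. A standalone sketch (the describe() helper is invented for illustration):

class MockEvent:
    pass

def describe(event):
    # Reads only the two attributes the mock carries.
    return "%s (%s)" % (event.taskname, event.taskhash)

mevent = MockEvent()
mevent.taskname = "do_compile_setscene"
mevent.taskhash = "deadbeef"
print(describe(mevent))  # do_compile_setscene (deadbeef)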
@@ -915,14 +791,15 @@ class BuildInfoHelper(object):

def store_target_package_data(self, event):
assert 'data' in vars(event)
# for all image targets
for target in self.internal_state['targets']:
if target.is_image:
try:
pkgdata = BuildInfoHelper._get_data_from_event(event)['pkgdata']
imgdata = BuildInfoHelper._get_data_from_event(event)['imgdata'][target.target]
pkgdata = event.data['pkgdata']
imgdata = event.data['imgdata'][target.target]
self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata, pkgdata, self.internal_state['recipes'])
filedata = BuildInfoHelper._get_data_from_event(event)['filedata'][target.target]
filedata = event.data['filedata'][target.target]
self.orm_wrapper.save_target_file_information(self.internal_state['build'], target, filedata)
except KeyError:
# we must have not got the data for this image, nothing to save
@@ -987,22 +864,20 @@ class BuildInfoHelper(object):

# save recipe dependency
# buildtime
recipedeps_objects = []
for recipe in event._depgraph['depends']:
try:
target = self.internal_state['recipes'][recipe]
for dep in event._depgraph['depends'][recipe]:
dependency = self.internal_state['recipes'][dep]
recipedeps_objects.append(Recipe_Dependency( recipe = target,
depends_on = dependency, dep_type = Recipe_Dependency.TYPE_DEPENDS))
Recipe_Dependency.objects.get_or_create( recipe = target,
depends_on = dependency, dep_type = Recipe_Dependency.TYPE_DEPENDS)
except KeyError as e:
if e not in assume_provided and not str(e).startswith("virtual/"):
errormsg += " stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, e)
Recipe_Dependency.objects.bulk_create(recipedeps_objects)

# save all task information
def _save_a_task(taskdesc):
spec = re.split(r'\.', taskdesc)
spec = re.split(r'\.', taskdesc);
pn = ".".join(spec[0:-1])
taskname = spec[-1]
e = event
@@ -1019,7 +894,6 @@ class BuildInfoHelper(object):
tasks[taskdesc] = _save_a_task(taskdesc)

# create dependencies between tasks
taskdeps_objects = []
for taskdesc in event._depgraph['tdepends']:
target = tasks[taskdesc]
for taskdep in event._depgraph['tdepends'][taskdesc]:
@@ -1028,100 +902,63 @@ class BuildInfoHelper(object):
dep = _save_a_task(taskdep)
else:
dep = tasks[taskdep]
taskdeps_objects.append(Task_Dependency( task = target, depends_on = dep ))
Task_Dependency.objects.bulk_create(taskdeps_objects)
Task_Dependency.objects.get_or_create( task = target, depends_on = dep )

if (len(errormsg) > 0):
raise Exception(errormsg)
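The swap from bulk_create() to get_or_create() in this hunk trades speed for idempotence: bulk-inserting a collected list is fast but can create duplicate rows when a build is replayed, while get_or_create() checks for an existing row first. A dict-based analogy of the semantics, not the Django ORM itself:

store = {}

def get_or_create(key, value):
    # Mirrors Django's (object, created) return convention.
    if key in store:
        return store[key], False
    store[key] = value
    return value, True

print(get_or_create(("recipe", "dep"), 1))  # (1, True)  - created
print(get_or_create(("recipe", "dep"), 2))  # (1, False) - reused, no duplicate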
def store_build_package_information(self, event):
package_info = BuildInfoHelper._get_data_from_event(event)
assert 'data' in vars(event)
package_info = event.data
self.orm_wrapper.save_build_package_information(self.internal_state['build'],
package_info,
self.internal_state['recipes'],
)

def _store_build_done(self, errorcode):
br_id, be_id = self.brbe.split(":")
from bldcontrol.models import BuildEnvironment, BuildRequest
be = BuildEnvironment.objects.get(pk = be_id)
be.lock = BuildEnvironment.LOCK_LOCK
be.save()
br = BuildRequest.objects.get(pk = br_id)
if errorcode == 0:
br.state = BuildRequest.REQ_COMPLETED
else:
br.state = BuildRequest.REQ_FAILED
br.save()
def _store_log_information(self, level, text):
log_information = {}
log_information['build'] = self.internal_state['build']
log_information['level'] = level
log_information['message'] = text
self.orm_wrapper.create_logmessage(log_information)

def store_log_info(self, text):
self._store_log_information(LogMessage.INFO, text)

def store_log_warn(self, text):
self._store_log_information(LogMessage.WARNING, text)

def store_log_error(self, text):
mockevent = MockEvent()
mockevent.levelno = format.ERROR
mockevent.msg = text
mockevent.pathname = '-- None'
mockevent.lineno = -1
self.store_log_event(mockevent)

def store_log_exception(self, text, backtrace = ""):
mockevent = MockEvent()
mockevent.levelno = -1
mockevent.msg = text
mockevent.pathname = backtrace
mockevent.lineno = -1
self.store_log_event(mockevent)

self._store_log_information(LogMessage.ERROR, text)

def store_log_event(self, event):
if event.levelno < format.WARNING:
return

if 'args' in vars(event):
event.msg = event.msg % event.args

if not 'build' in self.internal_state:
if self.brbe is None:
if not 'backlog' in self.internal_state:
self.internal_state['backlog'] = []
self.internal_state['backlog'].append(event)
else: # we're under Toaster control, post the errors to the build request
from bldcontrol.models import BuildRequest, BRError
br, be = self.brbe.split(":")
buildrequest = BuildRequest.objects.get(pk = br)
brerror = BRError.objects.create(req = buildrequest, errtype="build", errmsg = event.msg)

return

if 'build' in self.internal_state and 'backlog' in self.internal_state:
if len(self.internal_state['backlog']):
tempevent = self.internal_state['backlog'].pop()
print "DEBUG: Saving stored event ", tempevent
print "Saving stored event ", tempevent
self.store_log_event(tempevent)
else:
print "ERROR: Events not saved: \n", self.internal_state['backlog']
del self.internal_state['backlog']

if event.levelno < format.WARNING:
return

if not 'build' in self.internal_state:
print "Save event for later"
if not 'backlog' in self.internal_state:
self.internal_state['backlog'] = []
self.internal_state['backlog'].append(event)

return
log_information = {}
log_information['build'] = self.internal_state['build']
if event.levelno == format.ERROR:
if event.levelno >= format.ERROR:
log_information['level'] = LogMessage.ERROR
elif event.levelno == format.WARNING:
log_information['level'] = LogMessage.WARNING
elif event.levelno == -1: # toaster self-logging
log_information['level'] = -1
else:
log_information['level'] = LogMessage.INFO

log_information['message'] = event.msg
log_information['pathname'] = event.pathname
log_information['lineno'] = event.lineno
self.orm_wrapper.create_logmessage(log_information)

def close(self, errorcode):
if self.brbe is not None:
self._store_build_done(errorcode)

if 'backlog' in self.internal_state:
for event in self.internal_state['backlog']:
print "NOTE: Unsaved log: ", event.msg
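The backlog handling above reduces to a simple buffering pattern: log events that arrive before a build object exists are queued, then drained through the normal path once the build is known. A minimal sketch under that assumption:

state = {}

def store_log_event(msg):
    if 'build' not in state:
        # No build yet: queue the event for later.
        state.setdefault('backlog', []).append(msg)
        return
    # Build exists: drain anything queued, then save the new event.
    while state.get('backlog'):
        print("Saving stored event", state['backlog'].pop())
    print("Saving", msg)

store_log_event("early warning")     # buffered
state['build'] = object()
store_log_event("first real event")  # drains backlog, then saves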
@@ -230,7 +230,10 @@ class SimpleSettingsDialog (CrumbsDialog, SettingsUIHelper):
self.configuration.sstatemirror = ""
for mirror in self.sstatemirrors_list:
if mirror[1] != "" and mirror[2].startswith("file://"):
smirror = mirror[2] + " " + mirror[1] + " \\n "
if mirror[1].endswith("\\1"):
smirror = mirror[2] + " " + mirror[1] + " \\n "
else:
smirror = mirror[2] + " " + mirror[1] + "\\1 \\n "
self.configuration.sstatemirror += smirror
self.configuration.bbthread = self.bb_spinner.get_value_as_int()
self.configuration.pmake = self.pmake_spinner.get_value_as_int()
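The hunk above makes the dialog append the \1 capture group to a mirror entry unless the user already typed it. Roughly, with hypothetical mirror values:

entries = [
    ("file:///sstate-cache", "file://.*"),    # (replacement, match), no \1 yet
    ("file:///mirror/\\1", "file://.*"),      # replacement already ends with \1
]
sstatemirror = ""
for replacement, match in entries:
    if replacement.endswith("\\1"):
        sstatemirror += match + " " + replacement + " \\n "
    else:
        sstatemirror += match + " " + replacement + "\\1 \\n "
print(sstatemirror)  # both entries end up with the \1 group present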
@@ -198,7 +198,7 @@ def main(server, eventHandler, params):
print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
return 1
if 'msg' in cmdline and cmdline['msg']:
print(cmdline['msg'])
logger.error(cmdline['msg'])
return 1
cmdline = cmdline['action']
if not cmdline or cmdline[0] != "generateDotGraph":
@@ -236,7 +236,7 @@ def main(server, eventHandler, params):
try:
event = eventHandler.waitEvent(0.25)
if gtkthread.quit.isSet():
_, error = server.runCommand(["stateForceShutdown"])
_, error = server.runCommand(["stateStop"])
if error:
print('Unable to cleanly stop: %s' % error)
break

@@ -284,7 +284,6 @@ def main(server, eventHandler, params, tf = TerminalFilter):

if not params.observe_only:
params.updateFromServer(server)
params.updateToServer(server)
cmdline = params.parseActions()
if not cmdline:
print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
@@ -322,8 +321,8 @@ def main(server, eventHandler, params, tf = TerminalFilter):
break
termfilter.updateFooter()
event = eventHandler.waitEvent(0.25)
if event is None:
continue
if event is None:
continue
helper.eventHandler(event)
if isinstance(event, bb.runqueue.runQueueExitWait):
if not main.shutdown:
@@ -352,7 +351,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
# For "normal" logging conditions, don't show note logs from tasks
# but do show them if the user has changed the default log level to
# include verbose/debug messages
if event.taskpid != 0 and event.levelno <= format.NOTE and (event.levelno < llevel or (event.levelno == format.NOTE and llevel != format.VERBOSE)):
if event.taskpid != 0 and event.levelno <= format.NOTE:
continue
logger.handle(event)
continue
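The knotty condition above decides when task NOTE messages are hidden. A truth-table sketch with invented numeric levels standing in for the bb.msg.BBLogFormatter constants:

DEBUG, VERBOSE, NOTE, WARNING = 10, 15, 20, 30

def suppress_task_log(taskpid, levelno, llevel):
    # Hide NOTE-and-below messages from tasks unless the user lowered
    # the threshold to VERBOSE/DEBUG.
    return (taskpid != 0 and levelno <= NOTE and
            (levelno < llevel or (levelno == NOTE and llevel != VERBOSE)))

print(suppress_task_log(1234, NOTE, NOTE))     # True: hidden by default
print(suppress_task_log(1234, NOTE, VERBOSE))  # False: shown with -v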
@@ -508,11 +507,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
termfilter.clearFooter()
# ignore interrupted io
if ioerror.args[0] == 4:
continue
sys.stderr.write(str(ioerror))
if not params.observe_only:
_, error = server.runCommand(["stateForceShutdown"])
main.shutdown = 2
pass
except KeyboardInterrupt:
termfilter.clearFooter()
if params.observe_only:
@@ -531,11 +526,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
logger.error("Unable to cleanly shutdown: %s" % error)
main.shutdown = main.shutdown + 1
pass
except Exception as e:
sys.stderr.write(str(e))
if not params.observe_only:
_, error = server.runCommand(["stateForceShutdown"])
main.shutdown = 2

summary = ""
if taskfailures:
summary += pluralise("\nSummary: %s task failed:",
@@ -91,16 +91,13 @@ def main(server, eventHandler, params ):
errors = 0
warnings = 0
taskfailures = []
first = True

buildinfohelper = BuildInfoHelper(server, build_history_enabled)

while True:
try:
event = eventHandler.waitEvent(0.25)
if first:
first = False
logger.info("ToasterUI waiting for events")

if event is None:
if main.shutdown > 0:
@@ -219,33 +216,21 @@ def main(server, eventHandler, params ):
if isinstance(event, (bb.command.CommandCompleted,
bb.command.CommandFailed,
bb.command.CommandExit)):
errorcode = 0
if (isinstance(event, bb.command.CommandFailed)):
event.levelno = format.ERROR
event.msg = "Command Failed " + event.error
event.msg = event.error
event.pathname = ""
event.lineno = 0
buildinfohelper.store_log_event(event)
errors += 1
errorcode = 1

buildinfohelper.update_build_information(event, errors, warnings, taskfailures)
buildinfohelper.close(errorcode)
# mark the log output; controllers may kill the toasterUI after seeing this log
logger.info("ToasterUI build done")

# we start a new build info
if buildinfohelper.brbe is not None:

logger.debug(1, "ToasterUI under BuildEnvironment management - exiting after the build")
server.terminateServer()
else:
logger.debug(1, "ToasterUI prepared for new build")
errors = 0
warnings = 0
taskfailures = []
buildinfohelper = BuildInfoHelper(server, build_history_enabled)

errors = 0
warnings = 0
taskfailures = []
buildinfohelper = BuildInfoHelper(server, build_history_enabled)
continue

if isinstance(event, bb.event.MetadataEvent):
@@ -295,17 +280,9 @@ def main(server, eventHandler, params ):
main.shutdown = 1
pass
except Exception as e:
# print errors to log
logger.error(e)
import traceback
exception_data = traceback.format_exc()
logger.error("%s\n%s" % (e, exception_data))

# save them to database, if possible; if it fails, we already logged to console.
try:
buildinfohelper.store_log_exception("%s\n%s" % (str(e), exception_data))
except Exception as ce:
print("CRITICAL: failed to to save toaster exception to the database: %s" % str(ce))

traceback.print_exc()
pass

if interrupted:
@@ -44,27 +44,10 @@ class BBUIEventQueue:
server.register_function( self.send_event, "event.sendpickle" )
server.socket.settimeout(1)

self.EventHandler = None
count_tries = 0
self.EventHandle = self.BBServer.registerEventHandler(self.host, self.port)

# the event handler registration may fail here due to cooker being in invalid state
# this is a transient situation, and we should retry a couple of times before
# giving up

while self.EventHandler == None and count_tries < 5:
self.EventHandle = self.BBServer.registerEventHandler(self.host, self.port)

if (self.EventHandle != None):
break

bb.warn("Could not register UI event handler %s:%d, retry" % (self.host, self.port))
count_tries += 1
import time
time.sleep(1)

if self.EventHandle == None:
raise Exception("Could not register UI event handler")
if (self.EventHandle == None):
bb.fatal("Could not register UI event handler")

self.server = server
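The change above replaces a single registration attempt with a bounded retry for a transient cooker state. The shape of that loop, as a generic sketch (names hypothetical):

import time

def register_with_retry(register, tries=5, delay=1):
    # Call register() until it returns a handle or the attempts run out.
    for attempt in range(tries):
        handle = register()
        if handle is not None:
            return handle
        print("registration failed, retry %d" % (attempt + 1))
        time.sleep(delay)
    raise Exception("Could not register UI event handler")

attempts = iter([None, None, "handle-42"])
print(register_with_retry(lambda: next(attempts), delay=0))  # handle-42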
@@ -264,7 +264,7 @@ def _print_trace(body, line):
def better_compile(text, file, realfile, mode = "exec"):
"""
A better compile method. This method
will print the offending lines.
will print the offending lines.
"""
try:
return compile(text, file, mode)
@@ -354,11 +354,14 @@ def better_exec(code, context, text = None, realfile = "<code>"):
code = better_compile(code, realfile, realfile)
try:
exec(code, get_context(), context)
except (bb.BBHandledException, bb.parse.SkipRecipe, bb.build.FuncFailed, bb.data_smart.ExpansionError):
# Error already shown so passthrough, no need for traceback
except bb.BBHandledException:
# Error already shown so passthrough
raise
except Exception as e:
(t, value, tb) = sys.exc_info()

if t in [bb.parse.SkipPackage, bb.build.FuncFailed]:
raise
try:
_print_exception(t, value, tb, realfile, text, context)
except Exception as e:
@@ -522,7 +525,7 @@ def filter_environment(good_vars):
os.unsetenv(key)
del os.environ[key]

if removed_vars:
if len(removed_vars):
logger.debug(1, "Removed the following variables from the environment: %s", ", ".join(removed_vars.keys()))

return removed_vars
@@ -530,7 +533,7 @@ def filter_environment(good_vars):
def approved_variables():
"""
Determine and return the list of whitelisted variables which are approved
to remain in the environment.
to remain in the envrionment.
"""
if 'BB_PRESERVE_ENV' in os.environ:
return os.environ.keys()
@@ -575,11 +578,30 @@ def build_environment(d):
if export:
os.environ[var] = d.getVar(var, True) or ""
def _check_unsafe_delete_path(path):
"""
Basic safeguard against recursively deleting something we shouldn't. If it returns True,
the caller should raise an exception with an appropriate message.
NOTE: This is NOT meant to be a security mechanism - just a guard against silly mistakes
with potentially disastrous results.
"""
extra = ''
# HOME might not be /home/something, so in case we can get it, check against it
homedir = os.environ.get('HOME', '')
if homedir:
extra = '|%s' % homedir
if re.match('(/|//|/home|/home/[^/]*%s)$' % extra, os.path.abspath(path)):
return True
return False

def remove(path, recurse=False):
"""Equivalent to rm -f or rm -rf"""
if not path:
return
if recurse:
for name in glob.glob(path):
if _check_unsafe_delete_path(path):
raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % path)
# shutil.rmtree(name) would be ideal but its too slow
subprocess.call(['rm', '-rf'] + glob.glob(path))
return
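The guard above can be tried standalone; only the regex and the HOME handling are taken from the diff:

import os, re

def check_unsafe_delete_path(path):
    extra = ''
    # HOME might not be under /home, so check against it explicitly too.
    homedir = os.environ.get('HOME', '')
    if homedir:
        extra = '|%s' % homedir
    return bool(re.match('(/|//|/home|/home/[^/]*%s)$' % extra,
                         os.path.abspath(path)))

print(check_unsafe_delete_path('/home'))       # True  - refuse to delete
print(check_unsafe_delete_path('/tmp/build'))  # False - allowed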
@@ -593,6 +615,8 @@ def remove(path, recurse=False):
def prunedir(topdir):
# Delete everything reachable from the directory named in 'topdir'.
# CAUTION: This is dangerous!
if _check_unsafe_delete_path(topdir):
raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir)
for root, dirs, files in os.walk(topdir, topdown = False):
for name in files:
os.remove(os.path.join(root, name))
@@ -842,19 +866,6 @@ def contains(variable, checkvalues, truevalue, falsevalue, d):
return truevalue
return falsevalue

def contains_any(variable, checkvalues, truevalue, falsevalue, d):
val = d.getVar(variable, True)
if not val:
return falsevalue
val = set(val.split())
if isinstance(checkvalues, basestring):
checkvalues = set(checkvalues.split())
else:
checkvalues = set(checkvalues)
if checkvalues & val:
return truevalue
return falsevalue
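contains_any() is easiest to see with a plain dict standing in for the datastore; this sketch uses the Python 3 spelling (str rather than basestring):

def contains_any(variable, checkvalues, truevalue, falsevalue, d):
    # Return truevalue if any of checkvalues appears in the variable's
    # whitespace-separated value, else falsevalue.
    val = d.get(variable)
    if not val:
        return falsevalue
    val = set(val.split())
    if isinstance(checkvalues, str):
        checkvalues = set(checkvalues.split())
    else:
        checkvalues = set(checkvalues)
    return truevalue if checkvalues & val else falsevalue

d = {"DISTRO_FEATURES": "systemd wayland"}
print(contains_any("DISTRO_FEATURES", "x11 wayland", "yes", "no", d))  # yes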
def cpu_count():
return multiprocessing.cpu_count()

@@ -862,16 +873,21 @@ def nonblockingfd(fd):
fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)

def process_profilelog(fn):
# Redirect stdout to capture profile information
pout = open(fn + '.processed', 'w')
so = sys.stdout.fileno()
orig_so = os.dup(sys.stdout.fileno())
os.dup2(pout.fileno(), so)

import pstats
p = pstats.Stats(fn, stream=pout)
p = pstats.Stats(fn)
p.sort_stats('time')
p.print_stats()
p.print_callers()
p.sort_stats('cumulative')
p.print_stats()

os.dup2(orig_so, so)
pout.flush()
pout.close()

@@ -879,17 +895,5 @@ def process_profilelog(fn):
# Was present to work around multiprocessing pool bugs in python < 2.7.3
#
def multiprocessingpool(*args, **kwargs):

import multiprocessing.pool
#import multiprocessing.util
#multiprocessing.util.log_to_stderr(10)
# Deal with a multiprocessing bug where signals to the processes would be delayed until the work
# completes. Putting in a timeout means the signals (like SIGINT/SIGTERM) get processed.
def wrapper(func):
def wrap(self, timeout=None):
return func(self, timeout=timeout if timeout is not None else 1e100)
return wrap
multiprocessing.pool.IMapIterator.next = wrapper(multiprocessing.pool.IMapIterator.next)

return multiprocessing.Pool(*args, **kwargs)
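The IMapIterator patch above exists because, on Python 2, pool results fetched with no timeout blocked in a way that delayed signal delivery; giving next() an effectively infinite default timeout made SIGINT/SIGTERM responsive. In isolation (on Python 3 iteration goes through __next__, so this is illustration only):

import multiprocessing.pool

def _with_default_timeout(func):
    def wrap(self, timeout=None):
        # Substitute a huge timeout so the wait remains interruptible.
        return func(self, timeout=timeout if timeout is not None else 1e100)
    return wrap

multiprocessing.pool.IMapIterator.next = _with_default_timeout(
    multiprocessing.pool.IMapIterator.next)

if __name__ == '__main__':
    with multiprocessing.pool.Pool(2) as pool:
        print(list(pool.imap(abs, [-3, -2, -1])))  # [3, 2, 1]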
@@ -1,43 +0,0 @@
|
||||
Behold, mortal, the origins of Beautiful Soup...
|
||||
================================================
|
||||
|
||||
Leonard Richardson is the primary programmer.
|
||||
|
||||
Aaron DeVore is awesome.
|
||||
|
||||
Mark Pilgrim provided the encoding detection code that forms the base
|
||||
of UnicodeDammit.
|
||||
|
||||
Thomas Kluyver and Ezio Melotti finished the work of getting Beautiful
|
||||
Soup 4 working under Python 3.
|
||||
|
||||
Simon Willison wrote soupselect, which was used to make Beautiful Soup
|
||||
support CSS selectors.
|
||||
|
||||
Sam Ruby helped with a lot of edge cases.
|
||||
|
||||
Jonathan Ellis was awarded the prestigious Beau Potage D'Or for his
|
||||
work in solving the nestable tags conundrum.
|
||||
|
||||
An incomplete list of people have contributed patches to Beautiful
|
||||
Soup:
|
||||
|
||||
Istvan Albert, Andrew Lin, Anthony Baxter, Andrew Boyko, Tony Chang,
|
||||
Zephyr Fang, Fuzzy, Roman Gaufman, Yoni Gilad, Richie Hindle, Peteris
|
||||
Krumins, Kent Johnson, Ben Last, Robert Leftwich, Staffan Malmgren,
|
||||
Ksenia Marasanova, JP Moins, Adam Monsen, John Nagle, "Jon", Ed
|
||||
Oskiewicz, Greg Phillips, Giles Radford, Arthur Rudolph, Marko
|
||||
Samastur, Jouni Seppänen, Alexander Schmolck, Andy Theyers, Glyn
|
||||
Webster, Paul Wright, Danny Yoo
|
||||
|
||||
An incomplete list of people who made suggestions or found bugs or
|
||||
found ways to break Beautiful Soup:
|
||||
|
||||
Hanno Böck, Matteo Bertini, Chris Curvey, Simon Cusack, Bruce Eckel,
|
||||
Matt Ernst, Michael Foord, Tom Harris, Bill de hOra, Donald Howes,
|
||||
Matt Patterson, Scott Roberts, Steve Strassmann, Mike Williams,
|
||||
warchild at redho dot com, Sami Kuisma, Carlos Rocha, Bob Hutchison,
|
||||
Joren Mc, Michal Migurski, John Kleven, Tim Heaney, Tripp Lilley, Ed
|
||||
Summers, Dennis Sutch, Chris Smith, Aaron Sweep^W Swartz, Stuart
|
||||
Turner, Greg Edwards, Kevin J Kalupson, Nikos Kouremenos, Artur de
|
||||
Sousa Rocha, Yichun Wei, Per Vognsen
|
||||
@@ -1,26 +0,0 @@
|
||||
Beautiful Soup is made available under the MIT license:
|
||||
|
||||
Copyright (c) 2004-2012 Leonard Richardson
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of this software and associated documentation files (the
|
||||
"Software"), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
|
||||
BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
|
||||
ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE, DAMMIT.
|
||||
|
||||
Beautiful Soup incorporates code from the html5lib library, which is
|
||||
also made available under the MIT license.
|
||||
File diff suppressed because it is too large
@@ -1,406 +0,0 @@
|
||||
"""Beautiful Soup
|
||||
Elixir and Tonic
|
||||
"The Screen-Scraper's Friend"
|
||||
http://www.crummy.com/software/BeautifulSoup/
|
||||
|
||||
Beautiful Soup uses a pluggable XML or HTML parser to parse a
|
||||
(possibly invalid) document into a tree representation. Beautiful Soup
|
||||
provides methods and Pythonic idioms that make it easy to
|
||||
navigate, search, and modify the parse tree.
|
||||
|
||||
Beautiful Soup works with Python 2.6 and up. It works better if lxml
|
||||
and/or html5lib is installed.
|
||||
|
||||
For more than you ever wanted to know about Beautiful Soup, see the
|
||||
documentation:
|
||||
http://www.crummy.com/software/BeautifulSoup/bs4/doc/
|
||||
"""
|
||||
|
||||
__author__ = "Leonard Richardson (leonardr@segfault.org)"
|
||||
__version__ = "4.3.2"
|
||||
__copyright__ = "Copyright (c) 2004-2013 Leonard Richardson"
|
||||
__license__ = "MIT"
|
||||
|
||||
__all__ = ['BeautifulSoup']
|
||||
|
||||
import os
|
||||
import re
|
||||
import warnings
|
||||
|
||||
from .builder import builder_registry, ParserRejectedMarkup
|
||||
from .dammit import UnicodeDammit
|
||||
from .element import (
|
||||
CData,
|
||||
Comment,
|
||||
DEFAULT_OUTPUT_ENCODING,
|
||||
Declaration,
|
||||
Doctype,
|
||||
NavigableString,
|
||||
PageElement,
|
||||
ProcessingInstruction,
|
||||
ResultSet,
|
||||
SoupStrainer,
|
||||
Tag,
|
||||
)
|
||||
|
||||
# The very first thing we do is give a useful error if someone is
|
||||
# running this code under Python 3 without converting it.
|
||||
syntax_error = u'You are trying to run the Python 2 version of Beautiful Soup under Python 3. This will not work. You need to convert the code, either by installing it (`python setup.py install`) or by running 2to3 (`2to3 -w bs4`).'
|
||||
|
||||
class BeautifulSoup(Tag):
|
||||
"""
|
||||
This class defines the basic interface called by the tree builders.
|
||||
|
||||
These methods will be called by the parser:
|
||||
reset()
|
||||
feed(markup)
|
||||
|
||||
The tree builder may call these methods from its feed() implementation:
|
||||
handle_starttag(name, attrs) # See note about return value
|
||||
handle_endtag(name)
|
||||
handle_data(data) # Appends to the current data node
|
||||
endData(containerClass=NavigableString) # Ends the current data node
|
||||
|
||||
No matter how complicated the underlying parser is, you should be
|
||||
able to build a tree using 'start tag' events, 'end tag' events,
|
||||
'data' events, and "done with data" events.
|
||||
|
||||
If you encounter an empty-element tag (aka a self-closing tag,
|
||||
like HTML's <br> tag), call handle_starttag and then
|
||||
handle_endtag.
|
||||
"""
|
||||
ROOT_TAG_NAME = u'[document]'
|
||||
|
||||
# If the end-user gives no indication which tree builder they
|
||||
# want, look for one with these features.
|
||||
DEFAULT_BUILDER_FEATURES = ['html', 'fast']
|
||||
|
||||
ASCII_SPACES = '\x20\x0a\x09\x0c\x0d'
|
||||
|
||||
def __init__(self, markup="", features=None, builder=None,
|
||||
parse_only=None, from_encoding=None, **kwargs):
|
||||
"""The Soup object is initialized as the 'root tag', and the
|
||||
provided markup (which can be a string or a file-like object)
|
||||
is fed into the underlying parser."""
|
||||
|
||||
if 'convertEntities' in kwargs:
|
||||
warnings.warn(
|
||||
"BS4 does not respect the convertEntities argument to the "
|
||||
"BeautifulSoup constructor. Entities are always converted "
|
||||
"to Unicode characters.")
|
||||
|
||||
if 'markupMassage' in kwargs:
|
||||
del kwargs['markupMassage']
|
||||
warnings.warn(
|
||||
"BS4 does not respect the markupMassage argument to the "
|
||||
"BeautifulSoup constructor. The tree builder is responsible "
|
||||
"for any necessary markup massage.")
|
||||
|
||||
if 'smartQuotesTo' in kwargs:
|
||||
del kwargs['smartQuotesTo']
|
||||
warnings.warn(
|
||||
"BS4 does not respect the smartQuotesTo argument to the "
|
||||
"BeautifulSoup constructor. Smart quotes are always converted "
|
||||
"to Unicode characters.")
|
||||
|
||||
if 'selfClosingTags' in kwargs:
|
||||
del kwargs['selfClosingTags']
|
||||
warnings.warn(
|
||||
"BS4 does not respect the selfClosingTags argument to the "
|
||||
"BeautifulSoup constructor. The tree builder is responsible "
|
||||
"for understanding self-closing tags.")
|
||||
|
||||
if 'isHTML' in kwargs:
|
||||
del kwargs['isHTML']
|
||||
warnings.warn(
|
||||
"BS4 does not respect the isHTML argument to the "
|
||||
"BeautifulSoup constructor. You can pass in features='html' "
|
||||
"or features='xml' to get a builder capable of handling "
|
||||
"one or the other.")
|
||||
|
||||
def deprecated_argument(old_name, new_name):
|
||||
if old_name in kwargs:
|
||||
warnings.warn(
|
||||
'The "%s" argument to the BeautifulSoup constructor '
|
||||
'has been renamed to "%s."' % (old_name, new_name))
|
||||
value = kwargs[old_name]
|
||||
del kwargs[old_name]
|
||||
return value
|
||||
return None
|
||||
|
||||
parse_only = parse_only or deprecated_argument(
|
||||
"parseOnlyThese", "parse_only")
|
||||
|
||||
from_encoding = from_encoding or deprecated_argument(
|
||||
"fromEncoding", "from_encoding")
|
||||
|
||||
if len(kwargs) > 0:
|
||||
arg = kwargs.keys().pop()
|
||||
raise TypeError(
|
||||
"__init__() got an unexpected keyword argument '%s'" % arg)
|
||||
|
||||
if builder is None:
|
||||
if isinstance(features, basestring):
|
||||
features = [features]
|
||||
if features is None or len(features) == 0:
|
||||
features = self.DEFAULT_BUILDER_FEATURES
|
||||
builder_class = builder_registry.lookup(*features)
|
||||
if builder_class is None:
|
||||
raise FeatureNotFound(
|
||||
"Couldn't find a tree builder with the features you "
|
||||
"requested: %s. Do you need to install a parser library?"
|
||||
% ",".join(features))
|
||||
builder = builder_class()
|
||||
self.builder = builder
|
||||
self.is_xml = builder.is_xml
|
||||
self.builder.soup = self
|
||||
|
||||
self.parse_only = parse_only
|
||||
|
||||
if hasattr(markup, 'read'): # It's a file-type object.
|
||||
markup = markup.read()
|
||||
elif len(markup) <= 256:
|
||||
# Print out warnings for a couple beginner problems
|
||||
# involving passing non-markup to Beautiful Soup.
|
||||
# Beautiful Soup will still parse the input as markup,
|
||||
# just in case that's what the user really wants.
|
||||
if (isinstance(markup, unicode)
|
||||
and not os.path.supports_unicode_filenames):
|
||||
possible_filename = markup.encode("utf8")
|
||||
else:
|
||||
possible_filename = markup
|
||||
is_file = False
|
||||
try:
|
||||
is_file = os.path.exists(possible_filename)
|
||||
except Exception, e:
|
||||
# This is almost certainly a problem involving
|
||||
# characters not valid in filenames on this
|
||||
# system. Just let it go.
|
||||
pass
|
||||
if is_file:
|
||||
warnings.warn(
|
||||
'"%s" looks like a filename, not markup. You should probably open this file and pass the filehandle into Beautiful Soup.' % markup)
|
||||
if markup[:5] == "http:" or markup[:6] == "https:":
|
||||
# TODO: This is ugly but I couldn't get it to work in
|
||||
# Python 3 otherwise.
|
||||
if ((isinstance(markup, bytes) and not b' ' in markup)
|
||||
or (isinstance(markup, unicode) and not u' ' in markup)):
|
||||
warnings.warn(
|
||||
'"%s" looks like a URL. Beautiful Soup is not an HTTP client. You should probably use an HTTP client to get the document behind the URL, and feed that document to Beautiful Soup.' % markup)
|
||||
|
||||
for (self.markup, self.original_encoding, self.declared_html_encoding,
|
||||
self.contains_replacement_characters) in (
|
||||
self.builder.prepare_markup(markup, from_encoding)):
|
||||
self.reset()
|
||||
try:
|
||||
self._feed()
|
||||
break
|
||||
except ParserRejectedMarkup:
|
||||
pass
|
||||
|
||||
# Clear out the markup and remove the builder's circular
|
||||
# reference to this object.
|
||||
self.markup = None
|
||||
self.builder.soup = None
|
||||
|
||||
def _feed(self):
|
||||
# Convert the document to Unicode.
|
||||
self.builder.reset()
|
||||
|
||||
self.builder.feed(self.markup)
|
||||
# Close out any unfinished strings and close all the open tags.
|
||||
self.endData()
|
||||
while self.currentTag.name != self.ROOT_TAG_NAME:
|
||||
self.popTag()
|
||||
|
||||
def reset(self):
|
||||
Tag.__init__(self, self, self.builder, self.ROOT_TAG_NAME)
|
||||
self.hidden = 1
|
||||
self.builder.reset()
|
||||
self.current_data = []
|
||||
self.currentTag = None
|
||||
self.tagStack = []
|
||||
self.preserve_whitespace_tag_stack = []
|
||||
self.pushTag(self)
|
||||
|
||||
def new_tag(self, name, namespace=None, nsprefix=None, **attrs):
|
||||
"""Create a new tag associated with this soup."""
|
||||
return Tag(None, self.builder, name, namespace, nsprefix, attrs)
|
||||
|
||||
def new_string(self, s, subclass=NavigableString):
|
||||
"""Create a new NavigableString associated with this soup."""
|
||||
navigable = subclass(s)
|
||||
navigable.setup()
|
||||
return navigable
|
||||
|
||||
def insert_before(self, successor):
|
||||
raise NotImplementedError("BeautifulSoup objects don't support insert_before().")
|
||||
|
||||
def insert_after(self, successor):
|
||||
raise NotImplementedError("BeautifulSoup objects don't support insert_after().")
|
||||
|
||||
def popTag(self):
|
||||
tag = self.tagStack.pop()
|
||||
if self.preserve_whitespace_tag_stack and tag == self.preserve_whitespace_tag_stack[-1]:
|
||||
self.preserve_whitespace_tag_stack.pop()
|
||||
#print "Pop", tag.name
|
||||
if self.tagStack:
|
||||
self.currentTag = self.tagStack[-1]
|
||||
return self.currentTag
|
||||
|
||||
def pushTag(self, tag):
|
||||
#print "Push", tag.name
|
||||
if self.currentTag:
|
||||
self.currentTag.contents.append(tag)
|
||||
self.tagStack.append(tag)
|
||||
self.currentTag = self.tagStack[-1]
|
||||
if tag.name in self.builder.preserve_whitespace_tags:
|
||||
self.preserve_whitespace_tag_stack.append(tag)
|
||||
|
||||
def endData(self, containerClass=NavigableString):
|
||||
if self.current_data:
|
||||
current_data = u''.join(self.current_data)
|
||||
# If whitespace is not preserved, and this string contains
|
||||
# nothing but ASCII spaces, replace it with a single space
|
||||
# or newline.
|
||||
if not self.preserve_whitespace_tag_stack:
|
||||
strippable = True
|
||||
for i in current_data:
|
||||
if i not in self.ASCII_SPACES:
|
||||
strippable = False
|
||||
break
|
||||
if strippable:
|
||||
if '\n' in current_data:
|
||||
current_data = '\n'
|
||||
else:
|
||||
current_data = ' '
|
||||
|
||||
# Reset the data collector.
|
||||
self.current_data = []
|
||||
|
||||
# Should we add this string to the tree at all?
|
||||
if self.parse_only and len(self.tagStack) <= 1 and \
|
||||
(not self.parse_only.text or \
|
||||
not self.parse_only.search(current_data)):
|
||||
return
|
||||
|
||||
o = containerClass(current_data)
|
||||
self.object_was_parsed(o)
|
||||
|
||||
def object_was_parsed(self, o, parent=None, most_recent_element=None):
|
||||
"""Add an object to the parse tree."""
|
||||
parent = parent or self.currentTag
|
||||
most_recent_element = most_recent_element or self._most_recent_element
|
||||
o.setup(parent, most_recent_element)
|
||||
|
||||
if most_recent_element is not None:
|
||||
most_recent_element.next_element = o
|
||||
self._most_recent_element = o
|
||||
parent.contents.append(o)
|
||||
|
||||
def _popToTag(self, name, nsprefix=None, inclusivePop=True):
|
||||
"""Pops the tag stack up to and including the most recent
|
||||
instance of the given tag. If inclusivePop is false, pops the tag
|
||||
stack up to but *not* including the most recent instance of
|
||||
the given tag."""
|
||||
#print "Popping to %s" % name
|
||||
if name == self.ROOT_TAG_NAME:
|
||||
# The BeautifulSoup object itself can never be popped.
|
||||
return
|
||||
|
||||
most_recently_popped = None
|
||||
|
||||
stack_size = len(self.tagStack)
|
||||
for i in range(stack_size - 1, 0, -1):
|
||||
t = self.tagStack[i]
|
||||
if (name == t.name and nsprefix == t.prefix):
|
||||
if inclusivePop:
|
||||
most_recently_popped = self.popTag()
|
||||
break
|
||||
most_recently_popped = self.popTag()
|
||||
|
||||
return most_recently_popped
|
||||
|
||||
def handle_starttag(self, name, namespace, nsprefix, attrs):
|
||||
"""Push a start tag on to the stack.
|
||||
|
||||
If this method returns None, the tag was rejected by the
|
||||
SoupStrainer. You should proceed as if the tag had not occurred
|
||||
in the document. For instance, if this was a self-closing tag,
|
||||
don't call handle_endtag.
|
||||
"""
|
||||
|
||||
# print "Start tag %s: %s" % (name, attrs)
|
||||
self.endData()
|
||||
|
||||
if (self.parse_only and len(self.tagStack) <= 1
|
||||
and (self.parse_only.text
|
||||
or not self.parse_only.search_tag(name, attrs))):
|
||||
return None
|
||||
|
||||
tag = Tag(self, self.builder, name, namespace, nsprefix, attrs,
|
||||
self.currentTag, self._most_recent_element)
|
||||
if tag is None:
|
||||
return tag
|
||||
if self._most_recent_element:
|
||||
self._most_recent_element.next_element = tag
|
||||
self._most_recent_element = tag
|
||||
self.pushTag(tag)
|
||||
return tag
|
||||
|
||||
def handle_endtag(self, name, nsprefix=None):
|
||||
#print "End tag: " + name
|
||||
self.endData()
|
||||
self._popToTag(name, nsprefix)
|
||||
|
||||
def handle_data(self, data):
|
||||
self.current_data.append(data)
|
||||
|
||||
def decode(self, pretty_print=False,
|
||||
eventual_encoding=DEFAULT_OUTPUT_ENCODING,
|
||||
formatter="minimal"):
|
||||
"""Returns a string or Unicode representation of this document.
|
||||
To get Unicode, pass None for encoding."""
|
||||
|
||||
if self.is_xml:
|
||||
# Print the XML declaration
|
||||
encoding_part = ''
|
||||
if eventual_encoding != None:
|
||||
encoding_part = ' encoding="%s"' % eventual_encoding
|
||||
prefix = u'<?xml version="1.0"%s?>\n' % encoding_part
|
||||
else:
|
||||
prefix = u''
|
||||
if not pretty_print:
|
||||
indent_level = None
|
||||
else:
|
||||
indent_level = 0
|
||||
return prefix + super(BeautifulSoup, self).decode(
|
||||
indent_level, eventual_encoding, formatter)
|
||||
|
||||
# Alias to make it easier to type import: 'from bs4 import _soup'
|
||||
_s = BeautifulSoup
|
||||
_soup = BeautifulSoup
|
||||
|
||||
class BeautifulStoneSoup(BeautifulSoup):
|
||||
"""Deprecated interface to an XML parser."""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs['features'] = 'xml'
|
||||
warnings.warn(
|
||||
'The BeautifulStoneSoup class is deprecated. Instead of using '
|
||||
'it, pass features="xml" into the BeautifulSoup constructor.')
|
||||
super(BeautifulStoneSoup, self).__init__(*args, **kwargs)
|
||||
|
||||
|
||||
class StopParsing(Exception):
|
||||
pass
|
||||
|
||||
class FeatureNotFound(ValueError):
|
||||
pass
|
||||
|
||||
|
||||
#By default, act as an HTML pretty-printer.
|
||||
if __name__ == '__main__':
|
||||
import sys
|
||||
soup = BeautifulSoup(sys.stdin)
|
||||
print soup.prettify()
|
||||
@@ -1,321 +0,0 @@
|
||||
from collections import defaultdict
|
||||
import itertools
|
||||
import sys
|
||||
from bs4.element import (
|
||||
CharsetMetaAttributeValue,
|
||||
ContentMetaAttributeValue,
|
||||
whitespace_re
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
'HTMLTreeBuilder',
|
||||
'SAXTreeBuilder',
|
||||
'TreeBuilder',
|
||||
'TreeBuilderRegistry',
|
||||
]
|
||||
|
||||
# Some useful features for a TreeBuilder to have.
|
||||
FAST = 'fast'
|
||||
PERMISSIVE = 'permissive'
|
||||
STRICT = 'strict'
|
||||
XML = 'xml'
|
||||
HTML = 'html'
|
||||
HTML_5 = 'html5'
|
||||
|
||||
|
||||
class TreeBuilderRegistry(object):
|
||||
|
||||
def __init__(self):
|
||||
self.builders_for_feature = defaultdict(list)
|
||||
self.builders = []
|
||||
|
||||
def register(self, treebuilder_class):
|
||||
"""Register a treebuilder based on its advertised features."""
|
||||
for feature in treebuilder_class.features:
|
||||
self.builders_for_feature[feature].insert(0, treebuilder_class)
|
||||
self.builders.insert(0, treebuilder_class)
|
||||
|
||||
def lookup(self, *features):
if len(self.builders) == 0:
# There are no builders at all.
return None

if len(features) == 0:
# They didn't ask for any features. Give them the most
# recently registered builder.
return self.builders[0]

# Go down the list of features in order, and eliminate any builders
# that don't match every feature.
features = list(features)
features.reverse()
candidates = None
candidate_set = None
while len(features) > 0:
feature = features.pop()
we_have_the_feature = self.builders_for_feature.get(feature, [])
if len(we_have_the_feature) > 0:
if candidates is None:
candidates = we_have_the_feature
candidate_set = set(candidates)
else:
# Eliminate any candidates that don't have this feature.
candidate_set = candidate_set.intersection(
set(we_have_the_feature))

# The only valid candidates are the ones in candidate_set.
# Go through the original list of candidates and pick the first one
# that's in candidate_set.
if candidate_set is None:
return None
for candidate in candidates:
if candidate in candidate_set:
return candidate
return None
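Assuming Beautiful Soup 4 is installed, the registry can be queried directly; lookup() returns a builder class or None:

from bs4.builder import builder_registry

print(builder_registry.lookup('html'))          # best available HTML builder class
print(builder_registry.lookup('no-such-flag'))  # None: no builder has that feature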
# The BeautifulSoup class will take feature lists from developers and use them
|
||||
# to look up builders in this registry.
|
||||
builder_registry = TreeBuilderRegistry()
|
||||
|
||||
class TreeBuilder(object):
|
||||
"""Turn a document into a Beautiful Soup object tree."""
|
||||
|
||||
features = []
|
||||
|
||||
is_xml = False
|
||||
preserve_whitespace_tags = set()
|
||||
empty_element_tags = None # A tag will be considered an empty-element
|
||||
# tag when and only when it has no contents.
|
||||
|
||||
# A value for these tag/attribute combinations is a space- or
|
||||
# comma-separated list of CDATA, rather than a single CDATA.
|
||||
cdata_list_attributes = {}
|
||||
|
||||
|
||||
def __init__(self):
|
||||
self.soup = None
|
||||
|
||||
def reset(self):
|
||||
pass
|
||||
|
||||
def can_be_empty_element(self, tag_name):
|
||||
"""Might a tag with this name be an empty-element tag?
|
||||
|
||||
The final markup may or may not actually present this tag as
|
||||
self-closing.
|
||||
|
||||
For instance: an HTMLBuilder does not consider a <p> tag to be
|
||||
an empty-element tag (it's not in
|
||||
HTMLBuilder.empty_element_tags). This means an empty <p> tag
|
||||
will be presented as "<p></p>", not "<p />".
|
||||
|
||||
The default implementation has no opinion about which tags are
|
||||
empty-element tags, so a tag will be presented as an
|
||||
empty-element tag if and only if it has no contents.
|
||||
"<foo></foo>" will become "<foo />", and "<foo>bar</foo>" will
|
||||
be left alone.
|
||||
"""
|
||||
if self.empty_element_tags is None:
|
||||
return True
|
||||
return tag_name in self.empty_element_tags
|
||||
|
||||
def feed(self, markup):
|
||||
raise NotImplementedError()
|
||||
|
||||
def prepare_markup(self, markup, user_specified_encoding=None,
|
||||
document_declared_encoding=None):
|
||||
return markup, None, None, False
|
||||
|
||||
def test_fragment_to_document(self, fragment):
|
||||
"""Wrap an HTML fragment to make it look like a document.
|
||||
|
||||
Different parsers do this differently. For instance, lxml
|
||||
introduces an empty <head> tag, and html5lib
|
||||
doesn't. Abstracting this away lets us write simple tests
|
||||
which run HTML fragments through the parser and compare the
|
||||
results against other HTML fragments.
|
||||
|
||||
This method should not be used outside of tests.
|
||||
"""
|
||||
return fragment
|
||||
|
||||
def set_up_substitutions(self, tag):
|
||||
return False
|
||||
|
||||
def _replace_cdata_list_attribute_values(self, tag_name, attrs):
|
||||
"""Replaces class="foo bar" with class=["foo", "bar"]
|
||||
|
||||
Modifies its input in place.
|
||||
"""
|
||||
if not attrs:
|
||||
return attrs
|
||||
if self.cdata_list_attributes:
|
||||
universal = self.cdata_list_attributes.get('*', [])
|
||||
tag_specific = self.cdata_list_attributes.get(
|
||||
tag_name.lower(), None)
|
||||
for attr in attrs.keys():
|
||||
if attr in universal or (tag_specific and attr in tag_specific):
|
||||
# We have a "class"-type attribute whose string
|
||||
# value is a whitespace-separated list of
|
||||
# values. Split it into a list.
|
||||
value = attrs[attr]
|
||||
if isinstance(value, basestring):
|
||||
values = whitespace_re.split(value)
|
||||
else:
|
||||
# html5lib sometimes calls setAttributes twice
|
||||
# for the same tag when rearranging the parse
|
||||
# tree. On the second call the attribute value
|
||||
# here is already a list. If this happens,
|
||||
# leave the value alone rather than trying to
|
||||
# split it again.
|
||||
values = value
|
||||
attrs[attr] = values
|
||||
return attrs
|
||||
|
||||
class SAXTreeBuilder(TreeBuilder):
|
||||
"""A Beautiful Soup treebuilder that listens for SAX events."""
|
||||
|
||||
def feed(self, markup):
|
||||
raise NotImplementedError()
|
||||
|
||||
def close(self):
|
||||
pass
|
||||
|
||||
def startElement(self, name, attrs):
|
||||
attrs = dict((key[1], value) for key, value in list(attrs.items()))
|
||||
#print "Start %s, %r" % (name, attrs)
|
||||
self.soup.handle_starttag(name, attrs)
|
||||
|
||||
def endElement(self, name):
|
||||
#print "End %s" % name
|
||||
self.soup.handle_endtag(name)
|
||||
|
||||
def startElementNS(self, nsTuple, nodeName, attrs):
|
||||
# Throw away (ns, nodeName) for now.
|
||||
self.startElement(nodeName, attrs)
|
||||
|
||||
def endElementNS(self, nsTuple, nodeName):
|
||||
# Throw away (ns, nodeName) for now.
|
||||
self.endElement(nodeName)
|
||||
#handler.endElementNS((ns, node.nodeName), node.nodeName)
|
||||
|
||||
def startPrefixMapping(self, prefix, nodeValue):
|
||||
# Ignore the prefix for now.
|
||||
pass
|
||||
|
||||
def endPrefixMapping(self, prefix):
|
||||
# Ignore the prefix for now.
|
||||
# handler.endPrefixMapping(prefix)
|
||||
pass
|
||||
|
||||
def characters(self, content):
|
||||
self.soup.handle_data(content)
|
||||
|
||||
def startDocument(self):
|
||||
pass
|
||||
|
||||
def endDocument(self):
|
||||
pass
|
||||
|
||||
|
||||
class HTMLTreeBuilder(TreeBuilder):
|
||||
"""This TreeBuilder knows facts about HTML.
|
||||
|
||||
Such as which tags are empty-element tags.
|
||||
"""
|
||||
|
||||
preserve_whitespace_tags = set(['pre', 'textarea'])
|
||||
empty_element_tags = set(['br' , 'hr', 'input', 'img', 'meta',
|
||||
'spacer', 'link', 'frame', 'base'])
|
||||
|
||||
# The HTML standard defines these attributes as containing a
# space-separated list of values, not a single value. That is,
# class="foo bar" means that the 'class' attribute has two values,
# 'foo' and 'bar', not the single value 'foo bar'. When we
# encounter one of these attributes, we will parse its value into
# a list of values if possible. Upon output, the list will be
# converted back into a string.
cdata_list_attributes = {
"*" : ['class', 'accesskey', 'dropzone'],
"a" : ['rel', 'rev'],
"link" : ['rel', 'rev'],
"td" : ["headers"],
"th" : ["headers"],
"td" : ["headers"],
"form" : ["accept-charset"],
"object" : ["archive"],

# These are HTML5 specific, as are *.accesskey and *.dropzone above.
"area" : ["rel"],
"icon" : ["sizes"],
"iframe" : ["sandbox"],
"output" : ["for"],
}
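Assuming Beautiful Soup 4 is installed, the effect of cdata_list_attributes is visible through the public API:

from bs4 import BeautifulSoup

soup = BeautifulSoup('<p class="foo bar" id="x">hi</p>', 'html.parser')
print(soup.p['class'])  # ['foo', 'bar'] - cdata list attribute, split on whitespace
print(soup.p['id'])     # 'x' - ordinary attribute, left as a single string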
def set_up_substitutions(self, tag):
|
||||
# We are only interested in <meta> tags
|
||||
if tag.name != 'meta':
|
||||
return False
|
||||
|
||||
http_equiv = tag.get('http-equiv')
|
||||
content = tag.get('content')
|
||||
charset = tag.get('charset')
|
||||
|
||||
# We are interested in <meta> tags that say what encoding the
|
||||
# document was originally in. This means HTML 5-style <meta>
|
||||
# tags that provide the "charset" attribute. It also means
|
||||
# HTML 4-style <meta> tags that provide the "content"
|
||||
# attribute and have "http-equiv" set to "content-type".
|
||||
#
|
||||
# In both cases we will replace the value of the appropriate
|
||||
# attribute with a standin object that can take on any
|
||||
# encoding.
|
||||
meta_encoding = None
|
||||
if charset is not None:
|
||||
# HTML 5 style:
|
||||
# <meta charset="utf8">
|
||||
meta_encoding = charset
|
||||
tag['charset'] = CharsetMetaAttributeValue(charset)
|
||||
|
||||
elif (content is not None and http_equiv is not None
|
||||
and http_equiv.lower() == 'content-type'):
|
||||
# HTML 4 style:
|
||||
# <meta http-equiv="content-type" content="text/html; charset=utf8">
|
||||
tag['content'] = ContentMetaAttributeValue(content)
|
||||
|
||||
return (meta_encoding is not None)
|
||||
|
||||
def register_treebuilders_from(module):
|
||||
"""Copy TreeBuilders from the given module into this module."""
|
||||
# I'm fairly sure this is not the best way to do this.
|
||||
this_module = sys.modules['bs4.builder']
|
||||
for name in module.__all__:
|
||||
obj = getattr(module, name)
|
||||
|
||||
if issubclass(obj, TreeBuilder):
|
||||
setattr(this_module, name, obj)
|
||||
this_module.__all__.append(name)
|
||||
# Register the builder while we're at it.
|
||||
this_module.builder_registry.register(obj)
|
||||
|
||||
class ParserRejectedMarkup(Exception):
|
||||
pass
|
||||
|
||||
# Builders are registered in reverse order of priority, so that custom
|
||||
# builder registrations will take precedence. In general, we want lxml
|
||||
# to take precedence over html5lib, because it's faster. And we only
|
||||
# want to use HTMLParser as a last result.
|
||||
from . import _htmlparser
|
||||
register_treebuilders_from(_htmlparser)
|
||||
try:
|
||||
from . import _html5lib
|
||||
register_treebuilders_from(_html5lib)
|
||||
except ImportError:
|
||||
# They don't have html5lib installed.
|
||||
pass
|
||||
try:
|
||||
from . import _lxml
|
||||
register_treebuilders_from(_lxml)
|
||||
except ImportError:
|
||||
# They don't have lxml installed.
|
||||
pass
|
||||
@@ -1,285 +0,0 @@
|
||||
__all__ = [
|
||||
'HTML5TreeBuilder',
|
||||
]
|
||||
|
||||
import warnings
|
||||
from bs4.builder import (
|
||||
PERMISSIVE,
|
||||
HTML,
|
||||
HTML_5,
|
||||
HTMLTreeBuilder,
|
||||
)
|
||||
from bs4.element import NamespacedAttribute
|
||||
import html5lib
|
||||
from html5lib.constants import namespaces
|
||||
from bs4.element import (
|
||||
Comment,
|
||||
Doctype,
|
||||
NavigableString,
|
||||
Tag,
|
||||
)
|
||||
|
||||
class HTML5TreeBuilder(HTMLTreeBuilder):
|
||||
"""Use html5lib to build a tree."""
|
||||
|
||||
features = ['html5lib', PERMISSIVE, HTML_5, HTML]
|
||||
|
||||
def prepare_markup(self, markup, user_specified_encoding):
|
||||
# Store the user-specified encoding for use later on.
|
||||
self.user_specified_encoding = user_specified_encoding
|
||||
yield (markup, None, None, False)
|
||||
|
||||
# These methods are defined by Beautiful Soup.
|
||||
def feed(self, markup):
|
||||
if self.soup.parse_only is not None:
|
||||
warnings.warn("You provided a value for parse_only, but the html5lib tree builder doesn't support parse_only. The entire document will be parsed.")
|
||||
parser = html5lib.HTMLParser(tree=self.create_treebuilder)
|
||||
doc = parser.parse(markup, encoding=self.user_specified_encoding)
|
||||
|
||||
# Set the character encoding detected by the tokenizer.
|
||||
if isinstance(markup, unicode):
|
||||
# We need to special-case this because html5lib sets
|
||||
# charEncoding to UTF-8 if it gets Unicode input.
|
||||
doc.original_encoding = None
|
||||
else:
|
||||
doc.original_encoding = parser.tokenizer.stream.charEncoding[0]
|
||||
|
||||
def create_treebuilder(self, namespaceHTMLElements):
|
||||
self.underlying_builder = TreeBuilderForHtml5lib(
|
||||
self.soup, namespaceHTMLElements)
|
||||
return self.underlying_builder
|
||||
|
||||
def test_fragment_to_document(self, fragment):
|
||||
"""See `TreeBuilder`."""
|
||||
return u'<html><head></head><body>%s</body></html>' % fragment
|
||||
|
||||
|
||||
class TreeBuilderForHtml5lib(html5lib.treebuilders._base.TreeBuilder):
|
||||
|
||||
def __init__(self, soup, namespaceHTMLElements):
|
||||
self.soup = soup
|
||||
super(TreeBuilderForHtml5lib, self).__init__(namespaceHTMLElements)
|
||||
|
||||
def documentClass(self):
|
||||
self.soup.reset()
|
||||
return Element(self.soup, self.soup, None)
|
||||
|
||||
def insertDoctype(self, token):
|
||||
name = token["name"]
|
||||
publicId = token["publicId"]
|
||||
systemId = token["systemId"]
|
||||
|
||||
doctype = Doctype.for_name_and_ids(name, publicId, systemId)
|
||||
self.soup.object_was_parsed(doctype)
|
||||
|
||||
def elementClass(self, name, namespace):
|
||||
tag = self.soup.new_tag(name, namespace)
|
||||
return Element(tag, self.soup, namespace)
|
||||
|
||||
def commentClass(self, data):
|
||||
return TextNode(Comment(data), self.soup)
|
||||
|
||||
def fragmentClass(self):
|
||||
self.soup = BeautifulSoup("")
|
||||
self.soup.name = "[document_fragment]"
|
||||
return Element(self.soup, self.soup, None)
|
||||
|
||||
def appendChild(self, node):
|
||||
# XXX This code is not covered by the BS4 tests.
|
||||
self.soup.append(node.element)
|
||||
|
||||
def getDocument(self):
|
||||
return self.soup
|
||||
|
||||
def getFragment(self):
|
||||
return html5lib.treebuilders._base.TreeBuilder.getFragment(self).element
|
||||
|
||||
class AttrList(object):
|
||||
def __init__(self, element):
|
||||
self.element = element
|
||||
self.attrs = dict(self.element.attrs)
|
||||
def __iter__(self):
|
||||
return list(self.attrs.items()).__iter__()
|
||||
def __setitem__(self, name, value):
|
||||
"set attr", name, value
|
||||
self.element[name] = value
    def items(self):
        return list(self.attrs.items())
    def keys(self):
        return list(self.attrs.keys())
    def __len__(self):
        return len(self.attrs)
    def __getitem__(self, name):
        return self.attrs[name]
    def __contains__(self, name):
        return name in list(self.attrs.keys())


class Element(html5lib.treebuilders._base.Node):
    def __init__(self, element, soup, namespace):
        html5lib.treebuilders._base.Node.__init__(self, element.name)
        self.element = element
        self.soup = soup
        self.namespace = namespace

    def appendChild(self, node):
        string_child = child = None
        if isinstance(node, basestring):
            # Some other piece of code decided to pass in a string
            # instead of creating a TextElement object to contain the
            # string.
            string_child = child = node
        elif isinstance(node, Tag):
            # Some other piece of code decided to pass in a Tag
            # instead of creating an Element object to contain the
            # Tag.
            child = node
        elif node.element.__class__ == NavigableString:
            string_child = child = node.element
        else:
            child = node.element

        if not isinstance(child, basestring) and child.parent is not None:
            node.element.extract()

        if (string_child and self.element.contents
            and self.element.contents[-1].__class__ == NavigableString):
            # We are appending a string onto another string.
            # TODO This has O(n^2) performance, for input like
            # "a</a>a</a>a</a>..."
            old_element = self.element.contents[-1]
            new_element = self.soup.new_string(old_element + string_child)
            old_element.replace_with(new_element)
            self.soup._most_recent_element = new_element
        else:
            if isinstance(node, basestring):
                # Create a brand new NavigableString from this string.
                child = self.soup.new_string(node)

            # Tell Beautiful Soup to act as if it parsed this element
            # immediately after the parent's last descendant. (Or
            # immediately after the parent, if it has no children.)
            if self.element.contents:
                most_recent_element = self.element._last_descendant(False)
            else:
                most_recent_element = self.element

            self.soup.object_was_parsed(
                child, parent=self.element,
                most_recent_element=most_recent_element)

    def getAttributes(self):
        return AttrList(self.element)

    def setAttributes(self, attributes):
        if attributes is not None and len(attributes) > 0:

            for name, value in list(attributes.items()):
                if isinstance(name, tuple):
                    new_name = NamespacedAttribute(*name)
                    del attributes[name]
                    attributes[new_name] = value

            self.soup.builder._replace_cdata_list_attribute_values(
                self.name, attributes)
            for name, value in attributes.items():
                self.element[name] = value

            # The attributes may contain variables that need substitution.
            # Call set_up_substitutions manually.
            #
            # The Tag constructor called this method when the Tag was created,
            # but we just set/changed the attributes, so call it again.
            self.soup.builder.set_up_substitutions(self.element)
    attributes = property(getAttributes, setAttributes)

    def insertText(self, data, insertBefore=None):
        if insertBefore:
            text = TextNode(self.soup.new_string(data), self.soup)
            # Insert the new TextNode (not the raw string) before the
            # reference node.
            self.insertBefore(text, insertBefore)
        else:
            self.appendChild(data)

    def insertBefore(self, node, refNode):
        index = self.element.index(refNode.element)
        if (node.element.__class__ == NavigableString and self.element.contents
            and self.element.contents[index-1].__class__ == NavigableString):
            # (See comments in appendChild)
            old_node = self.element.contents[index-1]
            new_str = self.soup.new_string(old_node + node.element)
            old_node.replace_with(new_str)
        else:
            self.element.insert(index, node.element)
            node.parent = self

    def removeChild(self, node):
        node.element.extract()

    def reparentChildren(self, new_parent):
        """Move all of this tag's children into another tag."""
        element = self.element
        new_parent_element = new_parent.element
        # Determine what this tag's next_element will be once all the children
        # are removed.
        final_next_element = element.next_sibling

        new_parents_last_descendant = new_parent_element._last_descendant(False, False)
        if len(new_parent_element.contents) > 0:
            # The new parent already contains children. We will be
            # appending this tag's children to the end.
            new_parents_last_child = new_parent_element.contents[-1]
            new_parents_last_descendant_next_element = new_parents_last_descendant.next_element
        else:
            # The new parent contains no children.
            new_parents_last_child = None
            new_parents_last_descendant_next_element = new_parent_element.next_element

        to_append = element.contents
        append_after = new_parent.element.contents
        if len(to_append) > 0:
            # Set the first child's previous_element and previous_sibling
            # to elements within the new parent
            first_child = to_append[0]
            first_child.previous_element = new_parents_last_descendant
            first_child.previous_sibling = new_parents_last_child

            # Fix the last child's next_element and next_sibling
            last_child = to_append[-1]
            last_child.next_element = new_parents_last_descendant_next_element
            last_child.next_sibling = None

        for child in to_append:
            child.parent = new_parent_element
            new_parent_element.contents.append(child)

        # Now that this element has no children, change its .next_element.
        element.contents = []
        element.next_element = final_next_element
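        # A sketch of what this pointer surgery accomplishes (hypothetical
        # markup): reparenting the children of <b> into an empty <c> turns
        # <a><b><i>1</i><u>2</u></b><c></c></a> into
        # <a><b></b><c><i>1</i><u>2</u></c></a>, with the next_element /
        # previous_element chain rewired to match the new document order.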

    def cloneNode(self):
        tag = self.soup.new_tag(self.element.name, self.namespace)
        node = Element(tag, self.soup, self.namespace)
        for key, value in self.attributes:
            node.attributes[key] = value
        return node

    def hasContent(self):
        return self.element.contents

    def getNameTuple(self):
        if self.namespace is None:
            return namespaces["html"], self.name
        else:
            return self.namespace, self.name

    nameTuple = property(getNameTuple)

class TextNode(Element):
    def __init__(self, element, soup):
        html5lib.treebuilders._base.Node.__init__(self, None)
        self.element = element
        self.soup = soup

    def cloneNode(self):
        raise NotImplementedError
@@ -1,258 +0,0 @@
"""Use the HTMLParser library to parse HTML files that aren't too bad."""

__all__ = [
    'HTMLParserTreeBuilder',
    ]

from HTMLParser import (
    HTMLParser,
    HTMLParseError,
    )
import sys
import warnings

# Starting in Python 3.2, the HTMLParser constructor takes a 'strict'
# argument, which we'd like to set to False. Unfortunately,
# http://bugs.python.org/issue13273 makes strict=True a better bet
# before Python 3.2.3.
#
# At the end of this file, we monkeypatch HTMLParser so that
# strict=True works well on Python 3.2.2.
major, minor, release = sys.version_info[:3]
CONSTRUCTOR_TAKES_STRICT = (
    major > 3
    or (major == 3 and minor > 2)
    or (major == 3 and minor == 2 and release >= 3))
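# For example (hypothetical interpreter versions): on Python 3.2.2 the
# expression above is False, so the monkeypatch at the bottom of this file
# kicks in; on 3.2.3 or 3.3 it is True and HTMLParserTreeBuilder passes
# strict=False straight to the HTMLParser constructor.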

from bs4.element import (
    CData,
    Comment,
    Declaration,
    Doctype,
    ProcessingInstruction,
    )
from bs4.dammit import EntitySubstitution, UnicodeDammit

from bs4.builder import (
    HTML,
    HTMLTreeBuilder,
    STRICT,
    )


HTMLPARSER = 'html.parser'

class BeautifulSoupHTMLParser(HTMLParser):
    def handle_starttag(self, name, attrs):
        # XXX namespace
        attr_dict = {}
        for key, value in attrs:
            # Change None attribute values to the empty string
            # for consistency with the other tree builders.
            if value is None:
                value = ''
            attr_dict[key] = value
        self.soup.handle_starttag(name, None, None, attr_dict)

    def handle_endtag(self, name):
        self.soup.handle_endtag(name)

    def handle_data(self, data):
        self.soup.handle_data(data)

    def handle_charref(self, name):
        # XXX workaround for a bug in HTMLParser. Remove this once
        # it's fixed.
        if name.startswith('x'):
            real_name = int(name.lstrip('x'), 16)
        elif name.startswith('X'):
            real_name = int(name.lstrip('X'), 16)
        else:
            real_name = int(name)

        try:
            data = unichr(real_name)
        except (ValueError, OverflowError), e:
            data = u"\N{REPLACEMENT CHARACTER}"

        self.handle_data(data)
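        # For example, the reference "&#x20AC;" arrives here as the name
        # 'x20AC', is parsed as hex 0x20AC, and comes out as u'\u20ac'
        # (the euro sign); an out-of-range reference falls back to the
        # REPLACEMENT CHARACTER instead of crashing.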

    def handle_entityref(self, name):
        character = EntitySubstitution.HTML_ENTITY_TO_CHARACTER.get(name)
        if character is not None:
            data = character
        else:
            data = "&%s;" % name
        self.handle_data(data)

    def handle_comment(self, data):
        self.soup.endData()
        self.soup.handle_data(data)
        self.soup.endData(Comment)

    def handle_decl(self, data):
        self.soup.endData()
        if data.startswith("DOCTYPE "):
            data = data[len("DOCTYPE "):]
        elif data == 'DOCTYPE':
            # i.e. "<!DOCTYPE>"
            data = ''
        self.soup.handle_data(data)
        self.soup.endData(Doctype)

    def unknown_decl(self, data):
        if data.upper().startswith('CDATA['):
            cls = CData
            data = data[len('CDATA['):]
        else:
            cls = Declaration
        self.soup.endData()
        self.soup.handle_data(data)
        self.soup.endData(cls)

    def handle_pi(self, data):
        self.soup.endData()
        if data.endswith("?") and data.lower().startswith("xml"):
            # "An XHTML processing instruction using the trailing '?'
            # will cause the '?' to be included in data." - HTMLParser
            # docs.
            #
            # Strip the question mark so we don't end up with two
            # question marks.
            data = data[:-1]
        self.soup.handle_data(data)
        self.soup.endData(ProcessingInstruction)


class HTMLParserTreeBuilder(HTMLTreeBuilder):

    is_xml = False
    features = [HTML, STRICT, HTMLPARSER]

    def __init__(self, *args, **kwargs):
        if CONSTRUCTOR_TAKES_STRICT:
            kwargs['strict'] = False
        self.parser_args = (args, kwargs)

    def prepare_markup(self, markup, user_specified_encoding=None,
                       document_declared_encoding=None):
        """
        :return: A 4-tuple (markup, original encoding, encoding
        declared within markup, whether any characters had to be
        replaced with REPLACEMENT CHARACTER).
        """
        if isinstance(markup, unicode):
            yield (markup, None, None, False)
            return

        try_encodings = [user_specified_encoding, document_declared_encoding]
        dammit = UnicodeDammit(markup, try_encodings, is_html=True)
        yield (dammit.markup, dammit.original_encoding,
               dammit.declared_html_encoding,
               dammit.contains_replacement_characters)

    def feed(self, markup):
        args, kwargs = self.parser_args
        parser = BeautifulSoupHTMLParser(*args, **kwargs)
        parser.soup = self.soup
        try:
            parser.feed(markup)
        except HTMLParseError, e:
            warnings.warn(RuntimeWarning(
                "Python's built-in HTMLParser cannot parse the given document. This is not a bug in Beautiful Soup. The best solution is to install an external parser (lxml or html5lib), and use Beautiful Soup with that parser. See http://www.crummy.com/software/BeautifulSoup/bs4/doc/#installing-a-parser for help."))
            raise e

# Patch 3.2 versions of HTMLParser earlier than 3.2.3 to use some
# 3.2.3 code. This ensures they don't treat markup like <p></p> as a
# string.
#
# XXX This code can be removed once most Python 3 users are on 3.2.3.
if major == 3 and minor == 2 and not CONSTRUCTOR_TAKES_STRICT:
    import re
    attrfind_tolerant = re.compile(
        r'\s*((?<=[\'"\s])[^\s/>][^\s/=>]*)(\s*=+\s*'
        r'(\'[^\']*\'|"[^"]*"|(?![\'"])[^>\s]*))?')
    HTMLParserTreeBuilder.attrfind_tolerant = attrfind_tolerant

    locatestarttagend = re.compile(r"""
  <[a-zA-Z][-.a-zA-Z0-9:_]*          # tag name
  (?:\s+                             # whitespace before attribute name
    (?:[a-zA-Z_][-.:a-zA-Z0-9_]*     # attribute name
      (?:\s*=\s*                     # value indicator
        (?:'[^']*'                   # LITA-enclosed value
          |\"[^\"]*\"                # LIT-enclosed value
          |[^'\">\s]+                # bare value
         )
       )?
     )
   )*
  \s*                                # trailing whitespace
""", re.VERBOSE)
    BeautifulSoupHTMLParser.locatestarttagend = locatestarttagend

    from html.parser import tagfind, attrfind

    def parse_starttag(self, i):
        self.__starttag_text = None
        endpos = self.check_for_whole_start_tag(i)
        if endpos < 0:
            return endpos
        rawdata = self.rawdata
        self.__starttag_text = rawdata[i:endpos]

        # Now parse the data between i+1 and j into a tag and attrs
        attrs = []
        match = tagfind.match(rawdata, i+1)
        assert match, 'unexpected call to parse_starttag()'
        k = match.end()
        self.lasttag = tag = rawdata[i+1:k].lower()
        while k < endpos:
            if self.strict:
                m = attrfind.match(rawdata, k)
            else:
                m = attrfind_tolerant.match(rawdata, k)
            if not m:
                break
            attrname, rest, attrvalue = m.group(1, 2, 3)
            if not rest:
                attrvalue = None
            elif attrvalue[:1] == '\'' == attrvalue[-1:] or \
                 attrvalue[:1] == '"' == attrvalue[-1:]:
                attrvalue = attrvalue[1:-1]
                if attrvalue:
                    attrvalue = self.unescape(attrvalue)
            attrs.append((attrname.lower(), attrvalue))
            k = m.end()

        end = rawdata[k:endpos].strip()
        if end not in (">", "/>"):
            lineno, offset = self.getpos()
            if "\n" in self.__starttag_text:
                lineno = lineno + self.__starttag_text.count("\n")
                offset = len(self.__starttag_text) \
                         - self.__starttag_text.rfind("\n")
            else:
                offset = offset + len(self.__starttag_text)
            if self.strict:
                self.error("junk characters in start tag: %r"
                           % (rawdata[k:endpos][:20],))
            self.handle_data(rawdata[i:endpos])
            return endpos
        if end.endswith('/>'):
            # XHTML-style empty tag: <span attr="value" />
            self.handle_startendtag(tag, attrs)
        else:
            self.handle_starttag(tag, attrs)
            if tag in self.CDATA_CONTENT_ELEMENTS:
                self.set_cdata_mode(tag)
        return endpos

    def set_cdata_mode(self, elem):
        self.cdata_elem = elem.lower()
        self.interesting = re.compile(r'</\s*%s\s*>' % self.cdata_elem, re.I)

    BeautifulSoupHTMLParser.parse_starttag = parse_starttag
    BeautifulSoupHTMLParser.set_cdata_mode = set_cdata_mode

    CONSTRUCTOR_TAKES_STRICT = True
@@ -1,233 +0,0 @@
__all__ = [
    'LXMLTreeBuilderForXML',
    'LXMLTreeBuilder',
    ]

from io import BytesIO
from StringIO import StringIO
import collections
from lxml import etree
from bs4.element import Comment, Doctype, NamespacedAttribute
from bs4.builder import (
    FAST,
    HTML,
    HTMLTreeBuilder,
    PERMISSIVE,
    ParserRejectedMarkup,
    TreeBuilder,
    XML)
from bs4.dammit import EncodingDetector

LXML = 'lxml'

class LXMLTreeBuilderForXML(TreeBuilder):
    DEFAULT_PARSER_CLASS = etree.XMLParser

    is_xml = True

    # Well, it's permissive by XML parser standards.
    features = [LXML, XML, FAST, PERMISSIVE]

    CHUNK_SIZE = 512

    # This namespace mapping is specified in the XML Namespace
    # standard.
    DEFAULT_NSMAPS = {'http://www.w3.org/XML/1998/namespace' : "xml"}

    def default_parser(self, encoding):
        # This can either return a parser object or a class, which
        # will be instantiated with default arguments.
        if self._default_parser is not None:
            return self._default_parser
        return etree.XMLParser(
            target=self, strip_cdata=False, recover=True, encoding=encoding)

    def parser_for(self, encoding):
        # Use the default parser.
        parser = self.default_parser(encoding)

        if isinstance(parser, collections.Callable):
            # Instantiate the parser with default arguments
            parser = parser(target=self, strip_cdata=False, encoding=encoding)
        return parser

    def __init__(self, parser=None, empty_element_tags=None):
        # TODO: Issue a warning if parser is present but not a
        # callable, since that means there's no way to create new
        # parsers for different encodings.
        self._default_parser = parser
        if empty_element_tags is not None:
            self.empty_element_tags = set(empty_element_tags)
        self.soup = None
        self.nsmaps = [self.DEFAULT_NSMAPS]

    def _getNsTag(self, tag):
        # Split the namespace URL out of a fully-qualified lxml tag
        # name. Copied from lxml's src/lxml/sax.py.
        if tag[0] == '{':
            return tuple(tag[1:].split('}', 1))
        else:
            return (None, tag)
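        # For example, a fully-qualified lxml name splits like this:
        #   '{http://www.w3.org/XML/1998/namespace}lang'
        #     -> ('http://www.w3.org/XML/1998/namespace', 'lang')
        # while a plain name passes through: 'lang' -> (None, 'lang')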

    def prepare_markup(self, markup, user_specified_encoding=None,
                       document_declared_encoding=None):
        """
        :yield: A series of 4-tuples.
         (markup, encoding, declared encoding,
          has undergone character replacement)

        Each 4-tuple represents a strategy for parsing the document.
        """
        if isinstance(markup, unicode):
            # We were given Unicode. Maybe lxml can parse Unicode on
            # this system?
            yield markup, None, document_declared_encoding, False

        if isinstance(markup, unicode):
            # No, apparently not. Convert the Unicode to UTF-8 and
            # tell lxml to parse it as UTF-8.
            yield (markup.encode("utf8"), "utf8",
                   document_declared_encoding, False)

        # Instead of using UnicodeDammit to convert the bytestring to
        # Unicode using different encodings, use EncodingDetector to
        # iterate over the encodings, and tell lxml to try to parse
        # the document as each one in turn.
        is_html = not self.is_xml
        try_encodings = [user_specified_encoding, document_declared_encoding]
        detector = EncodingDetector(markup, try_encodings, is_html)
        for encoding in detector.encodings:
            yield (detector.markup, encoding, document_declared_encoding, False)

    def feed(self, markup):
        if isinstance(markup, bytes):
            markup = BytesIO(markup)
        elif isinstance(markup, unicode):
            markup = StringIO(markup)

        # Call feed() at least once, even if the markup is empty,
        # or the parser won't be initialized.
        data = markup.read(self.CHUNK_SIZE)
        try:
            self.parser = self.parser_for(self.soup.original_encoding)
            self.parser.feed(data)
            while len(data) != 0:
                # Now call feed() on the rest of the data, chunk by chunk.
                data = markup.read(self.CHUNK_SIZE)
                if len(data) != 0:
                    self.parser.feed(data)
            self.parser.close()
        except (UnicodeDecodeError, LookupError, etree.ParserError), e:
            raise ParserRejectedMarkup(str(e))

    def close(self):
        self.nsmaps = [self.DEFAULT_NSMAPS]

    def start(self, name, attrs, nsmap={}):
        # Make sure attrs is a mutable dict--lxml may send an immutable dictproxy.
        attrs = dict(attrs)
        nsprefix = None
        # Invert each namespace map as it comes in.
        if len(nsmap) == 0 and len(self.nsmaps) > 1:
            # There are no new namespaces for this tag, but
            # non-default namespaces are in play, so we need a
            # separate tag stack to know when they end.
            self.nsmaps.append(None)
        elif len(nsmap) > 0:
            # A new namespace mapping has come into play.
            inverted_nsmap = dict((value, key) for key, value in nsmap.items())
            self.nsmaps.append(inverted_nsmap)
            # Also treat the namespace mapping as a set of attributes on the
            # tag, so we can recreate it later.
            attrs = attrs.copy()
            for prefix, namespace in nsmap.items():
                attribute = NamespacedAttribute(
                    "xmlns", prefix, "http://www.w3.org/2000/xmlns/")
                attrs[attribute] = namespace

        # Namespaces are in play. Find any attributes that came in
        # from lxml with namespaces attached to their names, and
        # turn them into NamespacedAttribute objects.
        new_attrs = {}
        for attr, value in attrs.items():
            namespace, attr = self._getNsTag(attr)
            if namespace is None:
                new_attrs[attr] = value
            else:
                nsprefix = self._prefix_for_namespace(namespace)
                attr = NamespacedAttribute(nsprefix, attr, namespace)
                new_attrs[attr] = value
        attrs = new_attrs

        namespace, name = self._getNsTag(name)
        nsprefix = self._prefix_for_namespace(namespace)
        self.soup.handle_starttag(name, namespace, nsprefix, attrs)

    def _prefix_for_namespace(self, namespace):
        """Find the currently active prefix for the given namespace."""
        if namespace is None:
            return None
        for inverted_nsmap in reversed(self.nsmaps):
            if inverted_nsmap is not None and namespace in inverted_nsmap:
                return inverted_nsmap[namespace]
        return None

    def end(self, name):
        self.soup.endData()
        completed_tag = self.soup.tagStack[-1]
        namespace, name = self._getNsTag(name)
        nsprefix = None
        if namespace is not None:
            for inverted_nsmap in reversed(self.nsmaps):
                if inverted_nsmap is not None and namespace in inverted_nsmap:
                    nsprefix = inverted_nsmap[namespace]
                    break
        self.soup.handle_endtag(name, nsprefix)
        if len(self.nsmaps) > 1:
            # This tag, or one of its parents, introduced a namespace
            # mapping, so pop it off the stack.
            self.nsmaps.pop()

    def pi(self, target, data):
        pass

    def data(self, content):
        self.soup.handle_data(content)

    def doctype(self, name, pubid, system):
        self.soup.endData()
        doctype = Doctype.for_name_and_ids(name, pubid, system)
        self.soup.object_was_parsed(doctype)

    def comment(self, content):
        "Handle comments as Comment objects."
        self.soup.endData()
        self.soup.handle_data(content)
        self.soup.endData(Comment)

    def test_fragment_to_document(self, fragment):
        """See `TreeBuilder`."""
        return u'<?xml version="1.0" encoding="utf-8"?>\n%s' % fragment


class LXMLTreeBuilder(HTMLTreeBuilder, LXMLTreeBuilderForXML):

    features = [LXML, HTML, FAST, PERMISSIVE]
    is_xml = False

    def default_parser(self, encoding):
        return etree.HTMLParser

    def feed(self, markup):
        encoding = self.soup.original_encoding
        try:
            self.parser = self.parser_for(encoding)
            self.parser.feed(markup)
            self.parser.close()
        except (UnicodeDecodeError, LookupError, etree.ParserError), e:
            raise ParserRejectedMarkup(str(e))

    def test_fragment_to_document(self, fragment):
        """See `TreeBuilder`."""
        return u'<html><body>%s</body></html>' % fragment
@@ -1,829 +0,0 @@
# -*- coding: utf-8 -*-
"""Beautiful Soup bonus library: Unicode, Dammit

This library converts a bytestream to Unicode through any means
necessary. It is heavily based on code from Mark Pilgrim's Universal
Feed Parser. It works best on XML and HTML, but it does not rewrite the
XML or HTML to reflect a new encoding; that's the tree builder's job.
"""

import codecs
from htmlentitydefs import codepoint2name
import re
import logging
import string

# Import a library to autodetect character encodings.
chardet_type = None
try:
    # First try the fast C implementation.
    # PyPI package: cchardet
    import cchardet
    def chardet_dammit(s):
        return cchardet.detect(s)['encoding']
except ImportError:
    try:
        # Fall back to the pure Python implementation
        # Debian package: python-chardet
        # PyPI package: chardet
        import chardet
        def chardet_dammit(s):
            return chardet.detect(s)['encoding']
        #import chardet.constants
        #chardet.constants._debug = 1
    except ImportError:
        # No chardet available.
        def chardet_dammit(s):
            return None

# Available from http://cjkpython.i18n.org/.
try:
    import iconv_codec
except ImportError:
    pass

xml_encoding_re = re.compile(
    '^<\?.*encoding=[\'"](.*?)[\'"].*\?>'.encode(), re.I)
html_meta_re = re.compile(
    '<\s*meta[^>]+charset\s*=\s*["\']?([^>]*?)[ /;\'">]'.encode(), re.I)

class EntitySubstitution(object):

    """Substitute XML or HTML entities for the corresponding characters."""

    def _populate_class_variables():
        lookup = {}
        reverse_lookup = {}
        characters_for_re = []
        for codepoint, name in list(codepoint2name.items()):
            character = unichr(codepoint)
            if codepoint != 34:
                # There's no point in turning the quotation mark into
                # &quot;, unless it happens within an attribute value, which
                # is handled elsewhere.
                characters_for_re.append(character)
                lookup[character] = name
            # But we do want to turn &quot; into the quotation mark.
            reverse_lookup[name] = character
        re_definition = "[%s]" % "".join(characters_for_re)
        return lookup, reverse_lookup, re.compile(re_definition)
    (CHARACTER_TO_HTML_ENTITY, HTML_ENTITY_TO_CHARACTER,
     CHARACTER_TO_HTML_ENTITY_RE) = _populate_class_variables()

    CHARACTER_TO_XML_ENTITY = {
        "'": "apos",
        '"': "quot",
        "&": "amp",
        "<": "lt",
        ">": "gt",
        }

    BARE_AMPERSAND_OR_BRACKET = re.compile("([<>]|"
                                           "&(?!#\d+;|#x[0-9a-fA-F]+;|\w+;)"
                                           ")")

    AMPERSAND_OR_BRACKET = re.compile("([<>&])")

    @classmethod
    def _substitute_html_entity(cls, matchobj):
        entity = cls.CHARACTER_TO_HTML_ENTITY.get(matchobj.group(0))
        return "&%s;" % entity

    @classmethod
    def _substitute_xml_entity(cls, matchobj):
        """Used with a regular expression to substitute the
        appropriate XML entity for an XML special character."""
        entity = cls.CHARACTER_TO_XML_ENTITY[matchobj.group(0)]
        return "&%s;" % entity

@classmethod
|
||||
def quoted_attribute_value(self, value):
|
||||
"""Make a value into a quoted XML attribute, possibly escaping it.
|
||||
|
||||
Most strings will be quoted using double quotes.
|
||||
|
||||
Bob's Bar -> "Bob's Bar"
|
||||
|
||||
If a string contains double quotes, it will be quoted using
|
||||
single quotes.
|
||||
|
||||
Welcome to "my bar" -> 'Welcome to "my bar"'
|
||||
|
||||
If a string contains both single and double quotes, the
|
||||
double quotes will be escaped, and the string will be quoted
|
||||
using double quotes.
|
||||
|
||||
Welcome to "Bob's Bar" -> "Welcome to "Bob's bar"
|
||||
"""
|
||||
quote_with = '"'
|
||||
if '"' in value:
|
||||
if "'" in value:
|
||||
# The string contains both single and double
|
||||
# quotes. Turn the double quotes into
|
||||
# entities. We quote the double quotes rather than
|
||||
# the single quotes because the entity name is
|
||||
# """ whether this is HTML or XML. If we
|
||||
# quoted the single quotes, we'd have to decide
|
||||
# between ' and &squot;.
|
||||
replace_with = """
|
||||
value = value.replace('"', replace_with)
|
||||
else:
|
||||
# There are double quotes but no single quotes.
|
||||
# We can use single quotes to quote the attribute.
|
||||
quote_with = "'"
|
||||
return quote_with + value + quote_with

    @classmethod
    def substitute_xml(cls, value, make_quoted_attribute=False):
        """Substitute XML entities for special XML characters.

        :param value: A string to be substituted. The less-than sign
          will become &lt;, the greater-than sign will become &gt;,
          and any ampersands will become &amp;. If you want ampersands
          that appear to be part of an entity definition to be left
          alone, use substitute_xml_containing_entities() instead.

        :param make_quoted_attribute: If True, then the string will be
          quoted, as befits an attribute value.
        """
        # Escape angle brackets and ampersands.
        value = cls.AMPERSAND_OR_BRACKET.sub(
            cls._substitute_xml_entity, value)

        if make_quoted_attribute:
            value = cls.quoted_attribute_value(value)
        return value
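        # For example:
        #   substitute_xml(u'AT&T <blink>') -> u'AT&amp;T &lt;blink&gt;'
        #   substitute_xml(u'a < b', make_quoted_attribute=True)
        #     -> u'"a &lt; b"'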

    @classmethod
    def substitute_xml_containing_entities(
        cls, value, make_quoted_attribute=False):
        """Substitute XML entities for special XML characters.

        :param value: A string to be substituted. The less-than sign will
          become &lt;, the greater-than sign will become &gt;, and any
          ampersands that are not part of an entity definition will
          become &amp;.

        :param make_quoted_attribute: If True, then the string will be
          quoted, as befits an attribute value.
        """
        # Escape angle brackets, and ampersands that aren't part of
        # entities.
        value = cls.BARE_AMPERSAND_OR_BRACKET.sub(
            cls._substitute_xml_entity, value)

        if make_quoted_attribute:
            value = cls.quoted_attribute_value(value)
        return value

    @classmethod
    def substitute_html(cls, s):
        """Replace certain Unicode characters with named HTML entities.

        This differs from data.encode(encoding, 'xmlcharrefreplace')
        in that the goal is to make the result more readable (to those
        with ASCII displays) rather than to recover from
        errors. There's absolutely nothing wrong with a UTF-8 string
        containing a LATIN SMALL LETTER E WITH ACUTE, but replacing that
        character with "&eacute;" will make it more readable to some
        people.
        """
        return cls.CHARACTER_TO_HTML_ENTITY_RE.sub(
            cls._substitute_html_entity, s)
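        # For example:
        #   substitute_html(u'caf\xe9') -> u'caf&eacute;'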


class EncodingDetector:
    """Suggests a number of possible encodings for a bytestring.

    Order of precedence:

    1. Encodings you specifically tell EncodingDetector to try first
    (the override_encodings argument to the constructor).

    2. An encoding declared within the bytestring itself, either in an
    XML declaration (if the bytestring is to be interpreted as an XML
    document), or in a <meta> tag (if the bytestring is to be
    interpreted as an HTML document.)

    3. An encoding detected through textual analysis by chardet,
    cchardet, or a similar external library.

    4. UTF-8.

    5. Windows-1252.
    """
    def __init__(self, markup, override_encodings=None, is_html=False):
        self.override_encodings = override_encodings or []
        self.chardet_encoding = None
        self.is_html = is_html
        self.declared_encoding = None

        # First order of business: strip a byte-order mark.
        self.markup, self.sniffed_encoding = self.strip_byte_order_mark(markup)

    def _usable(self, encoding, tried):
        if encoding is not None:
            encoding = encoding.lower()
            if encoding not in tried:
                tried.add(encoding)
                return True
        return False

    @property
    def encodings(self):
        """Yield a number of encodings that might work for this markup."""
        tried = set()
        for e in self.override_encodings:
            if self._usable(e, tried):
                yield e

        # Did the document originally start with a byte-order mark
        # that indicated its encoding?
        if self._usable(self.sniffed_encoding, tried):
            yield self.sniffed_encoding

        # Look within the document for an XML or HTML encoding
        # declaration.
        if self.declared_encoding is None:
            self.declared_encoding = self.find_declared_encoding(
                self.markup, self.is_html)
        if self._usable(self.declared_encoding, tried):
            yield self.declared_encoding

        # Use third-party character set detection to guess at the
        # encoding.
        if self.chardet_encoding is None:
            self.chardet_encoding = chardet_dammit(self.markup)
        if self._usable(self.chardet_encoding, tried):
            yield self.chardet_encoding

        # As a last-ditch effort, try utf-8 and windows-1252.
        for e in ('utf-8', 'windows-1252'):
            if self._usable(e, tried):
                yield e
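        # A minimal usage sketch (hypothetical bytestring): each candidate
        # is yielded at most once, in order of precedence, so a caller can
        # stop at the first encoding that decodes cleanly.
        #
        #     detector = EncodingDetector(data, ['utf-16le'], is_html=True)
        #     for encoding in detector.encodings:
        #         try:
        #             return detector.markup.decode(encoding)
        #         except UnicodeDecodeError:
        #             continue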

    @classmethod
    def strip_byte_order_mark(cls, data):
        """If a byte-order mark is present, strip it and return the encoding it implies."""
        encoding = None
        if (len(data) >= 4) and (data[:2] == b'\xfe\xff') \
               and (data[2:4] != '\x00\x00'):
            encoding = 'utf-16be'
            data = data[2:]
        elif (len(data) >= 4) and (data[:2] == b'\xff\xfe') \
                 and (data[2:4] != '\x00\x00'):
            encoding = 'utf-16le'
            data = data[2:]
        elif data[:3] == b'\xef\xbb\xbf':
            encoding = 'utf-8'
            data = data[3:]
        elif data[:4] == b'\x00\x00\xfe\xff':
            encoding = 'utf-32be'
            data = data[4:]
        elif data[:4] == b'\xff\xfe\x00\x00':
            encoding = 'utf-32le'
            data = data[4:]
        return data, encoding
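        # For example, a UTF-8 BOM is consumed and reported:
        #   strip_byte_order_mark(b'\xef\xbb\xbf<html>')
        #     -> (b'<html>', 'utf-8')
        # while BOM-less input comes back unchanged with encoding None.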

    @classmethod
    def find_declared_encoding(cls, markup, is_html=False, search_entire_document=False):
        """Given a document, tries to find its declared encoding.

        An XML encoding is declared at the beginning of the document.

        An HTML encoding is declared in a <meta> tag, hopefully near the
        beginning of the document.
        """
        if search_entire_document:
            xml_endpos = html_endpos = len(markup)
        else:
            xml_endpos = 1024
            html_endpos = max(2048, int(len(markup) * 0.05))

        declared_encoding = None
        declared_encoding_match = xml_encoding_re.search(markup, endpos=xml_endpos)
        if not declared_encoding_match and is_html:
            declared_encoding_match = html_meta_re.search(markup, endpos=html_endpos)
        if declared_encoding_match is not None:
            declared_encoding = declared_encoding_match.groups()[0].decode(
                'ascii')
        if declared_encoding:
            return declared_encoding.lower()
        return None
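        # For example:
        #   find_declared_encoding(b'<?xml version="1.0" encoding="ISO-8859-1"?>')
        #     -> 'iso-8859-1'
        #   find_declared_encoding(b'<meta charset="utf-8">', is_html=True)
        #     -> 'utf-8'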

class UnicodeDammit:
    """A class for detecting the encoding of a *ML document and
    converting it to a Unicode string. If the source encoding is
    windows-1252, can replace MS smart quotes with their HTML or XML
    equivalents."""

    # This dictionary maps commonly seen values for "charset" in HTML
    # meta tags to the corresponding Python codec names. It only covers
    # values that aren't in Python's aliases and can't be determined
    # by the heuristics in find_codec.
    CHARSET_ALIASES = {"macintosh": "mac-roman",
                       "x-sjis": "shift-jis"}

    ENCODINGS_WITH_SMART_QUOTES = [
        "windows-1252",
        "iso-8859-1",
        "iso-8859-2",
        ]

    def __init__(self, markup, override_encodings=[],
                 smart_quotes_to=None, is_html=False):
        self.smart_quotes_to = smart_quotes_to
        self.tried_encodings = []
        self.contains_replacement_characters = False
        self.is_html = is_html

        self.detector = EncodingDetector(markup, override_encodings, is_html)

        # Short-circuit if the data is in Unicode to begin with.
        if isinstance(markup, unicode) or markup == '':
            self.markup = markup
            self.unicode_markup = unicode(markup)
            self.original_encoding = None
            return

        # The encoding detector may have stripped a byte-order mark.
        # Use the stripped markup from this point on.
        self.markup = self.detector.markup

        u = None
        for encoding in self.detector.encodings:
            markup = self.detector.markup
            u = self._convert_from(encoding)
            if u is not None:
                break

        if not u:
            # None of the encodings worked. As an absolute last resort,
            # try them again with character replacement.

            for encoding in self.detector.encodings:
                if encoding != "ascii":
                    u = self._convert_from(encoding, "replace")
                if u is not None:
                    logging.warning(
                        "Some characters could not be decoded, and were "
                        "replaced with REPLACEMENT CHARACTER.")
                    self.contains_replacement_characters = True
                    break

        # If none of that worked, we could at this point force it to
        # ASCII, but that would destroy so much data that I think
        # giving up is better.
        self.unicode_markup = u
        if not u:
            self.original_encoding = None

    def _sub_ms_char(self, match):
        """Changes a MS smart quote character to an XML or HTML
        entity, or an ASCII character."""
        orig = match.group(1)
        if self.smart_quotes_to == 'ascii':
            sub = self.MS_CHARS_TO_ASCII.get(orig).encode()
        else:
            sub = self.MS_CHARS.get(orig)
            if type(sub) == tuple:
                if self.smart_quotes_to == 'xml':
                    sub = '&#x'.encode() + sub[1].encode() + ';'.encode()
                else:
                    sub = '&'.encode() + sub[0].encode() + ';'.encode()
            else:
                sub = sub.encode()
        return sub

    def _convert_from(self, proposed, errors="strict"):
        proposed = self.find_codec(proposed)
        if not proposed or (proposed, errors) in self.tried_encodings:
            return None
        self.tried_encodings.append((proposed, errors))
        markup = self.markup
        # Convert smart quotes to HTML if coming from an encoding
        # that might have them.
        if (self.smart_quotes_to is not None
            and proposed in self.ENCODINGS_WITH_SMART_QUOTES):
            smart_quotes_re = b"([\x80-\x9f])"
            smart_quotes_compiled = re.compile(smart_quotes_re)
            markup = smart_quotes_compiled.sub(self._sub_ms_char, markup)

        try:
            #print "Trying to convert document to %s (errors=%s)" % (
            #    proposed, errors)
            u = self._to_unicode(markup, proposed, errors)
            self.markup = u
            self.original_encoding = proposed
        except Exception as e:
            #print "That didn't work!"
            #print e
            return None
        #print "Correct encoding: %s" % proposed
        return self.markup

    def _to_unicode(self, data, encoding, errors="strict"):
        '''Given a string and its encoding, decodes the string into Unicode.
        %encoding is a string recognized by encodings.aliases'''
        return unicode(data, encoding, errors)

    @property
    def declared_html_encoding(self):
        if not self.is_html:
            return None
        return self.detector.declared_encoding

    def find_codec(self, charset):
        value = (self._codec(self.CHARSET_ALIASES.get(charset, charset))
                 or (charset and self._codec(charset.replace("-", "")))
                 or (charset and self._codec(charset.replace("-", "_")))
                 or (charset and charset.lower())
                 or charset
                 )
        if value:
            return value.lower()
        return None
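        # For example (results depend on the codecs the running Python
        # provides):
        #   find_codec('macintosh') -> 'mac-roman'   (via CHARSET_ALIASES)
        #   find_codec('UTF_8')     -> 'utf_8'       (known codec, lowercased)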

    def _codec(self, charset):
        if not charset:
            return charset
        codec = None
        try:
            codecs.lookup(charset)
            codec = charset
        except (LookupError, ValueError):
            pass
        return codec


    # A partial mapping of ISO-Latin-1 to HTML entities/XML numeric entities.
    MS_CHARS = {b'\x80': ('euro', '20AC'),
                b'\x81': ' ',
                b'\x82': ('sbquo', '201A'),
                b'\x83': ('fnof', '192'),
                b'\x84': ('bdquo', '201E'),
                b'\x85': ('hellip', '2026'),
                b'\x86': ('dagger', '2020'),
                b'\x87': ('Dagger', '2021'),
                b'\x88': ('circ', '2C6'),
                b'\x89': ('permil', '2030'),
                b'\x8A': ('Scaron', '160'),
                b'\x8B': ('lsaquo', '2039'),
                b'\x8C': ('OElig', '152'),
                b'\x8D': '?',
                b'\x8E': ('#x17D', '17D'),
                b'\x8F': '?',
                b'\x90': '?',
                b'\x91': ('lsquo', '2018'),
                b'\x92': ('rsquo', '2019'),
                b'\x93': ('ldquo', '201C'),
                b'\x94': ('rdquo', '201D'),
                b'\x95': ('bull', '2022'),
                b'\x96': ('ndash', '2013'),
                b'\x97': ('mdash', '2014'),
                b'\x98': ('tilde', '2DC'),
                b'\x99': ('trade', '2122'),
                b'\x9a': ('scaron', '161'),
                b'\x9b': ('rsaquo', '203A'),
                b'\x9c': ('oelig', '153'),
                b'\x9d': '?',
                b'\x9e': ('#x17E', '17E'),
                b'\x9f': ('Yuml', ''),}

    # A parochial partial mapping of ISO-Latin-1 to ASCII. Contains
    # horrors like stripping diacritical marks to turn á into a, but also
    # contains non-horrors like turning “ into ".
    MS_CHARS_TO_ASCII = {
        b'\x80' : 'EUR',
        b'\x81' : ' ',
        b'\x82' : ',',
        b'\x83' : 'f',
        b'\x84' : ',,',
        b'\x85' : '...',
        b'\x86' : '+',
        b'\x87' : '++',
        b'\x88' : '^',
        b'\x89' : '%',
        b'\x8a' : 'S',
        b'\x8b' : '<',
        b'\x8c' : 'OE',
        b'\x8d' : '?',
        b'\x8e' : 'Z',
        b'\x8f' : '?',
        b'\x90' : '?',
        b'\x91' : "'",
        b'\x92' : "'",
        b'\x93' : '"',
        b'\x94' : '"',
        b'\x95' : '*',
        b'\x96' : '-',
        b'\x97' : '--',
        b'\x98' : '~',
        b'\x99' : '(TM)',
        b'\x9a' : 's',
        b'\x9b' : '>',
        b'\x9c' : 'oe',
        b'\x9d' : '?',
        b'\x9e' : 'z',
        b'\x9f' : 'Y',
        b'\xa0' : ' ',
        b'\xa1' : '!',
        b'\xa2' : 'c',
        b'\xa3' : 'GBP',
        b'\xa4' : '$', #This approximation is especially parochial--this is the
                       #generic currency symbol.
        b'\xa5' : 'YEN',
        b'\xa6' : '|',
        b'\xa7' : 'S',
        b'\xa8' : '..',
        b'\xa9' : '',
        b'\xaa' : '(th)',
        b'\xab' : '<<',
        b'\xac' : '!',
        b'\xad' : ' ',
        b'\xae' : '(R)',
        b'\xaf' : '-',
        b'\xb0' : 'o',
        b'\xb1' : '+-',
        b'\xb2' : '2',
        b'\xb3' : '3',
        b'\xb4' : ("'", 'acute'),
        b'\xb5' : 'u',
        b'\xb6' : 'P',
        b'\xb7' : '*',
        b'\xb8' : ',',
        b'\xb9' : '1',
        b'\xba' : '(th)',
        b'\xbb' : '>>',
        b'\xbc' : '1/4',
        b'\xbd' : '1/2',
        b'\xbe' : '3/4',
        b'\xbf' : '?',
        b'\xc0' : 'A',
        b'\xc1' : 'A',
        b'\xc2' : 'A',
        b'\xc3' : 'A',
        b'\xc4' : 'A',
        b'\xc5' : 'A',
        b'\xc6' : 'AE',
        b'\xc7' : 'C',
        b'\xc8' : 'E',
        b'\xc9' : 'E',
        b'\xca' : 'E',
        b'\xcb' : 'E',
        b'\xcc' : 'I',
        b'\xcd' : 'I',
        b'\xce' : 'I',
        b'\xcf' : 'I',
        b'\xd0' : 'D',
        b'\xd1' : 'N',
        b'\xd2' : 'O',
        b'\xd3' : 'O',
        b'\xd4' : 'O',
        b'\xd5' : 'O',
        b'\xd6' : 'O',
        b'\xd7' : '*',
        b'\xd8' : 'O',
        b'\xd9' : 'U',
        b'\xda' : 'U',
        b'\xdb' : 'U',
        b'\xdc' : 'U',
        b'\xdd' : 'Y',
        b'\xde' : 'b',
        b'\xdf' : 'B',
        b'\xe0' : 'a',
        b'\xe1' : 'a',
        b'\xe2' : 'a',
        b'\xe3' : 'a',
        b'\xe4' : 'a',
        b'\xe5' : 'a',
        b'\xe6' : 'ae',
        b'\xe7' : 'c',
        b'\xe8' : 'e',
        b'\xe9' : 'e',
        b'\xea' : 'e',
        b'\xeb' : 'e',
        b'\xec' : 'i',
        b'\xed' : 'i',
        b'\xee' : 'i',
        b'\xef' : 'i',
        b'\xf0' : 'o',
        b'\xf1' : 'n',
        b'\xf2' : 'o',
        b'\xf3' : 'o',
        b'\xf4' : 'o',
        b'\xf5' : 'o',
        b'\xf6' : 'o',
        b'\xf7' : '/',
        b'\xf8' : 'o',
        b'\xf9' : 'u',
        b'\xfa' : 'u',
        b'\xfb' : 'u',
        b'\xfc' : 'u',
        b'\xfd' : 'y',
        b'\xfe' : 'b',
        b'\xff' : 'y',
        }

    # A map used when removing rogue Windows-1252/ISO-8859-1
    # characters in otherwise UTF-8 documents.
    #
    # Note that \x81, \x8d, \x8f, \x90, and \x9d are undefined in
    # Windows-1252.
    WINDOWS_1252_TO_UTF8 = {
        0x80 : b'\xe2\x82\xac', # €
        0x82 : b'\xe2\x80\x9a', # ‚
        0x83 : b'\xc6\x92',     # ƒ
        0x84 : b'\xe2\x80\x9e', # „
        0x85 : b'\xe2\x80\xa6', # …
        0x86 : b'\xe2\x80\xa0', # †
        0x87 : b'\xe2\x80\xa1', # ‡
        0x88 : b'\xcb\x86',     # ˆ
        0x89 : b'\xe2\x80\xb0', # ‰
        0x8a : b'\xc5\xa0',     # Š
        0x8b : b'\xe2\x80\xb9', # ‹
        0x8c : b'\xc5\x92',     # Œ
        0x8e : b'\xc5\xbd',     # Ž
        0x91 : b'\xe2\x80\x98', # ‘
        0x92 : b'\xe2\x80\x99', # ’
        0x93 : b'\xe2\x80\x9c', # “
        0x94 : b'\xe2\x80\x9d', # ”
        0x95 : b'\xe2\x80\xa2', # •
        0x96 : b'\xe2\x80\x93', # –
        0x97 : b'\xe2\x80\x94', # —
        0x98 : b'\xcb\x9c',     # ˜
        0x99 : b'\xe2\x84\xa2', # ™
        0x9a : b'\xc5\xa1',     # š
        0x9b : b'\xe2\x80\xba', # ›
        0x9c : b'\xc5\x93',     # œ
        0x9e : b'\xc5\xbe',     # ž
        0x9f : b'\xc5\xb8',     # Ÿ
        0xa0 : b'\xc2\xa0',     #  
        0xa1 : b'\xc2\xa1',     # ¡
        0xa2 : b'\xc2\xa2',     # ¢
        0xa3 : b'\xc2\xa3',     # £
        0xa4 : b'\xc2\xa4',     # ¤
        0xa5 : b'\xc2\xa5',     # ¥
        0xa6 : b'\xc2\xa6',     # ¦
        0xa7 : b'\xc2\xa7',     # §
        0xa8 : b'\xc2\xa8',     # ¨
        0xa9 : b'\xc2\xa9',     # ©
        0xaa : b'\xc2\xaa',     # ª
        0xab : b'\xc2\xab',     # «
        0xac : b'\xc2\xac',     # ¬
        0xad : b'\xc2\xad',     # ­
        0xae : b'\xc2\xae',     # ®
        0xaf : b'\xc2\xaf',     # ¯
        0xb0 : b'\xc2\xb0',     # °
        0xb1 : b'\xc2\xb1',     # ±
        0xb2 : b'\xc2\xb2',     # ²
        0xb3 : b'\xc2\xb3',     # ³
        0xb4 : b'\xc2\xb4',     # ´
        0xb5 : b'\xc2\xb5',     # µ
        0xb6 : b'\xc2\xb6',     # ¶
        0xb7 : b'\xc2\xb7',     # ·
        0xb8 : b'\xc2\xb8',     # ¸
        0xb9 : b'\xc2\xb9',     # ¹
        0xba : b'\xc2\xba',     # º
        0xbb : b'\xc2\xbb',     # »
        0xbc : b'\xc2\xbc',     # ¼
        0xbd : b'\xc2\xbd',     # ½
        0xbe : b'\xc2\xbe',     # ¾
        0xbf : b'\xc2\xbf',     # ¿
        0xc0 : b'\xc3\x80',     # À
        0xc1 : b'\xc3\x81',     # Á
        0xc2 : b'\xc3\x82',     # Â
        0xc3 : b'\xc3\x83',     # Ã
        0xc4 : b'\xc3\x84',     # Ä
        0xc5 : b'\xc3\x85',     # Å
        0xc6 : b'\xc3\x86',     # Æ
        0xc7 : b'\xc3\x87',     # Ç
        0xc8 : b'\xc3\x88',     # È
        0xc9 : b'\xc3\x89',     # É
        0xca : b'\xc3\x8a',     # Ê
        0xcb : b'\xc3\x8b',     # Ë
        0xcc : b'\xc3\x8c',     # Ì
        0xcd : b'\xc3\x8d',     # Í
        0xce : b'\xc3\x8e',     # Î
        0xcf : b'\xc3\x8f',     # Ï
        0xd0 : b'\xc3\x90',     # Ð
        0xd1 : b'\xc3\x91',     # Ñ
        0xd2 : b'\xc3\x92',     # Ò
        0xd3 : b'\xc3\x93',     # Ó
        0xd4 : b'\xc3\x94',     # Ô
        0xd5 : b'\xc3\x95',     # Õ
        0xd6 : b'\xc3\x96',     # Ö
        0xd7 : b'\xc3\x97',     # ×
        0xd8 : b'\xc3\x98',     # Ø
        0xd9 : b'\xc3\x99',     # Ù
        0xda : b'\xc3\x9a',     # Ú
        0xdb : b'\xc3\x9b',     # Û
        0xdc : b'\xc3\x9c',     # Ü
        0xdd : b'\xc3\x9d',     # Ý
        0xde : b'\xc3\x9e',     # Þ
        0xdf : b'\xc3\x9f',     # ß
        0xe0 : b'\xc3\xa0',     # à
        0xe1 : b'\xc3\xa1',     # á
        0xe2 : b'\xc3\xa2',     # â
        0xe3 : b'\xc3\xa3',     # ã
        0xe4 : b'\xc3\xa4',     # ä
        0xe5 : b'\xc3\xa5',     # å
        0xe6 : b'\xc3\xa6',     # æ
        0xe7 : b'\xc3\xa7',     # ç
        0xe8 : b'\xc3\xa8',     # è
        0xe9 : b'\xc3\xa9',     # é
        0xea : b'\xc3\xaa',     # ê
        0xeb : b'\xc3\xab',     # ë
        0xec : b'\xc3\xac',     # ì
        0xed : b'\xc3\xad',     # í
        0xee : b'\xc3\xae',     # î
        0xef : b'\xc3\xaf',     # ï
        0xf0 : b'\xc3\xb0',     # ð
        0xf1 : b'\xc3\xb1',     # ñ
        0xf2 : b'\xc3\xb2',     # ò
        0xf3 : b'\xc3\xb3',     # ó
        0xf4 : b'\xc3\xb4',     # ô
        0xf5 : b'\xc3\xb5',     # õ
        0xf6 : b'\xc3\xb6',     # ö
        0xf7 : b'\xc3\xb7',     # ÷
        0xf8 : b'\xc3\xb8',     # ø
        0xf9 : b'\xc3\xb9',     # ù
        0xfa : b'\xc3\xba',     # ú
        0xfb : b'\xc3\xbb',     # û
        0xfc : b'\xc3\xbc',     # ü
        0xfd : b'\xc3\xbd',     # ý
        0xfe : b'\xc3\xbe',     # þ
        }

    MULTIBYTE_MARKERS_AND_SIZES = [
        (0xc2, 0xdf, 2), # 2-byte characters start with a byte C2-DF
        (0xe0, 0xef, 3), # 3-byte characters start with E0-EF
        (0xf0, 0xf4, 4), # 4-byte characters start with F0-F4
        ]

    FIRST_MULTIBYTE_MARKER = MULTIBYTE_MARKERS_AND_SIZES[0][0]
    LAST_MULTIBYTE_MARKER = MULTIBYTE_MARKERS_AND_SIZES[-1][1]

    @classmethod
    def detwingle(cls, in_bytes, main_encoding="utf8",
                  embedded_encoding="windows-1252"):
        """Fix characters from one encoding embedded in some other encoding.

        Currently the only situation supported is Windows-1252 (or its
        subset ISO-8859-1), embedded in UTF-8.

        The input must be a bytestring. If you've already converted
        the document to Unicode, you're too late.

        The output is a bytestring in which `embedded_encoding`
        characters have been converted to their `main_encoding`
        equivalents.
        """
        if embedded_encoding.replace('_', '-').lower() not in (
            'windows-1252', 'windows_1252'):
            raise NotImplementedError(
                "Windows-1252 and ISO-8859-1 are the only currently supported "
                "embedded encodings.")

        if main_encoding.lower() not in ('utf8', 'utf-8'):
            raise NotImplementedError(
                "UTF-8 is the only currently supported main encoding.")

        byte_chunks = []

        chunk_start = 0
        pos = 0
        while pos < len(in_bytes):
            byte = in_bytes[pos]
            if not isinstance(byte, int):
                # Python 2.x
                byte = ord(byte)
            if (byte >= cls.FIRST_MULTIBYTE_MARKER
                and byte <= cls.LAST_MULTIBYTE_MARKER):
                # This is the start of a UTF-8 multibyte character. Skip
                # to the end.
                for start, end, size in cls.MULTIBYTE_MARKERS_AND_SIZES:
                    if byte >= start and byte <= end:
                        pos += size
                        break
            elif byte >= 0x80 and byte in cls.WINDOWS_1252_TO_UTF8:
                # We found a Windows-1252 character!
                # Save the string up to this point as a chunk.
                byte_chunks.append(in_bytes[chunk_start:pos])

                # Now translate the Windows-1252 character into UTF-8
                # and add it as another, one-byte chunk.
                byte_chunks.append(cls.WINDOWS_1252_TO_UTF8[byte])
                pos += 1
                chunk_start = pos
            else:
                # Go on to the next character.
                pos += 1
        if chunk_start == 0:
            # The string is unchanged.
            return in_bytes
        else:
            # Store the final chunk.
            byte_chunks.append(in_bytes[chunk_start:])
        return b''.join(byte_chunks)
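        # A usage sketch, mirroring the situation described above: a
        # Windows-1252 smart quote (\x93 / \x94) embedded in otherwise
        # valid UTF-8 is rewritten to its UTF-8 equivalent, so the whole
        # thing can then be decoded as UTF-8.
        #
        #     doc = u'\N{SNOWMAN}'.encode('utf8') + b'\x93Hello\x94'
        #     UnicodeDammit.detwingle(doc).decode('utf8')
        #     -> u'\u2603\u201cHello\u201d'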
@@ -1,204 +0,0 @@
|
||||
"""Diagnostic functions, mainly for use when doing tech support."""
|
||||
import cProfile
|
||||
from StringIO import StringIO
|
||||
from HTMLParser import HTMLParser
|
||||
import bs4
|
||||
from bs4 import BeautifulSoup, __version__
|
||||
from bs4.builder import builder_registry
|
||||
|
||||
import os
|
||||
import pstats
|
||||
import random
|
||||
import tempfile
|
||||
import time
|
||||
import traceback
|
||||
import sys
|
||||
import cProfile
|
||||
|
||||
def diagnose(data):
|
||||
"""Diagnostic suite for isolating common problems."""
|
||||
print "Diagnostic running on Beautiful Soup %s" % __version__
|
||||
print "Python version %s" % sys.version
|
||||
|
||||
basic_parsers = ["html.parser", "html5lib", "lxml"]
|
||||
for name in basic_parsers:
|
||||
for builder in builder_registry.builders:
|
||||
if name in builder.features:
|
||||
break
|
||||
else:
|
||||
basic_parsers.remove(name)
|
||||
print (
|
||||
"I noticed that %s is not installed. Installing it may help." %
|
||||
name)
|
||||
|
||||
if 'lxml' in basic_parsers:
|
||||
basic_parsers.append(["lxml", "xml"])
|
||||
from lxml import etree
|
||||
print "Found lxml version %s" % ".".join(map(str,etree.LXML_VERSION))
|
||||
|
||||
if 'html5lib' in basic_parsers:
|
||||
import html5lib
|
||||
print "Found html5lib version %s" % html5lib.__version__
|
||||
|
||||
if hasattr(data, 'read'):
|
||||
data = data.read()
|
||||
elif os.path.exists(data):
|
||||
print '"%s" looks like a filename. Reading data from the file.' % data
|
||||
data = open(data).read()
|
||||
elif data.startswith("http:") or data.startswith("https:"):
|
||||
print '"%s" looks like a URL. Beautiful Soup is not an HTTP client.' % data
|
||||
print "You need to use some other library to get the document behind the URL, and feed that document to Beautiful Soup."
|
||||
return
|
||||
print
|
||||
|
||||
for parser in basic_parsers:
|
||||
print "Trying to parse your markup with %s" % parser
|
||||
success = False
|
||||
try:
|
||||
soup = BeautifulSoup(data, parser)
|
||||
success = True
|
||||
except Exception, e:
|
||||
print "%s could not parse the markup." % parser
|
||||
traceback.print_exc()
|
||||
if success:
|
||||
print "Here's what %s did with the markup:" % parser
|
||||
print soup.prettify()
|
||||
|
||||
print "-" * 80
|
||||
|
||||
def lxml_trace(data, html=True, **kwargs):
|
||||
"""Print out the lxml events that occur during parsing.
|
||||
|
||||
This lets you see how lxml parses a document when no Beautiful
|
||||
Soup code is running.
|
||||
"""
|
||||
from lxml import etree
|
||||
for event, element in etree.iterparse(StringIO(data), html=html, **kwargs):
|
||||
print("%s, %4s, %s" % (event, element.tag, element.text))
|
||||
|
||||
class AnnouncingParser(HTMLParser):
|
||||
"""Announces HTMLParser parse events, without doing anything else."""
|
||||
|
||||
def _p(self, s):
|
||||
print(s)
|
||||
|
||||
def handle_starttag(self, name, attrs):
|
||||
self._p("%s START" % name)
|
||||
|
||||
def handle_endtag(self, name):
|
||||
self._p("%s END" % name)
|
||||
|
||||
def handle_data(self, data):
|
||||
self._p("%s DATA" % data)
|
||||
|
||||
def handle_charref(self, name):
|
||||
self._p("%s CHARREF" % name)
|
||||
|
||||
def handle_entityref(self, name):
|
||||
self._p("%s ENTITYREF" % name)
|
||||
|
||||
def handle_comment(self, data):
|
||||
self._p("%s COMMENT" % data)
|
||||
|
||||
def handle_decl(self, data):
|
||||
self._p("%s DECL" % data)
|
||||
|
||||
def unknown_decl(self, data):
|
||||
self._p("%s UNKNOWN-DECL" % data)
|
||||
|
||||
def handle_pi(self, data):
|
||||
self._p("%s PI" % data)
|
||||
|
||||
def htmlparser_trace(data):
|
||||
"""Print out the HTMLParser events that occur during parsing.
|
||||
|
||||
This lets you see how HTMLParser parses a document when no
|
||||
Beautiful Soup code is running.
|
||||
"""
|
||||
parser = AnnouncingParser()
|
||||
parser.feed(data)
|
||||
|
||||
_vowels = "aeiou"
|
||||
_consonants = "bcdfghjklmnpqrstvwxyz"
|
||||
|
||||
def rword(length=5):
|
||||
"Generate a random word-like string."
|
||||
s = ''
|
||||
for i in range(length):
|
||||
if i % 2 == 0:
|
||||
t = _consonants
|
||||
else:
|
||||
t = _vowels
|
||||
s += random.choice(t)
|
||||
return s
|
||||
|
||||
def rsentence(length=4):
|
||||
"Generate a random sentence-like string."
|
||||
return " ".join(rword(random.randint(4,9)) for i in range(length))
|
||||
|
||||
def rdoc(num_elements=1000):
|
||||
"""Randomly generate an invalid HTML document."""
|
||||
tag_names = ['p', 'div', 'span', 'i', 'b', 'script', 'table']
|
||||
elements = []
|
||||
for i in range(num_elements):
|
||||
choice = random.randint(0,3)
|
||||
if choice == 0:
|
||||
# New tag.
|
||||
tag_name = random.choice(tag_names)
|
||||
elements.append("<%s>" % tag_name)
|
||||
elif choice == 1:
|
||||
elements.append(rsentence(random.randint(1,4)))
|
||||
elif choice == 2:
|
||||
# Close a tag.
|
||||
tag_name = random.choice(tag_names)
|
||||
elements.append("</%s>" % tag_name)
|
||||
return "<html>" + "\n".join(elements) + "</html>"
|
||||
|
||||
def benchmark_parsers(num_elements=100000):
|
||||
"""Very basic head-to-head performance benchmark."""
|
||||
print "Comparative parser benchmark on Beautiful Soup %s" % __version__
|
||||
data = rdoc(num_elements)
|
||||
print "Generated a large invalid HTML document (%d bytes)." % len(data)
|
||||
|
||||
for parser in ["lxml", ["lxml", "html"], "html5lib", "html.parser"]:
|
||||
success = False
|
||||
try:
|
||||
a = time.time()
|
||||
soup = BeautifulSoup(data, parser)
|
||||
b = time.time()
|
||||
success = True
|
||||
except Exception, e:
|
||||
print "%s could not parse the markup." % parser
|
||||
traceback.print_exc()
|
||||
if success:
|
||||
print "BS4+%s parsed the markup in %.2fs." % (parser, b-a)
|
||||
|
||||
from lxml import etree
|
||||
a = time.time()
|
||||
etree.HTML(data)
|
||||
b = time.time()
|
||||
print "Raw lxml parsed the markup in %.2fs." % (b-a)
|
||||
|
||||
import html5lib
|
||||
parser = html5lib.HTMLParser()
|
||||
a = time.time()
|
||||
parser.parse(data)
|
||||
b = time.time()
|
||||
print "Raw html5lib parsed the markup in %.2fs." % (b-a)
|
||||
|
||||
def profile(num_elements=100000, parser="lxml"):
|
||||
|
||||
filehandle = tempfile.NamedTemporaryFile()
|
||||
filename = filehandle.name
|
||||
|
||||
data = rdoc(num_elements)
|
||||
vars = dict(bs4=bs4, data=data, parser=parser)
|
||||
cProfile.runctx('bs4.BeautifulSoup(data, parser)' , vars, vars, filename)
|
||||
|
||||
stats = pstats.Stats(filename)
|
||||
# stats.strip_dirs()
|
||||
stats.sort_stats("cumulative")
|
||||
stats.print_stats('_html5lib|bs4', 50)
|
||||
|
||||
if __name__ == '__main__':
|
||||
diagnose(sys.stdin.read())
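
# Illustrative usage sketch (not part of the removed file): the helpers
# above were meant to be driven from an interactive session. A minimal
# example, assuming bs4 and at least one third-party parser are importable:
#
#     from bs4.diagnose import diagnose, benchmark_parsers, profile
#     diagnose("<p>unclosed<b>markup")      # re-parses with every available builder
#     benchmark_parsers(10000)              # rough per-parser timings on random markup
#     profile(10000, parser="html.parser")  # cProfile hotspots for one parser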
File diff suppressed because it is too large
@@ -1,592 +0,0 @@
"""Helper classes for tests."""

import copy
import functools
import unittest
from unittest import TestCase
from bs4 import BeautifulSoup
from bs4.element import (
    CharsetMetaAttributeValue,
    Comment,
    ContentMetaAttributeValue,
    Doctype,
    SoupStrainer,
)

from bs4.builder import HTMLParserTreeBuilder
default_builder = HTMLParserTreeBuilder


class SoupTest(unittest.TestCase):

    @property
    def default_builder(self):
        return default_builder()

    def soup(self, markup, **kwargs):
        """Build a Beautiful Soup object from markup."""
        builder = kwargs.pop('builder', self.default_builder)
        return BeautifulSoup(markup, builder=builder, **kwargs)

    def document_for(self, markup):
        """Turn an HTML fragment into a document.

        The details depend on the builder.
        """
        return self.default_builder.test_fragment_to_document(markup)

    def assertSoupEquals(self, to_parse, compare_parsed_to=None):
        builder = self.default_builder
        obj = BeautifulSoup(to_parse, builder=builder)
        if compare_parsed_to is None:
            compare_parsed_to = to_parse

        self.assertEqual(obj.decode(), self.document_for(compare_parsed_to))


class HTMLTreeBuilderSmokeTest(object):

    """A basic test of a treebuilder's competence.

    Any HTML treebuilder, present or future, should be able to pass
    these tests. With invalid markup, there's room for interpretation,
    and different parsers can handle it differently. But with the
    markup in these tests, there's not much room for interpretation.
    """

    def assertDoctypeHandled(self, doctype_fragment):
        """Assert that a given doctype string is handled correctly."""
        doctype_str, soup = self._document_with_doctype(doctype_fragment)

        # Make sure a Doctype object was created.
        doctype = soup.contents[0]
        self.assertEqual(doctype.__class__, Doctype)
        self.assertEqual(doctype, doctype_fragment)
        self.assertEqual(str(soup)[:len(doctype_str)], doctype_str)

        # Make sure that the doctype was correctly associated with the
        # parse tree and that the rest of the document parsed.
        self.assertEqual(soup.p.contents[0], 'foo')

    def _document_with_doctype(self, doctype_fragment):
        """Generate and parse a document with the given doctype."""
        doctype = '<!DOCTYPE %s>' % doctype_fragment
        markup = doctype + '\n<p>foo</p>'
        soup = self.soup(markup)
        return doctype, soup

    def test_normal_doctypes(self):
        """Make sure normal, everyday HTML doctypes are handled correctly."""
        self.assertDoctypeHandled("html")
        self.assertDoctypeHandled(
            'html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"')

    def test_empty_doctype(self):
        soup = self.soup("<!DOCTYPE>")
        doctype = soup.contents[0]
        self.assertEqual("", doctype.strip())

    def test_public_doctype_with_url(self):
        doctype = 'html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"'
        self.assertDoctypeHandled(doctype)

    def test_system_doctype(self):
        self.assertDoctypeHandled('foo SYSTEM "http://www.example.com/"')

    def test_namespaced_system_doctype(self):
        # We can handle a namespaced doctype with a system ID.
        self.assertDoctypeHandled('xsl:stylesheet SYSTEM "htmlent.dtd"')

    def test_namespaced_public_doctype(self):
        # Test a namespaced doctype with a public id.
        self.assertDoctypeHandled('xsl:stylesheet PUBLIC "htmlent.dtd"')

    def test_real_xhtml_document(self):
        """A real XHTML document should come out more or less the same as it went in."""
        markup = b"""<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN">
<html xmlns="http://www.w3.org/1999/xhtml">
<head><title>Hello.</title></head>
<body>Goodbye.</body>
</html>"""
        soup = self.soup(markup)
        self.assertEqual(
            soup.encode("utf-8").replace(b"\n", b""),
            markup.replace(b"\n", b""))

    def test_deepcopy(self):
        """Make sure you can copy the tree builder.

        This is important because the builder is part of a
        BeautifulSoup object, and we want to be able to copy that.
        """
        copy.deepcopy(self.default_builder)

    def test_p_tag_is_never_empty_element(self):
        """A <p> tag is never designated as an empty-element tag.

        Even if the markup shows it as an empty-element tag, it
        shouldn't be presented that way.
        """
        soup = self.soup("<p/>")
        self.assertFalse(soup.p.is_empty_element)
        self.assertEqual(str(soup.p), "<p></p>")

    def test_unclosed_tags_get_closed(self):
        """A tag that's not closed by the end of the document should be closed.

        This applies to all tags except empty-element tags.
        """
        self.assertSoupEquals("<p>", "<p></p>")
        self.assertSoupEquals("<b>", "<b></b>")

        self.assertSoupEquals("<br>", "<br/>")

    def test_br_is_always_empty_element_tag(self):
        """A <br> tag is designated as an empty-element tag.

        Some parsers treat <br></br> as one <br/> tag, some parsers as
        two tags, but it should always be an empty-element tag.
        """
        soup = self.soup("<br></br>")
        self.assertTrue(soup.br.is_empty_element)
        self.assertEqual(str(soup.br), "<br/>")

    def test_nested_formatting_elements(self):
        self.assertSoupEquals("<em><em></em></em>")

    def test_comment(self):
        # Comments are represented as Comment objects.
        markup = "<p>foo<!--foobar-->baz</p>"
        self.assertSoupEquals(markup)

        soup = self.soup(markup)
        comment = soup.find(text="foobar")
        self.assertEqual(comment.__class__, Comment)

        # The comment is properly integrated into the tree.
        foo = soup.find(text="foo")
        self.assertEqual(comment, foo.next_element)
        baz = soup.find(text="baz")
        self.assertEqual(comment, baz.previous_element)

    def test_preserved_whitespace_in_pre_and_textarea(self):
        """Whitespace must be preserved in <pre> and <textarea> tags."""
        self.assertSoupEquals("<pre>   </pre>")
        self.assertSoupEquals("<textarea> woo  </textarea>")

    def test_nested_inline_elements(self):
        """Inline elements can be nested indefinitely."""
        b_tag = "<b>Inside a B tag</b>"
        self.assertSoupEquals(b_tag)

        nested_b_tag = "<p>A <i>nested <b>tag</b></i></p>"
        self.assertSoupEquals(nested_b_tag)

        double_nested_b_tag = "<p>A <a>doubly <i>nested <b>tag</b></i></a></p>"
        self.assertSoupEquals(double_nested_b_tag)

    def test_nested_block_level_elements(self):
        """Block elements can be nested."""
        soup = self.soup('<blockquote><p><b>Foo</b></p></blockquote>')
        blockquote = soup.blockquote
        self.assertEqual(blockquote.p.b.string, 'Foo')
        self.assertEqual(blockquote.b.string, 'Foo')

    def test_correctly_nested_tables(self):
        """One table can go inside another one."""
        markup = ('<table id="1">'
                  '<tr>'
                  "<td>Here's another table:"
                  '<table id="2">'
                  '<tr><td>foo</td></tr>'
                  '</table></td>')

        self.assertSoupEquals(
            markup,
            '<table id="1"><tr><td>Here\'s another table:'
            '<table id="2"><tr><td>foo</td></tr></table>'
            '</td></tr></table>')

        self.assertSoupEquals(
            "<table><thead><tr><td>Foo</td></tr></thead>"
            "<tbody><tr><td>Bar</td></tr></tbody>"
            "<tfoot><tr><td>Baz</td></tr></tfoot></table>")

    def test_deeply_nested_multivalued_attribute(self):
        # html5lib can set the attributes of the same tag many times
        # as it rearranges the tree. This has caused problems with
        # multivalued attributes.
        markup = '<table><div><div class="css"></div></div></table>'
        soup = self.soup(markup)
        self.assertEqual(["css"], soup.div.div['class'])

    def test_angle_brackets_in_attribute_values_are_escaped(self):
        self.assertSoupEquals('<a b="<a>"></a>', '<a b="&lt;a&gt;"></a>')

    def test_entities_in_attributes_converted_to_unicode(self):
        expect = u'<p id="pi\N{LATIN SMALL LETTER N WITH TILDE}ata"></p>'
        self.assertSoupEquals('<p id="pi&ntilde;ata"></p>', expect)
        self.assertSoupEquals('<p id="pi&#241;ata"></p>', expect)
        self.assertSoupEquals('<p id="pi&#xf1;ata"></p>', expect)
        self.assertSoupEquals('<p id="pi&#Xf1;ata"></p>', expect)

    def test_entities_in_text_converted_to_unicode(self):
        expect = u'<p>pi\N{LATIN SMALL LETTER N WITH TILDE}ata</p>'
        self.assertSoupEquals("<p>pi&ntilde;ata</p>", expect)
        self.assertSoupEquals("<p>pi&#241;ata</p>", expect)
        self.assertSoupEquals("<p>pi&#xf1;ata</p>", expect)
        self.assertSoupEquals("<p>pi&#Xf1;ata</p>", expect)

    def test_quot_entity_converted_to_quotation_mark(self):
        self.assertSoupEquals("<p>I said &quot;good day!&quot;</p>",
                              '<p>I said "good day!"</p>')

    def test_out_of_range_entity(self):
        expect = u"\N{REPLACEMENT CHARACTER}"
        self.assertSoupEquals("&#10000000000000;", expect)
        self.assertSoupEquals("&#x10000000000000;", expect)
        self.assertSoupEquals("&#1000000000;", expect)

    def test_multipart_strings(self):
        "Mostly to prevent a recurrence of a bug in the html5lib treebuilder."
        soup = self.soup("<html><h2>\nfoo</h2><p></p></html>")
        self.assertEqual("p", soup.h2.string.next_element.name)
        self.assertEqual("p", soup.p.name)

    def test_basic_namespaces(self):
        """Parsers don't need to *understand* namespaces, but at the
        very least they should not choke on namespaces or lose
        data."""

        markup = b'<html xmlns="http://www.w3.org/1999/xhtml" xmlns:mathml="http://www.w3.org/1998/Math/MathML" xmlns:svg="http://www.w3.org/2000/svg"><head></head><body><mathml:msqrt>4</mathml:msqrt><b svg:fill="red"></b></body></html>'
        soup = self.soup(markup)
        self.assertEqual(markup, soup.encode())
        html = soup.html
        self.assertEqual('http://www.w3.org/1999/xhtml', soup.html['xmlns'])
        self.assertEqual(
            'http://www.w3.org/1998/Math/MathML', soup.html['xmlns:mathml'])
        self.assertEqual(
            'http://www.w3.org/2000/svg', soup.html['xmlns:svg'])

    def test_multivalued_attribute_value_becomes_list(self):
        markup = b'<a class="foo bar">'
        soup = self.soup(markup)
        self.assertEqual(['foo', 'bar'], soup.a['class'])

    #
    # Generally speaking, tests below this point are more tests of
    # Beautiful Soup than tests of the tree builders. But parsers are
    # weird, so we run these tests separately for every tree builder
    # to detect any differences between them.
    #

    def test_can_parse_unicode_document(self):
        # A seemingly innocuous document... but it's in Unicode! And
        # it contains characters that can't be represented in the
        # encoding found in the declaration! The horror!
        markup = u'<html><head><meta encoding="euc-jp"></head><body>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</body>'
        soup = self.soup(markup)
        self.assertEqual(u'Sacr\xe9 bleu!', soup.body.string)

    def test_soupstrainer(self):
        """Parsers should be able to work with SoupStrainers."""
        strainer = SoupStrainer("b")
        soup = self.soup("A <b>bold</b> <meta/> <i>statement</i>",
                         parse_only=strainer)
        self.assertEqual(soup.decode(), "<b>bold</b>")

    def test_single_quote_attribute_values_become_double_quotes(self):
        self.assertSoupEquals("<foo attr='bar'></foo>",
                              '<foo attr="bar"></foo>')

    def test_attribute_values_with_nested_quotes_are_left_alone(self):
        text = """<foo attr='bar "brawls" happen'>a</foo>"""
        self.assertSoupEquals(text)

    def test_attribute_values_with_double_nested_quotes_get_quoted(self):
        text = """<foo attr='bar "brawls" happen'>a</foo>"""
        soup = self.soup(text)
        soup.foo['attr'] = 'Brawls happen at "Bob\'s Bar"'
        self.assertSoupEquals(
            soup.foo.decode(),
            """<foo attr="Brawls happen at &quot;Bob\'s Bar&quot;">a</foo>""")

    def test_ampersand_in_attribute_value_gets_escaped(self):
        self.assertSoupEquals('<this is="really messed up & stuff"></this>',
                              '<this is="really messed up &amp; stuff"></this>')

        self.assertSoupEquals(
            '<a href="http://example.org?a=1&b=2;3">foo</a>',
            '<a href="http://example.org?a=1&amp;b=2;3">foo</a>')

    def test_escaped_ampersand_in_attribute_value_is_left_alone(self):
        self.assertSoupEquals('<a href="http://example.org?a=1&amp;b=2;3"></a>')

    def test_entities_in_strings_converted_during_parsing(self):
        # Both XML and HTML entities are converted to Unicode characters
        # during parsing.
        text = "<p>&lt;&lt;sacr&eacute; bleu!&gt;&gt;</p>"
        expected = u"<p>&lt;&lt;sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</p>"
        self.assertSoupEquals(text, expected)

    def test_smart_quotes_converted_on_the_way_in(self):
        # Microsoft smart quotes are converted to Unicode characters during
        # parsing.
        quote = b"<p>\x91Foo\x92</p>"
        soup = self.soup(quote)
        self.assertEqual(
            soup.p.string,
            u"\N{LEFT SINGLE QUOTATION MARK}Foo\N{RIGHT SINGLE QUOTATION MARK}")

    def test_non_breaking_spaces_converted_on_the_way_in(self):
        soup = self.soup("<a>&nbsp;&nbsp;</a>")
        self.assertEqual(soup.a.string, u"\N{NO-BREAK SPACE}" * 2)

    def test_entities_converted_on_the_way_out(self):
        text = "<p>&lt;&lt;sacr&eacute; bleu!&gt;&gt;</p>"
        expected = u"<p>&lt;&lt;sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</p>".encode("utf-8")
        soup = self.soup(text)
        self.assertEqual(soup.p.encode("utf-8"), expected)

    def test_real_iso_latin_document(self):
        # Smoke test of interrelated functionality, using an
        # easy-to-understand document.

        # Here it is in Unicode. Note that it claims to be in ISO-Latin-1.
        unicode_html = u'<html><head><meta content="text/html; charset=ISO-Latin-1" http-equiv="Content-type"/></head><body><p>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</p></body></html>'

        # That's because we're going to encode it into ISO-Latin-1, and use
        # that to test.
        iso_latin_html = unicode_html.encode("iso-8859-1")

        # Parse the ISO-Latin-1 HTML.
        soup = self.soup(iso_latin_html)
        # Encode it to UTF-8.
        result = soup.encode("utf-8")

        # What do we expect the result to look like? Well, it would
        # look like unicode_html, except that the META tag would say
        # UTF-8 instead of ISO-Latin-1.
        expected = unicode_html.replace("ISO-Latin-1", "utf-8")

        # And, of course, it would be in UTF-8, not Unicode.
        expected = expected.encode("utf-8")

        # Ta-da!
        self.assertEqual(result, expected)

    def test_real_shift_jis_document(self):
        # Smoke test to make sure the parser can handle a document in
        # Shift-JIS encoding, without choking.
        shift_jis_html = (
            b'<html><head></head><body><pre>'
            b'\x82\xb1\x82\xea\x82\xcdShift-JIS\x82\xc5\x83R\x81[\x83f'
            b'\x83B\x83\x93\x83O\x82\xb3\x82\xea\x82\xbd\x93\xfa\x96{\x8c'
            b'\xea\x82\xcc\x83t\x83@\x83C\x83\x8b\x82\xc5\x82\xb7\x81B'
            b'</pre></body></html>')
        unicode_html = shift_jis_html.decode("shift-jis")
        soup = self.soup(unicode_html)

        # Make sure the parse tree is correctly encoded to various
        # encodings.
        self.assertEqual(soup.encode("utf-8"), unicode_html.encode("utf-8"))
        self.assertEqual(soup.encode("euc_jp"), unicode_html.encode("euc_jp"))

    def test_real_hebrew_document(self):
        # A real-world test to make sure we can convert ISO-8859-8 (a
        # Hebrew encoding) to UTF-8.
        hebrew_document = b'<html><head><title>Hebrew (ISO 8859-8) in Visual Directionality</title></head><body><h1>Hebrew (ISO 8859-8) in Visual Directionality</h1>\xed\xe5\xec\xf9</body></html>'
        soup = self.soup(
            hebrew_document, from_encoding="iso8859-8")
        self.assertEqual(soup.original_encoding, 'iso8859-8')
        self.assertEqual(
            soup.encode('utf-8'),
            hebrew_document.decode("iso8859-8").encode("utf-8"))

    def test_meta_tag_reflects_current_encoding(self):
        # Here's the <meta> tag saying that a document is
        # encoded in Shift-JIS.
        meta_tag = ('<meta content="text/html; charset=x-sjis" '
                    'http-equiv="Content-type"/>')

        # Here's a document incorporating that meta tag.
        shift_jis_html = (
            '<html><head>\n%s\n'
            '<meta http-equiv="Content-language" content="ja"/>'
            '</head><body>Shift-JIS markup goes here.') % meta_tag
        soup = self.soup(shift_jis_html)

        # Parse the document, and the charset is seemingly unaffected.
        parsed_meta = soup.find('meta', {'http-equiv': 'Content-type'})
        content = parsed_meta['content']
        self.assertEqual('text/html; charset=x-sjis', content)

        # But that value is actually a ContentMetaAttributeValue object.
        self.assertTrue(isinstance(content, ContentMetaAttributeValue))

        # And it will take on a value that reflects its current
        # encoding.
        self.assertEqual('text/html; charset=utf8', content.encode("utf8"))

        # For the rest of the story, see TestSubstitutions in
        # test_tree.py.

    def test_html5_style_meta_tag_reflects_current_encoding(self):
        # Here's the <meta> tag saying that a document is
        # encoded in Shift-JIS.
        meta_tag = ('<meta id="encoding" charset="x-sjis" />')

        # Here's a document incorporating that meta tag.
        shift_jis_html = (
            '<html><head>\n%s\n'
            '<meta http-equiv="Content-language" content="ja"/>'
            '</head><body>Shift-JIS markup goes here.') % meta_tag
        soup = self.soup(shift_jis_html)

        # Parse the document, and the charset is seemingly unaffected.
        parsed_meta = soup.find('meta', id="encoding")
        charset = parsed_meta['charset']
        self.assertEqual('x-sjis', charset)

        # But that value is actually a CharsetMetaAttributeValue object.
        self.assertTrue(isinstance(charset, CharsetMetaAttributeValue))

        # And it will take on a value that reflects its current
        # encoding.
        self.assertEqual('utf8', charset.encode("utf8"))

    def test_tag_with_no_attributes_can_have_attributes_added(self):
        data = self.soup("<a>text</a>")
        data.a['foo'] = 'bar'
        self.assertEqual('<a foo="bar">text</a>', data.a.decode())

class XMLTreeBuilderSmokeTest(object):

    def test_docstring_generated(self):
        soup = self.soup("<root/>")
        self.assertEqual(
            soup.encode(), b'<?xml version="1.0" encoding="utf-8"?>\n<root/>')

    def test_real_xhtml_document(self):
        """A real XHTML document should come out *exactly* the same as it went in."""
        markup = b"""<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN">
<html xmlns="http://www.w3.org/1999/xhtml">
<head><title>Hello.</title></head>
<body>Goodbye.</body>
</html>"""
        soup = self.soup(markup)
        self.assertEqual(
            soup.encode("utf-8"), markup)

    def test_formatter_processes_script_tag_for_xml_documents(self):
        doc = """
  <script type="text/javascript">
  </script>
"""
        soup = BeautifulSoup(doc, "xml")
        # lxml would have stripped this while parsing, but we can add
        # it later.
        soup.script.string = 'console.log("< < hey > > ");'
        encoded = soup.encode()
        self.assertTrue(b"&lt; &lt; hey &gt; &gt;" in encoded)

    def test_can_parse_unicode_document(self):
        markup = u'<?xml version="1.0" encoding="euc-jp"><root>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</root>'
        soup = self.soup(markup)
        self.assertEqual(u'Sacr\xe9 bleu!', soup.root.string)

    def test_popping_namespaced_tag(self):
        markup = '<rss xmlns:dc="foo"><dc:creator>b</dc:creator><dc:date>2012-07-02T20:33:42Z</dc:date><dc:rights>c</dc:rights><image>d</image></rss>'
        soup = self.soup(markup)
        self.assertEqual(
            unicode(soup.rss), markup)

    def test_docstring_includes_correct_encoding(self):
        soup = self.soup("<root/>")
        self.assertEqual(
            soup.encode("latin1"),
            b'<?xml version="1.0" encoding="latin1"?>\n<root/>')

    def test_large_xml_document(self):
        """A large XML document should come out the same as it went in."""
        markup = (b'<?xml version="1.0" encoding="utf-8"?>\n<root>'
                  + b'0' * (2**12)
                  + b'</root>')
        soup = self.soup(markup)
        self.assertEqual(soup.encode("utf-8"), markup)


    def test_tags_are_empty_element_if_and_only_if_they_are_empty(self):
        self.assertSoupEquals("<p>", "<p/>")
        self.assertSoupEquals("<p>foo</p>")

    def test_namespaces_are_preserved(self):
        markup = '<root xmlns:a="http://example.com/" xmlns:b="http://example.net/"><a:foo>This tag is in the a namespace</a:foo><b:foo>This tag is in the b namespace</b:foo></root>'
        soup = self.soup(markup)
        root = soup.root
        self.assertEqual("http://example.com/", root['xmlns:a'])
        self.assertEqual("http://example.net/", root['xmlns:b'])

    def test_closing_namespaced_tag(self):
        markup = '<p xmlns:dc="http://purl.org/dc/elements/1.1/"><dc:date>20010504</dc:date></p>'
        soup = self.soup(markup)
        self.assertEqual(unicode(soup.p), markup)

    def test_namespaced_attributes(self):
        markup = '<foo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><bar xsi:schemaLocation="http://www.example.com"/></foo>'
        soup = self.soup(markup)
        self.assertEqual(unicode(soup.foo), markup)

    def test_namespaced_attributes_xml_namespace(self):
        markup = '<foo xml:lang="fr">bar</foo>'
        soup = self.soup(markup)
        self.assertEqual(unicode(soup.foo), markup)

class HTML5TreeBuilderSmokeTest(HTMLTreeBuilderSmokeTest):
    """Smoke test for a tree builder that supports HTML5."""

    def test_real_xhtml_document(self):
        # Since XHTML is not HTML5, HTML5 parsers are not tested to handle
        # XHTML documents in any particular way.
        pass

    def test_html_tags_have_namespace(self):
        markup = "<a>"
        soup = self.soup(markup)
        self.assertEqual("http://www.w3.org/1999/xhtml", soup.a.namespace)

    def test_svg_tags_have_namespace(self):
        markup = '<svg><circle/></svg>'
        soup = self.soup(markup)
        namespace = "http://www.w3.org/2000/svg"
        self.assertEqual(namespace, soup.svg.namespace)
        self.assertEqual(namespace, soup.circle.namespace)


    def test_mathml_tags_have_namespace(self):
        markup = '<math><msqrt>5</msqrt></math>'
        soup = self.soup(markup)
        namespace = 'http://www.w3.org/1998/Math/MathML'
        self.assertEqual(namespace, soup.math.namespace)
        self.assertEqual(namespace, soup.msqrt.namespace)

    def test_xml_declaration_becomes_comment(self):
        markup = '<?xml version="1.0" encoding="utf-8"?><html></html>'
        soup = self.soup(markup)
        self.assertTrue(isinstance(soup.contents[0], Comment))
        self.assertEqual(soup.contents[0], '?xml version="1.0" encoding="utf-8"?')
        self.assertEqual("html", soup.contents[0].next_element.name)

def skipIf(condition, reason):
    def nothing(test, *args, **kwargs):
        return None

    def decorator(test_item):
        if condition:
            return nothing
        else:
            return test_item

    return decorator
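
# Illustrative usage sketch (not from the removed file): ``skipIf`` above
# appears to stand in for ``unittest.skipIf`` on older Pythons; applied as a
# decorator, it swaps the test method for a no-op when the condition holds.
# A minimal example, mirroring how the lxml tests below use it:
#
#     @skipIf(not LXML_PRESENT, "lxml seems not to be present")
#     def test_that_needs_lxml(self):
#         ...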
@@ -1 +0,0 @@
"The beautifulsoup tests."
@@ -1,141 +0,0 @@
"""Tests of the builder registry."""

import unittest

from bs4 import BeautifulSoup
from bs4.builder import (
    builder_registry as registry,
    HTMLParserTreeBuilder,
    TreeBuilderRegistry,
)

try:
    from bs4.builder import HTML5TreeBuilder
    HTML5LIB_PRESENT = True
except ImportError:
    HTML5LIB_PRESENT = False

try:
    from bs4.builder import (
        LXMLTreeBuilderForXML,
        LXMLTreeBuilder,
    )
    LXML_PRESENT = True
except ImportError:
    LXML_PRESENT = False


class BuiltInRegistryTest(unittest.TestCase):
    """Test the built-in registry with the default builders registered."""

    def test_combination(self):
        if LXML_PRESENT:
            self.assertEqual(registry.lookup('fast', 'html'),
                             LXMLTreeBuilder)

        if LXML_PRESENT:
            self.assertEqual(registry.lookup('permissive', 'xml'),
                             LXMLTreeBuilderForXML)
        self.assertEqual(registry.lookup('strict', 'html'),
                         HTMLParserTreeBuilder)
        if HTML5LIB_PRESENT:
            self.assertEqual(registry.lookup('html5lib', 'html'),
                             HTML5TreeBuilder)

    def test_lookup_by_markup_type(self):
        if LXML_PRESENT:
            self.assertEqual(registry.lookup('html'), LXMLTreeBuilder)
            self.assertEqual(registry.lookup('xml'), LXMLTreeBuilderForXML)
        else:
            self.assertEqual(registry.lookup('xml'), None)
            if HTML5LIB_PRESENT:
                self.assertEqual(registry.lookup('html'), HTML5TreeBuilder)
            else:
                self.assertEqual(registry.lookup('html'), HTMLParserTreeBuilder)

    def test_named_library(self):
        if LXML_PRESENT:
            self.assertEqual(registry.lookup('lxml', 'xml'),
                             LXMLTreeBuilderForXML)
            self.assertEqual(registry.lookup('lxml', 'html'),
                             LXMLTreeBuilder)
        if HTML5LIB_PRESENT:
            self.assertEqual(registry.lookup('html5lib'),
                             HTML5TreeBuilder)

        self.assertEqual(registry.lookup('html.parser'),
                         HTMLParserTreeBuilder)

    def test_beautifulsoup_constructor_does_lookup(self):
        # You can pass in a string.
        BeautifulSoup("", features="html")
        # Or a list of strings.
        BeautifulSoup("", features=["html", "fast"])

        # You'll get an exception if BS can't find an appropriate
        # builder.
        self.assertRaises(ValueError, BeautifulSoup,
                          "", features="no-such-feature")

class RegistryTest(unittest.TestCase):
    """Test the TreeBuilderRegistry class in general."""

    def setUp(self):
        self.registry = TreeBuilderRegistry()

    def builder_for_features(self, *feature_list):
        cls = type('Builder_' + '_'.join(feature_list),
                   (object,), {'features' : feature_list})

        self.registry.register(cls)
        return cls

    def test_register_with_no_features(self):
        builder = self.builder_for_features()

        # Since the builder advertises no features, you can't find it
        # by looking up features.
        self.assertEqual(self.registry.lookup('foo'), None)

        # But you can find it by doing a lookup with no features, if
        # this happens to be the only registered builder.
        self.assertEqual(self.registry.lookup(), builder)

    def test_register_with_features_makes_lookup_succeed(self):
        builder = self.builder_for_features('foo', 'bar')
        self.assertEqual(self.registry.lookup('foo'), builder)
        self.assertEqual(self.registry.lookup('bar'), builder)

    def test_lookup_fails_when_no_builder_implements_feature(self):
        builder = self.builder_for_features('foo', 'bar')
        self.assertEqual(self.registry.lookup('baz'), None)

    def test_lookup_gets_most_recent_registration_when_no_feature_specified(self):
        builder1 = self.builder_for_features('foo')
        builder2 = self.builder_for_features('bar')
        self.assertEqual(self.registry.lookup(), builder2)

    def test_lookup_fails_when_no_tree_builders_registered(self):
        self.assertEqual(self.registry.lookup(), None)

    def test_lookup_gets_most_recent_builder_supporting_all_features(self):
        has_one = self.builder_for_features('foo')
        has_the_other = self.builder_for_features('bar')
        has_both_early = self.builder_for_features('foo', 'bar', 'baz')
        has_both_late = self.builder_for_features('foo', 'bar', 'quux')
        lacks_one = self.builder_for_features('bar')
        has_the_other = self.builder_for_features('foo')

        # There are two builders featuring 'foo' and 'bar', but
        # the one that also features 'quux' was registered later.
        self.assertEqual(self.registry.lookup('foo', 'bar'),
                         has_both_late)

        # There is only one builder featuring 'foo', 'bar', and 'baz'.
        self.assertEqual(self.registry.lookup('foo', 'bar', 'baz'),
                         has_both_early)

    def test_lookup_fails_when_cannot_reconcile_requested_features(self):
        builder1 = self.builder_for_features('foo', 'bar')
        builder2 = self.builder_for_features('foo', 'baz')
        self.assertEqual(self.registry.lookup('bar', 'baz'), None)
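
# Illustrative usage sketch (not from the removed file): the registry is what
# the ``features`` argument of the BeautifulSoup constructor resolves through,
# as exercised by the tests above. A minimal example:
#
#     from bs4.builder import builder_registry
#     builder_class = builder_registry.lookup('html.parser')   # by builder name
#     builder_class = builder_registry.lookup('fast', 'html')  # by feature set
#     # Returns None when no registered builder supports the features.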
@@ -1,36 +0,0 @@
"Test harness for doctests."

# pylint: disable-msg=E0611,W0142

__metaclass__ = type
__all__ = [
    'additional_tests',
    ]

import atexit
import doctest
import os
#from pkg_resources import (
#    resource_filename, resource_exists, resource_listdir, cleanup_resources)
import unittest

DOCTEST_FLAGS = (
    doctest.ELLIPSIS |
    doctest.NORMALIZE_WHITESPACE |
    doctest.REPORT_NDIFF)


# def additional_tests():
#     "Run the doc tests (README.txt and docs/*, if any exist)"
#     doctest_files = [
#         os.path.abspath(resource_filename('bs4', 'README.txt'))]
#     if resource_exists('bs4', 'docs'):
#         for name in resource_listdir('bs4', 'docs'):
#             if name.endswith('.txt'):
#                 doctest_files.append(
#                     os.path.abspath(
#                         resource_filename('bs4', 'docs/%s' % name)))
#     kwargs = dict(module_relative=False, optionflags=DOCTEST_FLAGS)
#     atexit.register(cleanup_resources)
#     return unittest.TestSuite((
#         doctest.DocFileSuite(*doctest_files, **kwargs)))
@@ -1,85 +0,0 @@
"""Tests to ensure that the html5lib tree builder generates good trees."""

import warnings

try:
    from bs4.builder import HTML5TreeBuilder
    HTML5LIB_PRESENT = True
except ImportError, e:
    HTML5LIB_PRESENT = False
from bs4.element import SoupStrainer
from bs4.testing import (
    HTML5TreeBuilderSmokeTest,
    SoupTest,
    skipIf,
)

@skipIf(
    not HTML5LIB_PRESENT,
    "html5lib seems not to be present, not testing its tree builder.")
class HTML5LibBuilderSmokeTest(SoupTest, HTML5TreeBuilderSmokeTest):
    """See ``HTML5TreeBuilderSmokeTest``."""

    @property
    def default_builder(self):
        return HTML5TreeBuilder()

    def test_soupstrainer(self):
        # The html5lib tree builder does not support SoupStrainers.
        strainer = SoupStrainer("b")
        markup = "<p>A <b>bold</b> statement.</p>"
        with warnings.catch_warnings(record=True) as w:
            soup = self.soup(markup, parse_only=strainer)
        self.assertEqual(
            soup.decode(), self.document_for(markup))

        self.assertTrue(
            "the html5lib tree builder doesn't support parse_only" in
            str(w[0].message))

    def test_correctly_nested_tables(self):
        """html5lib inserts <tbody> tags where other parsers don't."""
        markup = ('<table id="1">'
                  '<tr>'
                  "<td>Here's another table:"
                  '<table id="2">'
                  '<tr><td>foo</td></tr>'
                  '</table></td>')

        self.assertSoupEquals(
            markup,
            '<table id="1"><tbody><tr><td>Here\'s another table:'
            '<table id="2"><tbody><tr><td>foo</td></tr></tbody></table>'
            '</td></tr></tbody></table>')

        self.assertSoupEquals(
            "<table><thead><tr><td>Foo</td></tr></thead>"
            "<tbody><tr><td>Bar</td></tr></tbody>"
            "<tfoot><tr><td>Baz</td></tr></tfoot></table>")

    def test_xml_declaration_followed_by_doctype(self):
        markup = '''<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE html>
<html>
  <head>
  </head>
  <body>
   <p>foo</p>
  </body>
</html>'''
        soup = self.soup(markup)
        # Verify that we can reach the <p> tag; this means the tree is connected.
        self.assertEqual(b"<p>foo</p>", soup.p.encode())

    def test_reparented_markup(self):
        markup = '<p><em>foo</p>\n<p>bar<a></a></em></p>'
        soup = self.soup(markup)
        self.assertEqual(u"<body><p><em>foo</em></p><em>\n</em><p><em>bar<a></a></em></p></body>", soup.body.decode())
        self.assertEqual(2, len(soup.find_all('p')))


    def test_reparented_markup_ends_with_whitespace(self):
        markup = '<p><em>foo</p>\n<p>bar<a></a></em></p>\n'
        soup = self.soup(markup)
        self.assertEqual(u"<body><p><em>foo</em></p><em>\n</em><p><em>bar<a></a></em></p>\n</body>", soup.body.decode())
        self.assertEqual(2, len(soup.find_all('p')))
@@ -1,19 +0,0 @@
"""Tests to ensure that the html.parser tree builder generates good
trees."""

from bs4.testing import SoupTest, HTMLTreeBuilderSmokeTest
from bs4.builder import HTMLParserTreeBuilder

class HTMLParserTreeBuilderSmokeTest(SoupTest, HTMLTreeBuilderSmokeTest):

    @property
    def default_builder(self):
        return HTMLParserTreeBuilder()

    def test_namespaced_system_doctype(self):
        # html.parser can't handle namespaced doctypes, so skip this one.
        pass

    def test_namespaced_public_doctype(self):
        # html.parser can't handle namespaced doctypes, so skip this one.
        pass
@@ -1,91 +0,0 @@
"""Tests to ensure that the lxml tree builder generates good trees."""

import re
import warnings

try:
    import lxml.etree
    LXML_PRESENT = True
    LXML_VERSION = lxml.etree.LXML_VERSION
except ImportError, e:
    LXML_PRESENT = False
    LXML_VERSION = (0,)

if LXML_PRESENT:
    from bs4.builder import LXMLTreeBuilder, LXMLTreeBuilderForXML

from bs4 import (
    BeautifulSoup,
    BeautifulStoneSoup,
    )
from bs4.element import Comment, Doctype, SoupStrainer
from bs4.testing import skipIf
from bs4.tests import test_htmlparser
from bs4.testing import (
    HTMLTreeBuilderSmokeTest,
    XMLTreeBuilderSmokeTest,
    SoupTest,
    skipIf,
)

@skipIf(
    not LXML_PRESENT,
    "lxml seems not to be present, not testing its tree builder.")
class LXMLTreeBuilderSmokeTest(SoupTest, HTMLTreeBuilderSmokeTest):
    """See ``HTMLTreeBuilderSmokeTest``."""

    @property
    def default_builder(self):
        return LXMLTreeBuilder()

    def test_out_of_range_entity(self):
        self.assertSoupEquals(
            "<p>foo&#10000000000000;bar</p>", "<p>foobar</p>")
        self.assertSoupEquals(
            "<p>foo&#x10000000000000;bar</p>", "<p>foobar</p>")
        self.assertSoupEquals(
            "<p>foo&#1000000000;bar</p>", "<p>foobar</p>")

    # In lxml < 2.3.5, an empty doctype causes a segfault. Skip this
    # test if an old version of lxml is installed.

    @skipIf(
        not LXML_PRESENT or LXML_VERSION < (2,3,5,0),
        "Skipping doctype test for old version of lxml to avoid segfault.")
    def test_empty_doctype(self):
        soup = self.soup("<!DOCTYPE>")
        doctype = soup.contents[0]
        self.assertEqual("", doctype.strip())

    def test_beautifulstonesoup_is_xml_parser(self):
        # Make sure that the deprecated BSS class uses an xml builder
        # if one is installed.
        with warnings.catch_warnings(record=True) as w:
            soup = BeautifulStoneSoup("<b />")
        self.assertEqual(u"<b/>", unicode(soup.b))
        self.assertTrue("BeautifulStoneSoup class is deprecated" in str(w[0].message))

    def test_real_xhtml_document(self):
        """lxml strips the XML definition from an XHTML doc, which is fine."""
        markup = b"""<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN">
<html xmlns="http://www.w3.org/1999/xhtml">
<head><title>Hello.</title></head>
<body>Goodbye.</body>
</html>"""
        soup = self.soup(markup)
        self.assertEqual(
            soup.encode("utf-8").replace(b"\n", b''),
            markup.replace(b'\n', b'').replace(
                b'<?xml version="1.0" encoding="utf-8"?>', b''))


@skipIf(
    not LXML_PRESENT,
    "lxml seems not to be present, not testing its XML tree builder.")
class LXMLXMLTreeBuilderSmokeTest(SoupTest, XMLTreeBuilderSmokeTest):
    """See ``HTMLTreeBuilderSmokeTest``."""

    @property
    def default_builder(self):
        return LXMLTreeBuilderForXML()
@@ -1,434 +0,0 @@
# -*- coding: utf-8 -*-
"""Tests of Beautiful Soup as a whole."""

import logging
import unittest
import sys
import tempfile

from bs4 import (
    BeautifulSoup,
    BeautifulStoneSoup,
)
from bs4.element import (
    CharsetMetaAttributeValue,
    ContentMetaAttributeValue,
    SoupStrainer,
    NamespacedAttribute,
    )
import bs4.dammit
from bs4.dammit import (
    EntitySubstitution,
    UnicodeDammit,
)
from bs4.testing import (
    SoupTest,
    skipIf,
)
import warnings

try:
    from bs4.builder import LXMLTreeBuilder, LXMLTreeBuilderForXML
    LXML_PRESENT = True
except ImportError, e:
    LXML_PRESENT = False

PYTHON_2_PRE_2_7 = (sys.version_info < (2,7))
PYTHON_3_PRE_3_2 = (sys.version_info[0] == 3 and sys.version_info < (3,2))

class TestConstructor(SoupTest):

    def test_short_unicode_input(self):
        data = u"<h1>éé</h1>"
        soup = self.soup(data)
        self.assertEqual(u"éé", soup.h1.string)

    def test_embedded_null(self):
        data = u"<h1>foo\0bar</h1>"
        soup = self.soup(data)
        self.assertEqual(u"foo\0bar", soup.h1.string)


class TestDeprecatedConstructorArguments(SoupTest):

    def test_parseOnlyThese_renamed_to_parse_only(self):
        with warnings.catch_warnings(record=True) as w:
            soup = self.soup("<a><b></b></a>", parseOnlyThese=SoupStrainer("b"))
        msg = str(w[0].message)
        self.assertTrue("parseOnlyThese" in msg)
        self.assertTrue("parse_only" in msg)
        self.assertEqual(b"<b></b>", soup.encode())

    def test_fromEncoding_renamed_to_from_encoding(self):
        with warnings.catch_warnings(record=True) as w:
            utf8 = b"\xc3\xa9"
            soup = self.soup(utf8, fromEncoding="utf8")
        msg = str(w[0].message)
        self.assertTrue("fromEncoding" in msg)
        self.assertTrue("from_encoding" in msg)
        self.assertEqual("utf8", soup.original_encoding)

    def test_unrecognized_keyword_argument(self):
        self.assertRaises(
            TypeError, self.soup, "<a>", no_such_argument=True)

class TestWarnings(SoupTest):

    def test_disk_file_warning(self):
        filehandle = tempfile.NamedTemporaryFile()
        filename = filehandle.name
        try:
            with warnings.catch_warnings(record=True) as w:
                soup = self.soup(filename)
            msg = str(w[0].message)
            self.assertTrue("looks like a filename" in msg)
        finally:
            filehandle.close()

        # The file no longer exists, so Beautiful Soup will no longer issue the warning.
        with warnings.catch_warnings(record=True) as w:
            soup = self.soup(filename)
        self.assertEqual(0, len(w))

    def test_url_warning(self):
        with warnings.catch_warnings(record=True) as w:
            soup = self.soup("http://www.crummy.com/")
        msg = str(w[0].message)
        self.assertTrue("looks like a URL" in msg)

        with warnings.catch_warnings(record=True) as w:
            soup = self.soup("http://www.crummy.com/ is great")
        self.assertEqual(0, len(w))

class TestSelectiveParsing(SoupTest):

    def test_parse_with_soupstrainer(self):
        markup = "No<b>Yes</b><a>No<b>Yes <c>Yes</c></b>"
        strainer = SoupStrainer("b")
        soup = self.soup(markup, parse_only=strainer)
        self.assertEqual(soup.encode(), b"<b>Yes</b><b>Yes <c>Yes</c></b>")


class TestEntitySubstitution(unittest.TestCase):
    """Standalone tests of the EntitySubstitution class."""
    def setUp(self):
        self.sub = EntitySubstitution

    def test_simple_html_substitution(self):
        # Unicode characters corresponding to named HTML entities
        # are substituted, and no others.
        s = u"foo\u2200\N{SNOWMAN}\u00f5bar"
        self.assertEqual(self.sub.substitute_html(s),
                         u"foo&forall;\N{SNOWMAN}&otilde;bar")

    def test_smart_quote_substitution(self):
        # MS smart quotes are a common source of frustration, so we
        # give them a special test.
        quotes = b"\x91\x92foo\x93\x94"
        dammit = UnicodeDammit(quotes)
        self.assertEqual(self.sub.substitute_html(dammit.markup),
                         "&lsquo;&rsquo;foo&ldquo;&rdquo;")

    def test_xml_conversion_includes_no_quotes_if_make_quoted_attribute_is_false(self):
        s = 'Welcome to "my bar"'
        self.assertEqual(self.sub.substitute_xml(s, False), s)

    def test_xml_attribute_quoting_normally_uses_double_quotes(self):
        self.assertEqual(self.sub.substitute_xml("Welcome", True),
                         '"Welcome"')
        self.assertEqual(self.sub.substitute_xml("Bob's Bar", True),
                         '"Bob\'s Bar"')

    def test_xml_attribute_quoting_uses_single_quotes_when_value_contains_double_quotes(self):
        s = 'Welcome to "my bar"'
        self.assertEqual(self.sub.substitute_xml(s, True),
                         "'Welcome to \"my bar\"'")

    def test_xml_attribute_quoting_escapes_single_quotes_when_value_contains_both_single_and_double_quotes(self):
        s = 'Welcome to "Bob\'s Bar"'
        self.assertEqual(
            self.sub.substitute_xml(s, True),
            '"Welcome to &quot;Bob\'s Bar&quot;"')

    def test_xml_quotes_arent_escaped_when_value_is_not_being_quoted(self):
        quoted = 'Welcome to "Bob\'s Bar"'
        self.assertEqual(self.sub.substitute_xml(quoted), quoted)

    def test_xml_quoting_handles_angle_brackets(self):
        self.assertEqual(
            self.sub.substitute_xml("foo<bar>"),
            "foo&lt;bar&gt;")

    def test_xml_quoting_handles_ampersands(self):
        self.assertEqual(self.sub.substitute_xml("AT&T"), "AT&amp;T")

    def test_xml_quoting_including_ampersands_when_they_are_part_of_an_entity(self):
        self.assertEqual(
            self.sub.substitute_xml("&Aacute;T&T"),
            "&amp;Aacute;T&amp;T")

    def test_xml_quoting_ignoring_ampersands_when_they_are_part_of_an_entity(self):
        self.assertEqual(
            self.sub.substitute_xml_containing_entities("&Aacute;T&T"),
            "&Aacute;T&amp;T")

    def test_quotes_not_html_substituted(self):
        """There's no need to do this except inside attribute values."""
        text = 'Bob\'s "bar"'
        self.assertEqual(self.sub.substitute_html(text), text)


class TestEncodingConversion(SoupTest):
    # Test Beautiful Soup's ability to decode and encode from various
    # encodings.

    def setUp(self):
        super(TestEncodingConversion, self).setUp()
        self.unicode_data = u'<html><head><meta charset="utf-8"/></head><body><foo>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</foo></body></html>'
        self.utf8_data = self.unicode_data.encode("utf-8")
        # Just so you know what it looks like.
        self.assertEqual(
            self.utf8_data,
            b'<html><head><meta charset="utf-8"/></head><body><foo>Sacr\xc3\xa9 bleu!</foo></body></html>')

    def test_ascii_in_unicode_out(self):
        # ASCII input is converted to Unicode. The original_encoding
        # attribute is set to 'utf-8', a superset of ASCII.
        chardet = bs4.dammit.chardet_dammit
        logging.disable(logging.WARNING)
        try:
            def noop(str):
                return None
            # Disable chardet, which will realize that the ASCII is ASCII.
            bs4.dammit.chardet_dammit = noop
            ascii = b"<foo>a</foo>"
            soup_from_ascii = self.soup(ascii)
            unicode_output = soup_from_ascii.decode()
            self.assertTrue(isinstance(unicode_output, unicode))
            self.assertEqual(unicode_output, self.document_for(ascii.decode()))
            self.assertEqual(soup_from_ascii.original_encoding.lower(), "utf-8")
        finally:
            logging.disable(logging.NOTSET)
            bs4.dammit.chardet_dammit = chardet

    def test_unicode_in_unicode_out(self):
        # Unicode input is left alone. The original_encoding attribute
        # is not set.
        soup_from_unicode = self.soup(self.unicode_data)
        self.assertEqual(soup_from_unicode.decode(), self.unicode_data)
        self.assertEqual(soup_from_unicode.foo.string, u'Sacr\xe9 bleu!')
        self.assertEqual(soup_from_unicode.original_encoding, None)

    def test_utf8_in_unicode_out(self):
        # UTF-8 input is converted to Unicode. The original_encoding
        # attribute is set.
        soup_from_utf8 = self.soup(self.utf8_data)
        self.assertEqual(soup_from_utf8.decode(), self.unicode_data)
        self.assertEqual(soup_from_utf8.foo.string, u'Sacr\xe9 bleu!')

    def test_utf8_out(self):
        # The internal data structures can be encoded as UTF-8.
        soup_from_unicode = self.soup(self.unicode_data)
        self.assertEqual(soup_from_unicode.encode('utf-8'), self.utf8_data)

    @skipIf(
        PYTHON_2_PRE_2_7 or PYTHON_3_PRE_3_2,
        "Bad HTMLParser detected; skipping test of non-ASCII characters in attribute name.")
    def test_attribute_name_containing_unicode_characters(self):
        markup = u'<div><a \N{SNOWMAN}="snowman"></a></div>'
        self.assertEqual(self.soup(markup).div.encode("utf8"), markup.encode("utf8"))

class TestUnicodeDammit(unittest.TestCase):
    """Standalone tests of UnicodeDammit."""

    def test_unicode_input(self):
        markup = u"I'm already Unicode! \N{SNOWMAN}"
        dammit = UnicodeDammit(markup)
        self.assertEqual(dammit.unicode_markup, markup)

    def test_smart_quotes_to_unicode(self):
        markup = b"<foo>\x91\x92\x93\x94</foo>"
        dammit = UnicodeDammit(markup)
        self.assertEqual(
            dammit.unicode_markup, u"<foo>\u2018\u2019\u201c\u201d</foo>")

    def test_smart_quotes_to_xml_entities(self):
        markup = b"<foo>\x91\x92\x93\x94</foo>"
        dammit = UnicodeDammit(markup, smart_quotes_to="xml")
        self.assertEqual(
            dammit.unicode_markup, "<foo>&#x2018;&#x2019;&#x201C;&#x201D;</foo>")

    def test_smart_quotes_to_html_entities(self):
        markup = b"<foo>\x91\x92\x93\x94</foo>"
        dammit = UnicodeDammit(markup, smart_quotes_to="html")
        self.assertEqual(
            dammit.unicode_markup, "<foo>&lsquo;&rsquo;&ldquo;&rdquo;</foo>")

    def test_smart_quotes_to_ascii(self):
        markup = b"<foo>\x91\x92\x93\x94</foo>"
        dammit = UnicodeDammit(markup, smart_quotes_to="ascii")
        self.assertEqual(
            dammit.unicode_markup, """<foo>''""</foo>""")

    def test_detect_utf8(self):
        utf8 = b"\xc3\xa9"
        dammit = UnicodeDammit(utf8)
        self.assertEqual(dammit.unicode_markup, u'\xe9')
        self.assertEqual(dammit.original_encoding.lower(), 'utf-8')

    def test_convert_hebrew(self):
        hebrew = b"\xed\xe5\xec\xf9"
        dammit = UnicodeDammit(hebrew, ["iso-8859-8"])
        self.assertEqual(dammit.original_encoding.lower(), 'iso-8859-8')
        self.assertEqual(dammit.unicode_markup, u'\u05dd\u05d5\u05dc\u05e9')

    def test_dont_see_smart_quotes_where_there_are_none(self):
        utf_8 = b"\343\202\261\343\203\274\343\202\277\343\202\244 Watch"
        dammit = UnicodeDammit(utf_8)
        self.assertEqual(dammit.original_encoding.lower(), 'utf-8')
        self.assertEqual(dammit.unicode_markup.encode("utf-8"), utf_8)

    def test_ignore_inappropriate_codecs(self):
        utf8_data = u"Räksmörgås".encode("utf-8")
        dammit = UnicodeDammit(utf8_data, ["iso-8859-8"])
        self.assertEqual(dammit.original_encoding.lower(), 'utf-8')

    def test_ignore_invalid_codecs(self):
        utf8_data = u"Räksmörgås".encode("utf-8")
        for bad_encoding in ['.utf8', '...', 'utF---16.!']:
            dammit = UnicodeDammit(utf8_data, [bad_encoding])
            self.assertEqual(dammit.original_encoding.lower(), 'utf-8')

    def test_detect_html5_style_meta_tag(self):

        for data in (
            b'<html><meta charset="euc-jp" /></html>',
            b"<html><meta charset='euc-jp' /></html>",
            b"<html><meta charset=euc-jp /></html>",
            b"<html><meta charset=euc-jp/></html>"):
            dammit = UnicodeDammit(data, is_html=True)
            self.assertEqual(
                "euc-jp", dammit.original_encoding)

    def test_last_ditch_entity_replacement(self):
        # This is a UTF-8 document that contains bytestrings
        # completely incompatible with UTF-8 (ie. encoded with some other
        # encoding).
        #
        # Since there is no consistent encoding for the document,
        # Unicode, Dammit will eventually encode the document as UTF-8
        # and encode the incompatible characters as REPLACEMENT
        # CHARACTER.
        #
        # If chardet is installed, it will detect that the document
        # can be converted into ISO-8859-1 without errors. This happens
        # to be the wrong encoding, but it is a consistent encoding, so the
        # code we're testing here won't run.
        #
        # So we temporarily disable chardet if it's present.
        doc = b"""\357\273\277<?xml version="1.0" encoding="UTF-8"?>
<html><b>\330\250\330\252\330\261</b>
<i>\310\322\321\220\312\321\355\344</i></html>"""
        chardet = bs4.dammit.chardet_dammit
        logging.disable(logging.WARNING)
        try:
            def noop(str):
                return None
            bs4.dammit.chardet_dammit = noop
            dammit = UnicodeDammit(doc)
            self.assertEqual(True, dammit.contains_replacement_characters)
            self.assertTrue(u"\ufffd" in dammit.unicode_markup)

            soup = BeautifulSoup(doc, "html.parser")
            self.assertTrue(soup.contains_replacement_characters)
        finally:
            logging.disable(logging.NOTSET)
            bs4.dammit.chardet_dammit = chardet

    def test_byte_order_mark_removed(self):
        # A document written in UTF-16LE will have its byte order marker stripped.
        data = b'\xff\xfe<\x00a\x00>\x00\xe1\x00\xe9\x00<\x00/\x00a\x00>\x00'
        dammit = UnicodeDammit(data)
        self.assertEqual(u"<a>áé</a>", dammit.unicode_markup)
        self.assertEqual("utf-16le", dammit.original_encoding)

    def test_detwingle(self):
        # Here's a UTF8 document.
        utf8 = (u"\N{SNOWMAN}" * 3).encode("utf8")

        # Here's a Windows-1252 document.
        windows_1252 = (
            u"\N{LEFT DOUBLE QUOTATION MARK}Hi, I like Windows!"
            u"\N{RIGHT DOUBLE QUOTATION MARK}").encode("windows_1252")

        # Through some unholy alchemy, they've been stuck together.
        doc = utf8 + windows_1252 + utf8

        # The document can't be turned into UTF-8:
        self.assertRaises(UnicodeDecodeError, doc.decode, "utf8")

        # Unicode, Dammit thinks the whole document is Windows-1252,
        # and decodes it into "☃☃☃“Hi, I like Windows!”☃☃☃"

        # But if we run it through fix_embedded_windows_1252, it's fixed:

        fixed = UnicodeDammit.detwingle(doc)
        self.assertEqual(
            u"☃☃☃“Hi, I like Windows!”☃☃☃", fixed.decode("utf8"))

    def test_detwingle_ignores_multibyte_characters(self):
        # Each of these characters has a UTF-8 representation ending
        # in \x93. \x93 is a smart quote if interpreted as
        # Windows-1252. But our code knows to skip over multibyte
        # UTF-8 characters, so they'll survive the process unscathed.
        for tricky_unicode_char in (
            u"\N{LATIN SMALL LIGATURE OE}", # 2-byte char '\xc5\x93'
            u"\N{LATIN SUBSCRIPT SMALL LETTER X}", # 3-byte char '\xe2\x82\x93'
            u"\xf0\x90\x90\x93", # This is a CJK character, not sure which one.
            ):
            input = tricky_unicode_char.encode("utf8")
            self.assertTrue(input.endswith(b'\x93'))
            output = UnicodeDammit.detwingle(input)
            self.assertEqual(output, input)

class TestNamedspacedAttribute(SoupTest):

    def test_name_may_be_none(self):
        a = NamespacedAttribute("xmlns", None)
        self.assertEqual(a, "xmlns")

    def test_attribute_is_equivalent_to_colon_separated_string(self):
        a = NamespacedAttribute("a", "b")
        self.assertEqual("a:b", a)

    def test_attributes_are_equivalent_if_prefix_and_name_identical(self):
        a = NamespacedAttribute("a", "b", "c")
        b = NamespacedAttribute("a", "b", "c")
        self.assertEqual(a, b)

        # The actual namespace is not considered.
        c = NamespacedAttribute("a", "b", None)
        self.assertEqual(a, c)

        # But name and prefix are important.
        d = NamespacedAttribute("a", "z", "c")
        self.assertNotEqual(a, d)

        e = NamespacedAttribute("z", "b", "c")
        self.assertNotEqual(a, e)


class TestAttributeValueWithCharsetSubstitution(unittest.TestCase):

    def test_charset_meta_attribute_value(self):
        value = CharsetMetaAttributeValue("euc-jp")
        self.assertEqual("euc-jp", value)
        self.assertEqual("euc-jp", value.original_value)
        self.assertEqual("utf8", value.encode("utf8"))


    def test_content_meta_attribute_value(self):
        value = ContentMetaAttributeValue("text/html; charset=euc-jp")
        self.assertEqual("text/html; charset=euc-jp", value)
        self.assertEqual("text/html; charset=euc-jp", value.original_value)
        self.assertEqual("text/html; charset=utf8", value.encode("utf8"))
File diff suppressed because it is too large
@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python
# -*- coding: iso-8859-1 -*-
#
# progressbar - Text progressbar library for python.

@@ -11,4 +11,4 @@ def init_logger(logfile, loglevel):
|
||||
logging.basicConfig(level=numeric_level, filename=logfile, format=FORMAT)
|
||||
|
||||
class NotFoundError(Exception):
|
||||
pass
|
||||
pass
|
||||
@@ -19,7 +19,6 @@ class PRTable(object):
|
||||
def __init__(self, conn, table, nohist):
|
||||
self.conn = conn
|
||||
self.nohist = nohist
|
||||
self.dirty = False
|
||||
if nohist:
|
||||
self.table = "%s_nohist" % table
|
||||
else:
|
||||
@@ -48,11 +47,6 @@ class PRTable(object):
|
||||
self.conn.commit()
|
||||
self._execute("BEGIN EXCLUSIVE TRANSACTION")
|
||||
|
||||
def sync_if_dirty(self):
|
||||
if self.dirty:
|
||||
self.sync()
|
||||
self.dirty = False
|
||||
|
||||
def _getValueHist(self, version, pkgarch, checksum):
|
||||
data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table,
|
||||
(version, pkgarch, checksum))
|
||||
@@ -68,8 +62,6 @@ class PRTable(object):
|
||||
except sqlite3.IntegrityError as exc:
|
||||
logger.error(str(exc))
|
||||
|
||||
self.dirty = True
|
||||
|
||||
data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table,
|
||||
(version, pkgarch, checksum))
|
||||
row=data.fetchone()
|
||||
@@ -97,8 +89,6 @@ class PRTable(object):
|
||||
logger.error(str(exc))
|
||||
self.conn.rollback()
|
||||
|
||||
self.dirty = True
|
||||
|
||||
data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table,
|
||||
(version, pkgarch, checksum))
|
||||
row=data.fetchone()
|
||||
@@ -128,8 +118,6 @@ class PRTable(object):
|
||||
except sqlite3.IntegrityError as exc:
|
||||
logger.error(str(exc))
|
||||
|
||||
self.dirty = True
|
||||
|
||||
data = self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table,
|
||||
(version, pkgarch, checksum))
|
||||
row = data.fetchone()
|
||||
@@ -151,8 +139,6 @@ class PRTable(object):
|
||||
except sqlite3.IntegrityError as exc:
|
||||
logger.error(str(exc))
|
||||
|
||||
self.dirty = True
|
||||
|
||||
data = self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=? AND value>=?;" % self.table,
|
||||
(version,pkgarch,checksum,value))
|
||||
row=data.fetchone()
|
||||
@@ -234,8 +220,6 @@ class PRData(object):
|
||||
raise e
|
||||
self.connection=sqlite3.connect(self.filename, isolation_level="EXCLUSIVE", check_same_thread = False)
|
||||
self.connection.row_factory=sqlite3.Row
|
||||
self.connection.execute("pragma synchronous = off;")
|
||||
self.connection.execute("PRAGMA journal_mode = WAL;")
|
||||
self._tables={}
|
||||
|
||||
def __del__(self):
|
||||
|
||||
@@ -38,17 +38,8 @@ singleton = None
|
||||
class PRServer(SimpleXMLRPCServer):
|
||||
def __init__(self, dbfile, logfile, interface, daemon=True):
|
||||
''' constructor '''
|
||||
import socket
|
||||
try:
|
||||
SimpleXMLRPCServer.__init__(self, interface,
|
||||
logRequests=False, allow_none=True)
|
||||
except socket.error:
|
||||
ip=socket.gethostbyname(interface[0])
|
||||
port=interface[1]
|
||||
msg="PR Server unable to bind to %s:%s\n" % (ip, port)
|
||||
sys.stderr.write(msg)
|
||||
raise PRServiceConfigError
|
||||
|
||||
SimpleXMLRPCServer.__init__(self, interface,
|
||||
logRequests=False, allow_none=True)
|
||||
self.dbfile=dbfile
|
||||
self.daemon=daemon
|
||||
self.logfile=logfile
|
||||
@@ -76,19 +67,11 @@ class PRServer(SimpleXMLRPCServer):
|
||||
In addition, exception handling is done here.
|
||||
|
||||
"""
|
||||
iter_count = 1
|
||||
# With 60 iterations between syncs and a 0.5 second timeout between
|
||||
# iterations, this will sync if dirty every ~30 seconds.
|
||||
iterations_between_sync = 60
|
||||
|
||||
while True:
|
||||
(request, client_address) = self.requestqueue.get()
|
||||
try:
|
||||
self.finish_request(request, client_address)
|
||||
self.shutdown_request(request)
|
||||
iter_count = (iter_count + 1) % iterations_between_sync
|
||||
if iter_count == 0:
|
||||
self.table.sync_if_dirty()
|
||||
except:
|
||||
self.handle_error(request, client_address)
|
||||
self.shutdown_request(request)
|
||||
|
||||
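The last hunk above amortizes database flushes: the request loop counts iterations and calls sync_if_dirty() every 60th pass, so with a 0.5 second queue timeout, dirty data reaches disk roughly every 30 seconds. A self-contained sketch of the same pattern (Python 3 for brevity; all names are hypothetical, not taken from the prserv code):

    # Hedged sketch of the periodic "sync if dirty" loop.
    import queue

    work = queue.Queue()
    ITERATIONS_BETWEEN_SYNC = 60
    iter_count = 1
    dirty = False

    def sync_if_dirty():
        global dirty
        if dirty:
            print("flushing to disk")   # stand-in for the real commit
            dirty = False

    while True:
        try:
            item = work.get(timeout=0.5)
        except queue.Empty:
            sync_if_dirty()             # idle is a good moment to flush
            continue
        dirty = True                    # pretend handling the item wrote data
        iter_count = (iter_count + 1) % ITERATIONS_BETWEEN_SYNC
        if iter_count == 0:
            sync_if_dirty()
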
@@ -1,8 +0,0 @@
from django.contrib import admin
from django.contrib.admin.filters import RelatedFieldListFilter
from .models import BuildEnvironment

class BuildEnvironmentAdmin(admin.ModelAdmin):
    pass

admin.site.register(BuildEnvironment, BuildEnvironmentAdmin)
@@ -1,202 +0,0 @@
#
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# BitBake Toaster Implementation
#
# Copyright (C) 2014 Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.


import os
import sys
import re
from django.db import transaction
from django.db.models import Q
from bldcontrol.models import BuildEnvironment, BRLayer, BRVariable, BRTarget, BRBitbake

# load Bitbake components
path = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
sys.path.insert(0, path)
import bb.server.xmlrpc


class BitbakeController(object):
    """ This is the basic class that controls a bitbake server.
        How the server is started and acquired is outside the scope of this class.
    """

    def __init__(self, connection):
        self.connection = connection

    def _runCommand(self, command):
        result, error = self.connection.connection.runCommand(command)
        if error:
            raise Exception(error)
        return result

    def disconnect(self):
        return self.connection.removeClient()

    def setVariable(self, name, value):
        return self._runCommand(["setVariable", name, value])

    def build(self, targets, task = None):
        if task is None:
            task = "build"
        return self._runCommand(["buildTargets", targets, task])



def getBuildEnvironmentController(**kwargs):
    """ Gets you a BuildEnvironmentController that encapsulates a build environment,
        based on the query dictionary sent in.

        This is used to retrieve, for example, the currently running BE from inside
        the toaster UI, or to find a new BE to start a new build in.

        The return object MUST always be a BuildEnvironmentController.
    """
    from localhostbecontroller import LocalhostBEController
    from sshbecontroller import SSHBEController

    be = BuildEnvironment.objects.filter(Q(**kwargs))[0]
    if be.betype == BuildEnvironment.TYPE_LOCAL:
        return LocalhostBEController(be)
    elif be.betype == BuildEnvironment.TYPE_SSH:
        return SSHBEController(be)
    else:
        raise Exception("FIXME: Implement BEC for type %s" % str(be.betype))


def _getgitcheckoutdirectoryname(url):
    """ Utility that returns the last component of a git path as a directory name
    """
    import re
    components = re.split(r'[:\.\/]', url)
    return components[-2] if components[-1] == "git" else components[-1]


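For illustration, a hedged sketch of what the helper above returns for representative URLs (the example URLs are assumptions, not taken from a real configuration):

    # _getgitcheckoutdirectoryname splits on ':', '.' and '/', then takes the
    # last component, or the one before a trailing "git":
    #   "git://git.yoctoproject.org/poky.git" -> [..., "poky", "git"] -> "poky"
    #   "git://git.yoctoproject.org/poky"     -> [..., "poky"]        -> "poky"
    assert _getgitcheckoutdirectoryname("git://git.yoctoproject.org/poky.git") == "poky"
    assert _getgitcheckoutdirectoryname("git://git.yoctoproject.org/poky") == "poky"
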
class BuildEnvironmentController(object):
    """ BuildEnvironmentController (BEC) is the abstract class that defines the operations that MUST
        or SHOULD be supported by a Build Environment. It is used to establish the framework, and must
        not be instantiated directly by the user.

        Use the "getBuildEnvironmentController()" function to get a working BEC for your remote.

        How the BuildEnvironments are discovered is outside the scope of this class.

        You must derive this class to teach Toaster how to operate in your own infrastructure.
        We provide some specific BuildEnvironmentController classes that can be used either to
        directly set up Toaster infrastructure, or as a model for your own infrastructure set:

          * The localhost controller will run the Toaster BE on the same account as the web server
            (the current user if you are using the Django development web server)
            on the local machine, with the "build/" directory under the "poky/" source checkout directory.
            Bash is expected to be available.

          * The SSH controller will run the Toaster BE on a remote machine, where the current user
            can connect without a password (set up with either ssh-agent or passphrase-less key authentication).

    """
    def __init__(self, be):
        """ Takes a BuildEnvironment object as parameter that points to the settings of the BE.
        """
        self.be = be
        self.connection = None

    @staticmethod
    def _updateBBLayers(bblayerconf, layerlist):
        conflines = open(bblayerconf, "r").readlines()

        bblayerconffile = open(bblayerconf, "w")
        skip = 0
        for i in xrange(len(conflines)):
            if skip > 0:
                skip -= 1
                continue
            if conflines[i].startswith("# line added by toaster"):
                # the marker line and the BBLAYERS line after it belong to toaster; drop both
                skip = 1
            else:
                bblayerconffile.write(conflines[i])

        bblayerconffile.write("# line added by toaster build control\nBBLAYERS = \"" + " ".join(layerlist) + "\"")
        bblayerconffile.close()



    def startBBServer(self, brbe):
        """ Starts a BB server with the Toaster toasterui set up to record the builds, and no controlling UI.
            After this method executes, self.be bbaddress/bbport MUST point to a running and free server,
            and the bbstate MUST be updated to "started".
        """
        raise Exception("Must override in order to actually start the BB server")

    def stopBBServer(self):
        """ Stops the currently running BB server.
            The bbstate MUST be updated to "stopped".
            self.connection must be None.
        """
        raise Exception("Must override in order to actually stop the BB server")

    def setLayers(self, bbs, ls):
        """ Checks out the bitbake executor and layers from git repositories.
            Sets the layer variables in the config file, after validating local layer paths.
            "bbs" must be a 1-length list of BRBitbake objects.
            "ls" must be a list of BRLayer objects.

            A word of attention: by convention, the first layer for any build will be poky!
        """
        raise Exception("Must override setLayers")


    def getBBController(self, brbe):
        """ Returns a BitbakeController to an already started server; this is the point where the server
            starts if needed, or where we reconnect to the server if we can.
        """
        if not self.connection:
            self.startBBServer(brbe)
            self.be.lock = BuildEnvironment.LOCK_RUNNING
            self.be.save()

            server = bb.server.xmlrpc.BitBakeXMLRPCClient()
            server.initServer()
            server.saveConnectionDetails("%s:%s" % (self.be.bbaddress, self.be.bbport))
            self.connection = server.establishConnection([])

            self.be.bbtoken = self.connection.transport.connection_token
            self.be.save()

        return BitbakeController(self.connection)

    def getArtifact(self, path):
        """ This call returns an artifact identified by 'path'. How 'path' is interpreted is
            up to the implementing BEC. The return MUST be a REST URL where a GET will actually return
            the content of the artifact, e.g. for use as a "download link" in a web UI.
        """
        raise Exception("Must return the REST URL of the artifact")

    def release(self):
        """ This stops the server and releases any resources. After this point, all resources
            are unavailable for further reference.
        """
        raise Exception("Must override BE release")

class ShellCmdException(Exception):
    pass


class BuildSetupException(Exception):
    pass

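_updateBBLayers above rewrites conf/bblayers.conf by stripping any previously toaster-added marker line (plus the BBLAYERS line that follows it) and appending a fresh pair at the end. A hedged illustration of that rewrite (file contents invented):

    # bblayers.conf before:
    #   BBPATH = "${TOPDIR}"
    #   # line added by toaster build control
    #   BBLAYERS = "/old/meta"
    #
    # after _updateBBLayers(conf, ["/srv/poky/meta", "/srv/poky/meta-yocto"]):
    #   BBPATH = "${TOPDIR}"
    #   # line added by toaster build control
    #   BBLAYERS = "/srv/poky/meta /srv/poky/meta-yocto"
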
@@ -1,212 +0,0 @@
#
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# BitBake Toaster Implementation
#
# Copyright (C) 2014 Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.


import os
import sys
import re
from django.db import transaction
from django.db.models import Q
from bldcontrol.models import BuildEnvironment, BRLayer, BRVariable, BRTarget, BRBitbake
import subprocess

from toastermain import settings

from bbcontroller import BuildEnvironmentController, ShellCmdException, BuildSetupException, _getgitcheckoutdirectoryname

class LocalhostBEController(BuildEnvironmentController):
    """ Implementation of the BuildEnvironmentController for the localhost;
        this controller manages the default build directory,
        the server setup and system start and stop for the localhost-type build environment
    """

    def __init__(self, be):
        super(LocalhostBEController, self).__init__(be)
        self.dburl = settings.getDATABASE_URL()
        self.pokydirname = None
        self.islayerset = False

    def _shellcmd(self, command, cwd = None):
        if cwd is None:
            cwd = self.be.sourcedir

        p = subprocess.Popen(command, cwd = cwd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        (out,err) = p.communicate()
        if p.returncode:
            if len(err) == 0:
                err = "command: %s \n%s" % (command, out)
            else:
                err = "command: %s \n%s" % (command, err)
            raise ShellCmdException(err)
        else:
            return out

    def _createdirpath(self, path):
        from os.path import dirname as DN
        if path == "":
            raise Exception("Invalid path creation specified.")
        if not os.path.exists(DN(path)):
            self._createdirpath(DN(path))
        if not os.path.exists(path):
            os.mkdir(path, 0755)

    def _setupBE(self):
        assert self.pokydirname and os.path.exists(self.pokydirname)
        self._createdirpath(self.be.builddir)
        self._shellcmd("bash -c \"source %s/oe-init-build-env %s\"" % (self.pokydirname, self.be.builddir))

    def startBBServer(self, brbe):
        assert self.pokydirname and os.path.exists(self.pokydirname)
        assert self.islayerset

        try:
            os.remove(os.path.join(self.be.builddir, "toaster_ui.log"))
        except OSError as e:
            import errno
            if e.errno != errno.ENOENT:
                raise

        cmd = "bash -c \"source %s/oe-init-build-env %s && DATABASE_URL=%s source toaster start noweb brbe=%s\"" % (self.pokydirname, self.be.builddir, self.dburl, brbe)
        port = "-1"
        for i in self._shellcmd(cmd).split("\n"):
            if i.startswith("Bitbake server address"):
                port = i.split(" ")[-1]
                print "Found bitbake server port ", port

        def _toaster_ui_started(filepath):
            if not os.path.exists(filepath):
                return False
            with open(filepath, "r") as f:
                for line in f:
                    if line.startswith("NOTE: ToasterUI waiting for events"):
                        return True
            return False

        while not _toaster_ui_started(os.path.join(self.be.builddir, "toaster_ui.log")):
            import time
            print "DEBUG: Waiting for server to start"
            time.sleep(0.5)

        print("DEBUG: Started server")
        assert self.be.sourcedir and os.path.exists(self.be.builddir)
        self.be.bbaddress = "localhost"
        self.be.bbport = port
        self.be.bbstate = BuildEnvironment.SERVER_STARTED
        self.be.save()

    def stopBBServer(self):
        assert self.pokydirname and os.path.exists(self.pokydirname)
        assert self.islayerset
        print self._shellcmd("bash -c \"source %s/oe-init-build-env %s && %s source toaster stop\"" %
                             (self.pokydirname, self.be.builddir, (lambda: "" if self.be.bbtoken is None else "BBTOKEN=%s" % self.be.bbtoken)()))
        self.be.bbstate = BuildEnvironment.SERVER_STOPPED
        self.be.save()
        print "Stopped server"

    def setLayers(self, bitbakes, layers):
        """ A word of attention: by convention, the first layer for any build will be poky! """

        assert self.be.sourcedir is not None
        assert len(bitbakes) == 1
        # set layers in the layersource

        # 1. get a list of repos, and map dirpaths for each layer
        gitrepos = {}
        gitrepos[bitbakes[0].giturl] = []
        gitrepos[bitbakes[0].giturl].append( ("bitbake", bitbakes[0].dirpath, bitbakes[0].commit) )

        for layer in layers:
            # we don't process local URLs
            if layer.giturl.startswith("file://"):
                continue
            if not layer.giturl in gitrepos:
                gitrepos[layer.giturl] = []
            gitrepos[layer.giturl].append( (layer.name, layer.dirpath, layer.commit))
        for giturl in gitrepos.keys():
            commitid = gitrepos[giturl][0][2]
            for e in gitrepos[giturl]:
                if commitid != e[2]:
                    import pprint
                    raise BuildSetupException("More than one commit per git url, unsupported configuration: \n%s" % pprint.pformat(gitrepos))


        layerlist = []

        # 2. checkout the repositories
        for giturl in gitrepos.keys():
            localdirname = os.path.join(self.be.sourcedir, _getgitcheckoutdirectoryname(giturl))
            print "DEBUG: giturl ", giturl, "checking out in current directory", localdirname

            # make sure our directory is a git repository
            if os.path.exists(localdirname):
                if not giturl in self._shellcmd("git remote -v", localdirname):
                    raise BuildSetupException("Existing git repository at %s, but with different remotes (not '%s'). Aborting." % (localdirname, giturl))
            else:
                self._shellcmd("git clone \"%s\" \"%s\"" % (giturl, localdirname))
            # checkout the needed commit
            commit = gitrepos[giturl][0][2]

            # branch magic name "HEAD" will inhibit checkout
            if commit != "HEAD":
                print "DEBUG: checking out commit ", commit, "to", localdirname
                self._shellcmd("git fetch --all && git checkout \"%s\"" % commit, localdirname)

            # take the localdirname as poky dir if we can find the oe-init-build-env
            if self.pokydirname is None and os.path.exists(os.path.join(localdirname, "oe-init-build-env")):
                print "DEBUG: selected poky dir name", localdirname
                self.pokydirname = localdirname

                # make sure we have a working bitbake
                if not os.path.exists(os.path.join(self.pokydirname, 'bitbake')):
                    print "DEBUG: checking out bitbake into the poky dirname %s " % self.pokydirname
                    self._shellcmd("git clone -b \"%s\" \"%s\" \"%s\" " % (bitbakes[0].commit, bitbakes[0].giturl, os.path.join(self.pokydirname, 'bitbake')))

            # verify our repositories
            for name, dirpath, commit in gitrepos[giturl]:
                localdirpath = os.path.join(localdirname, dirpath)
                if not os.path.exists(localdirpath):
                    raise BuildSetupException("Cannot find layer git path '%s' in checked out repository '%s:%s'. Aborting." % (localdirpath, giturl, commit))

                if name != "bitbake":
                    layerlist.append(localdirpath.rstrip("/"))

        print "DEBUG: current layer list ", layerlist

        # 3. configure the build environment, so we have a conf/bblayers.conf
        assert self.pokydirname is not None
        self._setupBE()

        # 4. update the bblayers.conf
        bblayerconf = os.path.join(self.be.builddir, "conf/bblayers.conf")
        if not os.path.exists(bblayerconf):
            raise BuildSetupException("BE is not consistent: bblayers.conf file missing at %s" % bblayerconf)

        BuildEnvironmentController._updateBBLayers(bblayerconf, layerlist)

        self.islayerset = True
        return True

    def release(self):
        assert self.be.sourcedir and os.path.exists(self.be.builddir)
        import shutil
        shutil.rmtree(os.path.join(self.be.sourcedir, "build"))
        assert not os.path.exists(self.be.builddir)
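startBBServer above has no direct channel to the spawned server, so it polls toaster_ui.log until a known marker line appears. A generic, self-contained sketch of that wait-for-log-marker pattern, with a timeout added that the original loop lacks (marker text and paths are hypothetical):

    import os
    import time

    def wait_for_marker(logpath, marker, interval=0.5, timeout=60):
        # Poll a log file until a line starting with `marker` shows up,
        # or give up after `timeout` seconds.
        deadline = time.time() + timeout
        while time.time() < deadline:
            if os.path.exists(logpath):
                with open(logpath) as f:
                    if any(line.startswith(marker) for line in f):
                        return True
            time.sleep(interval)
        return False

    # e.g. wait_for_marker("toaster_ui.log", "NOTE: ToasterUI waiting for events")
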
@@ -1,150 +0,0 @@
from django.core.management.base import NoArgsCommand, CommandError
from django.db import transaction
from bldcontrol.bbcontroller import getBuildEnvironmentController, ShellCmdException
from bldcontrol.models import BuildRequest, BuildEnvironment, BRError
from orm.models import ToasterSetting
import os

def DN(path):
    if path is None:
        return ""
    else:
        return os.path.dirname(path)


class Command(NoArgsCommand):
    args = ""
    help = "Verifies that the configured settings are valid and usable, or prompts the user to fix the settings."


    def _find_first_path_for_file(self, startdirectory, filename, level = 0):
        if level < 0:
            return None
        dirs = []
        for i in os.listdir(startdirectory):
            j = os.path.join(startdirectory, i)
            if os.path.isfile(j):
                if i == filename:
                    return startdirectory
            elif os.path.isdir(j):
                dirs.append(j)
        for j in dirs:
            ret = self._find_first_path_for_file(j, filename, level - 1)
            if ret is not None:
                return ret
        return None

    def _recursive_list_directories(self, startdirectory, level = 0):
        if level < 0:
            return []
        dirs = []
        try:
            for i in os.listdir(startdirectory):
                j = os.path.join(startdirectory, i)
                if os.path.isdir(j):
                    dirs.append(j)
        except OSError:
            pass
        for j in dirs:
            dirs = dirs + self._recursive_list_directories(j, level - 1)
        return dirs


    def _get_suggested_sourcedir(self, be):
        if be.betype != BuildEnvironment.TYPE_LOCAL:
            return ""
        return DN(DN(DN(self._find_first_path_for_file(self.guesspath, "toasterconf.json", 4))))

    def _get_suggested_builddir(self, be):
        if be.betype != BuildEnvironment.TYPE_LOCAL:
            return ""
        return DN(self._find_first_path_for_file(DN(self.guesspath), "bblayers.conf", 4))


    def handle(self, **options):
        self.guesspath = DN(DN(DN(DN(DN(DN(DN(__file__)))))))
        # refuse to start if we have no build environments
        while BuildEnvironment.objects.count() == 0:
            print(" !! No build environments found. Toaster needs at least one build environment in order to be able to run builds.\n" +
                  "You can manually define build environments in the database table bldcontrol_buildenvironment.\n" +
                  "Or Toaster can define a simple localhost-based build environment for you.")

            i = raw_input(" -- Do you want to create a basic localhost build environment ? (Y/n) ")
            if not len(i) or i.startswith("y") or i.startswith("Y"):
                BuildEnvironment.objects.create(pk = 1, betype = 0)
            else:
                raise Exception("Toaster cannot start without build environments. Aborting.")


        # we make sure we have builddir and sourcedir for all defined build environments
        for be in BuildEnvironment.objects.all():
            def _verify_be():
                is_changed = False
                print("Verifying the Build Environment type %s id %d." % (be.get_betype_display(), be.pk))
                if len(be.sourcedir) == 0:
                    suggesteddir = self._get_suggested_sourcedir(be)
                    be.sourcedir = raw_input(" -- Layer sources checkout directory may not be empty [guessed \"%s\"]:" % suggesteddir)
                    if len(be.sourcedir) == 0 and len(suggesteddir) > 0:
                        be.sourcedir = suggesteddir
                    is_changed = True

                if not be.sourcedir.startswith("/"):
                    be.sourcedir = raw_input(" -- Layer sources checkout directory must be an absolute path:")
                    is_changed = True

                if len(be.builddir) == 0:
                    suggesteddir = self._get_suggested_builddir(be)
                    be.builddir = raw_input(" -- Build directory may not be empty [guessed \"%s\"]:" % suggesteddir)
                    if len(be.builddir) == 0 and len(suggesteddir) > 0:
                        be.builddir = suggesteddir
                    is_changed = True

                if not be.builddir.startswith("/"):
                    be.builddir = raw_input(" -- Build directory must be an absolute path:")
                    is_changed = True


                if is_changed:
                    print "Build configuration saved"
                    be.save()

                if is_changed and be.betype == BuildEnvironment.TYPE_LOCAL:
                    for dirname in self._recursive_list_directories(be.sourcedir,2):
                        if os.path.exists(os.path.join(dirname, ".templateconf")):
                            import subprocess
                            conffilepath, error = subprocess.Popen('bash -c ". '+os.path.join(dirname, ".templateconf")+'; echo \"\$TEMPLATECONF\""', shell=True, stdout=subprocess.PIPE).communicate()
                            conffilepath = os.path.join(conffilepath.strip(), "toasterconf.json")
                            candidatefilepath = os.path.join(dirname, conffilepath)
                            if os.path.exists(candidatefilepath):
                                i = raw_input(" -- Do you want to import basic layer configuration from \"%s\" ? (y/N):" % candidatefilepath)
                                if len(i) and i.upper()[0] == 'Y':
                                    from loadconf import Command as LoadConfigCommand

                                    LoadConfigCommand()._import_layer_config(candidatefilepath)
                                    # we run lsupdates after config update
                                    print "Layer configuration imported. Updating information from the layer source, please wait."
                                    from django.core.management import call_command
                                    call_command("lsupdates")

                                # we don't look for any other config files
                                return is_changed

                return is_changed

            while (_verify_be()):
                pass

        # verify that default settings are there
        if ToasterSetting.objects.filter(name = 'DEFAULT_RELEASE').count() != 1:
            ToasterSetting.objects.filter(name = 'DEFAULT_RELEASE').delete()
            ToasterSetting.objects.get_or_create(name = 'DEFAULT_RELEASE', value = '')

        # we are just starting up. we must not have any builds in progress, or build environments taken
        for b in BuildRequest.objects.filter(state = BuildRequest.REQ_INPROGRESS):
            BRError.objects.create(req = b, errtype = "toaster", errmsg = "Toaster found this build IN PROGRESS while Toaster started up. This is an inconsistent state, and the build was marked as failed")

        BuildRequest.objects.filter(state = BuildRequest.REQ_INPROGRESS).update(state = BuildRequest.REQ_FAILED)

        BuildEnvironment.objects.update(lock = BuildEnvironment.LOCK_FREE)

        return 0
@@ -1,174 +0,0 @@
from django.core.management.base import BaseCommand, CommandError
from orm.models import LayerSource, ToasterSetting, Branch, Layer, Layer_Version
from orm.models import BitbakeVersion, Release, ReleaseDefaultLayer, ReleaseLayerSourcePriority
import os

from checksettings import DN

def _reduce_canon_path(path):
    components = []
    for c in path.split("/"):
        if c == "..":
            del components[-1]
        elif c == ".":
            pass
        else:
            components.append(c)
    if len(components) < 2:
        components.append('')
    return "/".join(components)

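_reduce_canon_path collapses "." and ".." segments purely textually, without touching the filesystem. A couple of hedged examples of what it yields (inputs invented):

    # "/a/b/../c" -> components ["", "a", "c"] -> "/a/c"
    # "/a/./b"    -> components ["", "a", "b"] -> "/a/b"
    assert _reduce_canon_path("/a/b/../c") == "/a/c"
    assert _reduce_canon_path("/a/./b") == "/a/b"
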
def _get_id_for_sourcetype(s):
    for i in LayerSource.SOURCE_TYPE:
        if s == i[1]:
            return i[0]
    raise Exception("Could not find definition for sourcetype " + s)

class Command(BaseCommand):
    help = "Loads a toasterconf.json file in the database"
    args = "filepath"



    def _import_layer_config(self, filepath):
        if not os.path.exists(filepath) or not os.path.isfile(filepath):
            raise Exception("Failed to find toaster config file %s ." % filepath)

        import json, pprint
        data = json.loads(open(filepath, "r").read())

        # verify config file validity before updating settings
        for i in ['bitbake', 'releases', 'defaultrelease', 'config', 'layersources']:
            assert i in data

        def _read_git_url_from_local_repository(address):
            url = None
            # we detect the remote name at runtime
            import subprocess
            (remote, remote_name) = address.split(":", 1)
            cmd = subprocess.Popen("git remote -v", shell=True, cwd = os.path.dirname(filepath), stdout=subprocess.PIPE, stderr = subprocess.PIPE)
            (out,err) = cmd.communicate()
            if cmd.returncode != 0:
                raise Exception("Error while importing layer vcs_url: git error: %s" % err)
            for line in out.split("\n"):
                try:
                    (name, path) = line.split("\t", 1)
                    if name == remote_name:
                        url = path.split(" ")[0]
                        break
                except ValueError:
                    pass
            if url is None:
                raise Exception("Error while looking for remote \"%s\" in \"%s\"" % (remote_name, os.path.dirname(filepath)))
            return url


        # import bitbake data
        for bvi in data['bitbake']:
            bvo, created = BitbakeVersion.objects.get_or_create(name=bvi['name'])
            bvo.giturl = bvi['giturl']
            if bvi['giturl'].startswith("remote:"):
                bvo.giturl = _read_git_url_from_local_repository(bvi['giturl'])
            bvo.branch = bvi['branch']
            bvo.dirpath = bvi['dirpath']
            bvo.save()

        # set the layer sources
        for lsi in data['layersources']:
            assert 'sourcetype' in lsi
            assert 'apiurl' in lsi
            assert 'name' in lsi
            assert 'branches' in lsi


            if _get_id_for_sourcetype(lsi['sourcetype']) == LayerSource.TYPE_LAYERINDEX or lsi['apiurl'].startswith("/"):
                apiurl = lsi['apiurl']
            else:
                apiurl = _reduce_canon_path(os.path.join(DN(os.path.abspath(filepath)), lsi['apiurl']))

            assert ((_get_id_for_sourcetype(lsi['sourcetype']) == LayerSource.TYPE_LAYERINDEX) or apiurl.startswith("/")), (lsi['sourcetype'],apiurl)

            try:
                ls = LayerSource.objects.get(sourcetype = _get_id_for_sourcetype(lsi['sourcetype']), apiurl = apiurl)
            except LayerSource.DoesNotExist:
                ls = LayerSource.objects.create(
                        name = lsi['name'],
                        sourcetype = _get_id_for_sourcetype(lsi['sourcetype']),
                        apiurl = apiurl
                    )

            layerbranches = []
            for branchname in lsi['branches']:
                bo, created = Branch.objects.get_or_create(layer_source = ls, name = branchname)
                layerbranches.append(bo)

            if 'layers' in lsi:
                for layerinfo in lsi['layers']:
                    lo, created = Layer.objects.get_or_create(layer_source = ls, name = layerinfo['name'])
                    if layerinfo['local_path'].startswith("/"):
                        lo.local_path = layerinfo['local_path']
                    else:
                        lo.local_path = _reduce_canon_path(os.path.join(ls.apiurl, layerinfo['local_path']))

                    if not os.path.exists(lo.local_path):
                        raise Exception("Local layer path %s must exist." % lo.local_path)

                    lo.vcs_url = layerinfo['vcs_url']
                    if layerinfo['vcs_url'].startswith("remote:"):
                        lo.vcs_url = _read_git_url_from_local_repository(layerinfo['vcs_url'])
                    else:
                        lo.vcs_url = layerinfo['vcs_url']

                    if 'layer_index_url' in layerinfo:
                        lo.layer_index_url = layerinfo['layer_index_url']
                    lo.save()

                    for branch in layerbranches:
                        lvo, created = Layer_Version.objects.get_or_create(layer_source = ls,
                                                                           up_branch = branch,
                                                                           commit = branch.name,
                                                                           layer = lo)
                        lvo.dirpath = layerinfo['dirpath']
                        lvo.save()
        # set releases
        for ri in data['releases']:
            bvo = BitbakeVersion.objects.get(name = ri['bitbake'])
            assert bvo is not None

            ro, created = Release.objects.get_or_create(name = ri['name'], bitbake_version = bvo, branch_name = ri['branch'])
            ro.description = ri['description']
            ro.helptext = ri['helptext']
            ro.save()

            # save layer source priority for release
            for ls_name in ri['layersourcepriority'].keys():
                rlspo, created = ReleaseLayerSourcePriority.objects.get_or_create(release = ro, layer_source = LayerSource.objects.get(name=ls_name))
                rlspo.priority = ri['layersourcepriority'][ls_name]
                rlspo.save()

            for dli in ri['defaultlayers']:
                # find layers with the same name
                ReleaseDefaultLayer.objects.get_or_create( release = ro, layer_name = dli)

        # set default release
        if ToasterSetting.objects.filter(name = "DEFAULT_RELEASE").count() > 0:
            ToasterSetting.objects.filter(name = "DEFAULT_RELEASE").update(value = data['defaultrelease'])
        else:
            ToasterSetting.objects.create(name = "DEFAULT_RELEASE", value = data['defaultrelease'])

        # set default config variables
        for configname in data['config']:
            if ToasterSetting.objects.filter(name = "DEFCONF_" + configname).count() > 0:
                ToasterSetting.objects.filter(name = "DEFCONF_" + configname).update(value = data['config'][configname])
            else:
                ToasterSetting.objects.create(name = "DEFCONF_" + configname, value = data['config'][configname])


    def handle(self, *args, **options):
        if len(args) == 0:
            raise CommandError("Need a path to the toasterconf.json file")
        filepath = args[0]
        self._import_layer_config(filepath)


@@ -1,95 +0,0 @@
from django.core.management.base import NoArgsCommand, CommandError
from django.db import transaction
from orm.models import Build
from bldcontrol.bbcontroller import getBuildEnvironmentController, ShellCmdException, BuildSetupException
from bldcontrol.models import BuildRequest, BuildEnvironment, BRError
import os

class Command(NoArgsCommand):
    args = ""
    help = "Schedules and executes build requests as possible. Does not return (interrupt with Ctrl-C)"


    @transaction.commit_on_success
    def _selectBuildEnvironment(self):
        bec = getBuildEnvironmentController(lock = BuildEnvironment.LOCK_FREE)
        bec.be.lock = BuildEnvironment.LOCK_LOCK
        bec.be.save()
        return bec

    @transaction.commit_on_success
    def _selectBuildRequest(self):
        br = BuildRequest.objects.filter(state = BuildRequest.REQ_QUEUED).order_by('pk')[0]
        br.state = BuildRequest.REQ_INPROGRESS
        br.save()
        return br

    def schedule(self):
        import traceback
        try:
            br = None
            try:
                # select the build environment and the request to build
                br = self._selectBuildRequest()
            except IndexError as e:
                return
            try:
                bec = self._selectBuildEnvironment()
            except IndexError as e:
                # we could not find a BEC; postpone the BR
                br.state = BuildRequest.REQ_QUEUED
                br.save()
                print "No build env"
                return

            print "Build %s, Environment %s" % (br, bec.be)
            # let the build request know where it is being executed
            br.environment = bec.be
            br.save()

            # set up the build environment with the needed layers
            bec.setLayers(br.brbitbake_set.all(), br.brlayer_set.all())

            # get the bb server running with the build req id and build env id
            bbctrl = bec.getBBController("%d:%d" % (br.pk, bec.be.pk))

            # set the build configuration
            for variable in br.brvariable_set.all():
                bbctrl.setVariable(variable.name, variable.value)

            # trigger the build command
            task = reduce(lambda x, y: x if len(y) == 0 else y, map(lambda y: y.task, br.brtarget_set.all()))
            if len(task) == 0:
                task = None
            bbctrl.build(list(map(lambda x: x.target, br.brtarget_set.all())), task)

            print "Build launched, exiting"
            # disconnect from the server
            bbctrl.disconnect()

            # cleanup to be performed by toaster when the deed is done


        except Exception as e:
            print " EE Error executing shell command\n", e
            traceback.print_exc()
            BRError.objects.create(req = br,
                                   errtype = str(type(e)),
                                   errmsg = str(e),
                                   traceback = traceback.format_exc())
            br.state = BuildRequest.REQ_FAILED
            br.save()
            bec.be.lock = BuildEnvironment.LOCK_FREE
            bec.be.save()


    def cleanup(self):
        from django.utils import timezone
        from datetime import timedelta
        # environments locked for more than 30 seconds - they should be unlocked
        BuildEnvironment.objects.filter(lock=BuildEnvironment.LOCK_LOCK).filter(updated__lt = timezone.now() - timedelta(seconds = 30)).update(lock = BuildEnvironment.LOCK_FREE)


    def handle_noargs(self, **options):
        self.cleanup()
        self.schedule()
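The task = reduce(...) line above keeps the last non-empty task name across the request's targets, then falls back to the server's default task when none is set. A hedged illustration of the same expression in isolation (data invented; reduce is a Python 2 builtin, functools.reduce in Python 3):

    # With tasks ["", "clean", ""] the reduce keeps the last non-empty entry.
    tasks = ["", "clean", ""]
    task = reduce(lambda x, y: x if len(y) == 0 else y, tasks)
    print(task)   # -> "clean"
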
@@ -1,154 +0,0 @@
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):
        # Adding model 'BuildEnvironment'
        db.create_table(u'bldcontrol_buildenvironment', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('address', self.gf('django.db.models.fields.CharField')(max_length=254)),
            ('betype', self.gf('django.db.models.fields.IntegerField')()),
            ('bbaddress', self.gf('django.db.models.fields.CharField')(max_length=254, blank=True)),
            ('bbport', self.gf('django.db.models.fields.IntegerField')(default=-1)),
            ('bbtoken', self.gf('django.db.models.fields.CharField')(max_length=126, blank=True)),
            ('bbstate', self.gf('django.db.models.fields.IntegerField')(default=0)),
            ('lock', self.gf('django.db.models.fields.IntegerField')(default=0)),
            ('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('updated', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
        ))
        db.send_create_signal(u'bldcontrol', ['BuildEnvironment'])

        # Adding model 'BuildRequest'
        db.create_table(u'bldcontrol_buildrequest', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('project', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['orm.Project'])),
            ('build', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['orm.Build'], null=True)),
            ('state', self.gf('django.db.models.fields.IntegerField')(default=0)),
            ('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('updated', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
        ))
        db.send_create_signal(u'bldcontrol', ['BuildRequest'])

        # Adding model 'BRLayer'
        db.create_table(u'bldcontrol_brlayer', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('req', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['bldcontrol.BuildRequest'])),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=100)),
            ('giturl', self.gf('django.db.models.fields.CharField')(max_length=254)),
            ('commit', self.gf('django.db.models.fields.CharField')(max_length=254)),
        ))
        db.send_create_signal(u'bldcontrol', ['BRLayer'])

        # Adding model 'BRVariable'
        db.create_table(u'bldcontrol_brvariable', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('req', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['bldcontrol.BuildRequest'])),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=100)),
            ('value', self.gf('django.db.models.fields.TextField')(blank=True)),
        ))
        db.send_create_signal(u'bldcontrol', ['BRVariable'])

        # Adding model 'BRTarget'
        db.create_table(u'bldcontrol_brtarget', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('req', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['bldcontrol.BuildRequest'])),
            ('target', self.gf('django.db.models.fields.CharField')(max_length=100)),
            ('task', self.gf('django.db.models.fields.CharField')(max_length=100, null=True)),
        ))
        db.send_create_signal(u'bldcontrol', ['BRTarget'])


    def backwards(self, orm):
        # Deleting model 'BuildEnvironment'
        db.delete_table(u'bldcontrol_buildenvironment')

        # Deleting model 'BuildRequest'
        db.delete_table(u'bldcontrol_buildrequest')

        # Deleting model 'BRLayer'
        db.delete_table(u'bldcontrol_brlayer')

        # Deleting model 'BRVariable'
        db.delete_table(u'bldcontrol_brvariable')

        # Deleting model 'BRTarget'
        db.delete_table(u'bldcontrol_brtarget')


    models = {
        u'bldcontrol.brlayer': {
            'Meta': {'object_name': 'BRLayer'},
            'commit': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'giturl': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"})
        },
        u'bldcontrol.brtarget': {
            'Meta': {'object_name': 'BRTarget'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
            'target': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'task': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
        },
        u'bldcontrol.brvariable': {
            'Meta': {'object_name': 'BRVariable'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
            'value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
        },
        u'bldcontrol.buildenvironment': {
            'Meta': {'object_name': 'BuildEnvironment'},
            'address': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'bbaddress': ('django.db.models.fields.CharField', [], {'max_length': '254', 'blank': 'True'}),
            'bbport': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
            'bbstate': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'bbtoken': ('django.db.models.fields.CharField', [], {'max_length': '126', 'blank': 'True'}),
            'betype': ('django.db.models.fields.IntegerField', [], {}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'lock': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'bldcontrol.buildrequest': {
            'Meta': {'object_name': 'BuildRequest'},
            'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']", 'null': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
            'state': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'orm.build': {
            'Meta': {'object_name': 'Build'},
            'bitbake_version': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'build_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'completed_on': ('django.db.models.fields.DateTimeField', [], {}),
            'cooker_log_path': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
            'distro': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'distro_version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'errors_no': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'machine': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'outcome': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']", 'null': 'True'}),
            'started_on': ('django.db.models.fields.DateTimeField', [], {}),
            'timespent': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'warnings_no': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        },
        u'orm.project': {
            'Meta': {'object_name': 'Project'},
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        }
    }

    complete_apps = ['bldcontrol']
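These generated South migrations all follow one fixed shape: forwards() applies the schema change, backwards() is its exact inverse, and the frozen models dict snapshots every model the migration touches. A hedged, minimal hand-written example of the same shape (app, model, and field names invented; South is the pre-Django-1.7 migration tool used here):

    # Sketch of a minimal South SchemaMigration following the pattern above.
    from south.db import db
    from south.v2 import SchemaMigration

    class Migration(SchemaMigration):

        def forwards(self, orm):
            # add a nullable column
            db.add_column(u'myapp_widget', 'color',
                          self.gf('django.db.models.fields.CharField')(max_length=20, null=True),
                          keep_default=False)

        def backwards(self, orm):
            # exact inverse of forwards()
            db.delete_column(u'myapp_widget', 'color')
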
@@ -1,106 +0,0 @@
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):
        # Adding field 'BuildEnvironment.sourcedir'
        db.add_column(u'bldcontrol_buildenvironment', 'sourcedir',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=512, blank=True),
                      keep_default=False)

        # Adding field 'BuildEnvironment.builddir'
        db.add_column(u'bldcontrol_buildenvironment', 'builddir',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=512, blank=True),
                      keep_default=False)


    def backwards(self, orm):
        # Deleting field 'BuildEnvironment.sourcedir'
        db.delete_column(u'bldcontrol_buildenvironment', 'sourcedir')

        # Deleting field 'BuildEnvironment.builddir'
        db.delete_column(u'bldcontrol_buildenvironment', 'builddir')


    models = {
        u'bldcontrol.brlayer': {
            'Meta': {'object_name': 'BRLayer'},
            'commit': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'giturl': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"})
        },
        u'bldcontrol.brtarget': {
            'Meta': {'object_name': 'BRTarget'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
            'target': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'task': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
        },
        u'bldcontrol.brvariable': {
            'Meta': {'object_name': 'BRVariable'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
            'value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
        },
        u'bldcontrol.buildenvironment': {
            'Meta': {'object_name': 'BuildEnvironment'},
            'address': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'bbaddress': ('django.db.models.fields.CharField', [], {'max_length': '254', 'blank': 'True'}),
            'bbport': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
            'bbstate': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'bbtoken': ('django.db.models.fields.CharField', [], {'max_length': '126', 'blank': 'True'}),
            'betype': ('django.db.models.fields.IntegerField', [], {}),
            'builddir': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'lock': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'sourcedir': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'bldcontrol.buildrequest': {
            'Meta': {'object_name': 'BuildRequest'},
            'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']", 'null': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
            'state': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'orm.build': {
            'Meta': {'object_name': 'Build'},
            'bitbake_version': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'build_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'completed_on': ('django.db.models.fields.DateTimeField', [], {}),
            'cooker_log_path': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
            'distro': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'distro_version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'errors_no': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'machine': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'outcome': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']", 'null': 'True'}),
            'started_on': ('django.db.models.fields.DateTimeField', [], {}),
            'timespent': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'warnings_no': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        },
        u'orm.project': {
            'Meta': {'object_name': 'Project'},
            'branch': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'user_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
        }
    }

    complete_apps = ['bldcontrol']
@@ -1,99 +0,0 @@
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):
        # Adding field 'BRLayer.dirpath'
        db.add_column(u'bldcontrol_brlayer', 'dirpath',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=254),
                      keep_default=False)


    def backwards(self, orm):
        # Deleting field 'BRLayer.dirpath'
        db.delete_column(u'bldcontrol_brlayer', 'dirpath')


    models = {
        u'bldcontrol.brlayer': {
            'Meta': {'object_name': 'BRLayer'},
            'commit': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'giturl': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"})
        },
        u'bldcontrol.brtarget': {
            'Meta': {'object_name': 'BRTarget'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
            'target': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'task': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
        },
        u'bldcontrol.brvariable': {
            'Meta': {'object_name': 'BRVariable'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
            'value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
        },
        u'bldcontrol.buildenvironment': {
            'Meta': {'object_name': 'BuildEnvironment'},
            'address': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'bbaddress': ('django.db.models.fields.CharField', [], {'max_length': '254', 'blank': 'True'}),
            'bbport': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
            'bbstate': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'bbtoken': ('django.db.models.fields.CharField', [], {'max_length': '126', 'blank': 'True'}),
            'betype': ('django.db.models.fields.IntegerField', [], {}),
            'builddir': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'lock': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'sourcedir': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'bldcontrol.buildrequest': {
            'Meta': {'object_name': 'BuildRequest'},
            'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']", 'null': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
            'state': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'orm.build': {
            'Meta': {'object_name': 'Build'},
            'bitbake_version': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'build_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'completed_on': ('django.db.models.fields.DateTimeField', [], {}),
            'cooker_log_path': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
            'distro': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'distro_version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'errors_no': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'machine': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'outcome': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']", 'null': 'True'}),
            'started_on': ('django.db.models.fields.DateTimeField', [], {}),
            'timespent': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'warnings_no': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        },
        u'orm.project': {
            'Meta': {'object_name': 'Project'},
            'branch': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'user_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
        }
    }

    complete_apps = ['bldcontrol']
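The file deleted above is the South migration that introduced BRLayer.dirpath. Under Django's built-in migrations framework (1.7+), the equivalent schema change would be expressed roughly as below; the dependency name '0001_initial' is an assumption for illustration.

# Hypothetical Django (>= 1.7) native equivalent of the deleted South
# migration; '0001_initial' is an assumed dependency name.
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('bldcontrol', '0001_initial'),
    ]

    operations = [
        # Mirrors db.add_column(..., default='', max_length=254) above
        migrations.AddField(
            model_name='brlayer',
            name='dirpath',
            field=models.CharField(default='', max_length=254),
        ),
    ]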
@@ -1,104 +0,0 @@
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models

class Migration(DataMigration):

    def forwards(self, orm):
        "Write your forwards methods here."
        # Note: Don't use "from appname.models import ModelName".
        # Use orm.ModelName to refer to models in this application,
        # and orm['appname.ModelName'] for models in other applications.
        try:
            orm.BuildEnvironment.objects.get(pk = 1)
        except:
            from django.utils import timezone
            orm.BuildEnvironment.objects.create(pk = 1,
                created = timezone.now(),
                updated = timezone.now(),
                betype = 0)

    def backwards(self, orm):
        "Write your backwards methods here."

    models = {
        u'bldcontrol.brlayer': {
            'Meta': {'object_name': 'BRLayer'},
            'commit': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'giturl': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"})
        },
        u'bldcontrol.brtarget': {
            'Meta': {'object_name': 'BRTarget'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
            'target': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'task': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
        },
        u'bldcontrol.brvariable': {
            'Meta': {'object_name': 'BRVariable'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
            'value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
        },
        u'bldcontrol.buildenvironment': {
            'Meta': {'object_name': 'BuildEnvironment'},
            'address': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'bbaddress': ('django.db.models.fields.CharField', [], {'max_length': '254', 'blank': 'True'}),
            'bbport': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
            'bbstate': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'bbtoken': ('django.db.models.fields.CharField', [], {'max_length': '126', 'blank': 'True'}),
            'betype': ('django.db.models.fields.IntegerField', [], {}),
            'builddir': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'lock': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'sourcedir': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'bldcontrol.buildrequest': {
            'Meta': {'object_name': 'BuildRequest'},
            'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']", 'null': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
            'state': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'orm.build': {
            'Meta': {'object_name': 'Build'},
            'bitbake_version': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'build_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'completed_on': ('django.db.models.fields.DateTimeField', [], {}),
            'cooker_log_path': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
            'distro': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'distro_version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'errors_no': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'machine': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'outcome': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']", 'null': 'True'}),
            'started_on': ('django.db.models.fields.DateTimeField', [], {}),
            'timespent': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'warnings_no': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        },
        u'orm.project': {
            'Meta': {'object_name': 'Project'},
            'branch': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'user_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
        }
    }

    complete_apps = ['bldcontrol']
    symmetrical = True
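The deleted data migration above seeds a default BuildEnvironment (pk=1, betype=0). With built-in migrations the same seeding is done via RunPython; a minimal sketch, assuming the app label 'bldcontrol' and an assumed dependency name:

# Sketch only: RunPython equivalent of the deleted DataMigration.
from django.db import migrations


def create_default_environment(apps, schema_editor):
    # Use the historical model state, never a direct import of the app's models.
    BuildEnvironment = apps.get_model('bldcontrol', 'BuildEnvironment')
    # get_or_create replaces the broad try/except used above; 'created' and
    # 'updated' are auto_now(_add) fields and populate themselves.
    BuildEnvironment.objects.get_or_create(pk=1, defaults={'betype': 0})


class Migration(migrations.Migration):

    dependencies = [
        ('bldcontrol', '0002_auto'),  # assumed name
    ]

    operations = [
        migrations.RunPython(create_default_environment,
                             migrations.RunPython.noop),
    ]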
@@ -1,112 +0,0 @@
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):
        # Adding model 'BRError'
        db.create_table(u'bldcontrol_brerror', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('req', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['bldcontrol.BuildRequest'])),
            ('errtype', self.gf('django.db.models.fields.CharField')(max_length=100)),
            ('errmsg', self.gf('django.db.models.fields.TextField')()),
            ('traceback', self.gf('django.db.models.fields.TextField')()),
        ))
        db.send_create_signal(u'bldcontrol', ['BRError'])


    def backwards(self, orm):
        # Deleting model 'BRError'
        db.delete_table(u'bldcontrol_brerror')


    models = {
        u'bldcontrol.brerror': {
            'Meta': {'object_name': 'BRError'},
            'errmsg': ('django.db.models.fields.TextField', [], {}),
            'errtype': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
            'traceback': ('django.db.models.fields.TextField', [], {})
        },
        u'bldcontrol.brlayer': {
            'Meta': {'object_name': 'BRLayer'},
            'commit': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'giturl': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"})
        },
        u'bldcontrol.brtarget': {
            'Meta': {'object_name': 'BRTarget'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
            'target': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'task': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
        },
        u'bldcontrol.brvariable': {
            'Meta': {'object_name': 'BRVariable'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
            'value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
        },
        u'bldcontrol.buildenvironment': {
            'Meta': {'object_name': 'BuildEnvironment'},
            'address': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'bbaddress': ('django.db.models.fields.CharField', [], {'max_length': '254', 'blank': 'True'}),
            'bbport': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
            'bbstate': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'bbtoken': ('django.db.models.fields.CharField', [], {'max_length': '126', 'blank': 'True'}),
            'betype': ('django.db.models.fields.IntegerField', [], {}),
            'builddir': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'lock': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'sourcedir': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'bldcontrol.buildrequest': {
            'Meta': {'object_name': 'BuildRequest'},
            'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']", 'null': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
            'state': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'orm.build': {
            'Meta': {'object_name': 'Build'},
            'bitbake_version': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'build_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'completed_on': ('django.db.models.fields.DateTimeField', [], {}),
            'cooker_log_path': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
            'distro': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'distro_version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'errors_no': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'machine': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'outcome': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']", 'null': 'True'}),
            'started_on': ('django.db.models.fields.DateTimeField', [], {}),
            'timespent': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'warnings_no': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        },
        u'orm.project': {
            'Meta': {'object_name': 'Project'},
            'branch': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'user_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
        }
    }

    complete_apps = ['bldcontrol']
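For reference, the BRError table creation above maps onto a single CreateModel operation in native migrations; a sketch with an assumed dependency name and an assumed on_delete (South-era ForeignKeys cascaded by default):

# Sketch only: native-migration form of the deleted BRError creation.
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('bldcontrol', '0003_auto'),  # assumed name
    ]

    operations = [
        migrations.CreateModel(
            name='BRError',
            fields=[
                ('id', models.AutoField(primary_key=True)),
                ('errtype', models.CharField(max_length=100)),
                ('errmsg', models.TextField()),
                ('traceback', models.TextField()),
                # on_delete is assumed; South-era FKs cascaded by default
                ('req', models.ForeignKey(to='bldcontrol.BuildRequest',
                                          on_delete=models.CASCADE)),
            ],
        ),
    ]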
@@ -1,128 +0,0 @@
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):
        # Adding model 'BRBitbake'
        db.create_table(u'bldcontrol_brbitbake', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('req', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['bldcontrol.BuildRequest'], unique=True)),
            ('giturl', self.gf('django.db.models.fields.CharField')(max_length=254)),
            ('commit', self.gf('django.db.models.fields.CharField')(max_length=254)),
            ('dirpath', self.gf('django.db.models.fields.CharField')(max_length=254)),
        ))
        db.send_create_signal(u'bldcontrol', ['BRBitbake'])


    def backwards(self, orm):
        # Deleting model 'BRBitbake'
        db.delete_table(u'bldcontrol_brbitbake')


    models = {
        u'bldcontrol.brbitbake': {
            'Meta': {'object_name': 'BRBitbake'},
            'commit': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'giturl': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']", 'unique': 'True'})
        },
        u'bldcontrol.brerror': {
            'Meta': {'object_name': 'BRError'},
            'errmsg': ('django.db.models.fields.TextField', [], {}),
            'errtype': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
            'traceback': ('django.db.models.fields.TextField', [], {})
        },
        u'bldcontrol.brlayer': {
            'Meta': {'object_name': 'BRLayer'},
            'commit': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'giturl': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"})
        },
        u'bldcontrol.brtarget': {
            'Meta': {'object_name': 'BRTarget'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
            'target': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'task': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
        },
        u'bldcontrol.brvariable': {
            'Meta': {'object_name': 'BRVariable'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
            'value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
        },
        u'bldcontrol.buildenvironment': {
            'Meta': {'object_name': 'BuildEnvironment'},
            'address': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'bbaddress': ('django.db.models.fields.CharField', [], {'max_length': '254', 'blank': 'True'}),
            'bbport': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
            'bbstate': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'bbtoken': ('django.db.models.fields.CharField', [], {'max_length': '126', 'blank': 'True'}),
            'betype': ('django.db.models.fields.IntegerField', [], {}),
            'builddir': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'lock': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'sourcedir': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'bldcontrol.buildrequest': {
            'Meta': {'object_name': 'BuildRequest'},
            'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']", 'null': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
            'state': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'orm.bitbakeversion': {
            'Meta': {'object_name': 'BitbakeVersion'},
            'branch': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
            'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'giturl': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
        },
        u'orm.build': {
            'Meta': {'object_name': 'Build'},
            'bitbake_version': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'build_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'completed_on': ('django.db.models.fields.DateTimeField', [], {}),
            'cooker_log_path': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
            'distro': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'distro_version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'errors_no': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'machine': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'outcome': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']", 'null': 'True'}),
            'started_on': ('django.db.models.fields.DateTimeField', [], {}),
            'timespent': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'warnings_no': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        },
        u'orm.project': {
            'Meta': {'object_name': 'Project'},
            'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'user_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
        }
    }

    complete_apps = ['bldcontrol']
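Note the 'unique': 'True' on BRBitbake.req in the frozen model state above: a unique ForeignKey is exactly what Django's OneToOneField expresses at the model level. A sketch of the corresponding model, with on_delete assumed:

# Sketch only: model-level reading of the frozen BRBitbake definition.
from django.db import models


class BRBitbake(models.Model):
    # ForeignKey(unique=True) in the migration == OneToOneField here
    req = models.OneToOneField('BuildRequest', on_delete=models.CASCADE)
    giturl = models.CharField(max_length=254)
    commit = models.CharField(max_length=254)
    dirpath = models.CharField(max_length=254)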
@@ -1,145 +0,0 @@
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):
        # Adding field 'BuildRequest.environment'
        db.add_column(u'bldcontrol_buildrequest', 'environment',
                      self.gf('django.db.models.fields.related.ForeignKey')(to=orm['bldcontrol.BuildEnvironment'], null=True),
                      keep_default=False)


        # Changing field 'BuildRequest.build'
        db.alter_column(u'bldcontrol_buildrequest', 'build_id', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['orm.Build'], unique=True, null=True))
        # Adding unique constraint on 'BuildRequest', fields ['build']
        db.create_unique(u'bldcontrol_buildrequest', ['build_id'])


    def backwards(self, orm):
        # Removing unique constraint on 'BuildRequest', fields ['build']
        db.delete_unique(u'bldcontrol_buildrequest', ['build_id'])

        # Deleting field 'BuildRequest.environment'
        db.delete_column(u'bldcontrol_buildrequest', 'environment_id')


        # Changing field 'BuildRequest.build'
        db.alter_column(u'bldcontrol_buildrequest', 'build_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['orm.Build'], null=True))

    models = {
        u'bldcontrol.brbitbake': {
            'Meta': {'object_name': 'BRBitbake'},
            'commit': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'giturl': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']", 'unique': 'True'})
        },
        u'bldcontrol.brerror': {
            'Meta': {'object_name': 'BRError'},
            'errmsg': ('django.db.models.fields.TextField', [], {}),
            'errtype': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
            'traceback': ('django.db.models.fields.TextField', [], {})
        },
        u'bldcontrol.brlayer': {
            'Meta': {'object_name': 'BRLayer'},
            'commit': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'giturl': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"})
        },
        u'bldcontrol.brtarget': {
            'Meta': {'object_name': 'BRTarget'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
            'target': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'task': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
        },
        u'bldcontrol.brvariable': {
            'Meta': {'object_name': 'BRVariable'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
            'value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
        },
        u'bldcontrol.buildenvironment': {
            'Meta': {'object_name': 'BuildEnvironment'},
            'address': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
            'bbaddress': ('django.db.models.fields.CharField', [], {'max_length': '254', 'blank': 'True'}),
            'bbport': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
            'bbstate': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'bbtoken': ('django.db.models.fields.CharField', [], {'max_length': '126', 'blank': 'True'}),
            'betype': ('django.db.models.fields.IntegerField', [], {}),
            'builddir': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'lock': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'sourcedir': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'bldcontrol.buildrequest': {
            'Meta': {'object_name': 'BuildRequest'},
            'build': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['orm.Build']", 'unique': 'True', 'null': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'environment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildEnvironment']", 'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
            'state': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'orm.bitbakeversion': {
            'Meta': {'object_name': 'BitbakeVersion'},
            'branch': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
            'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'giturl': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
        },
        u'orm.build': {
            'Meta': {'object_name': 'Build'},
            'bitbake_version': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'build_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'completed_on': ('django.db.models.fields.DateTimeField', [], {}),
            'cooker_log_path': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
            'distro': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'distro_version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'errors_no': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'machine': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'outcome': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']", 'null': 'True'}),
            'started_on': ('django.db.models.fields.DateTimeField', [], {}),
            'timespent': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'warnings_no': ('django.db.models.fields.IntegerField', [], {'default': '0'})
        },
        u'orm.project': {
            'Meta': {'object_name': 'Project'},
            'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"}),
            'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'user_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
        },
        u'orm.release': {
            'Meta': {'object_name': 'Release'},
            'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
            'branch': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
        }
    }

    complete_apps = ['bldcontrol']
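This last deleted migration both adds BuildRequest.environment and tightens BuildRequest.build from a plain ForeignKey to a unique one-to-one link. In native migrations that pair of changes would look roughly like this (dependency name and on_delete are assumptions):

# Sketch only: native-migration form of the deleted forwards() above.
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('bldcontrol', '0005_auto'),  # assumed name
    ]

    operations = [
        migrations.AddField(
            model_name='buildrequest',
            name='environment',
            field=models.ForeignKey(to='bldcontrol.BuildEnvironment',
                                    null=True, on_delete=models.CASCADE),
        ),
        # OneToOneField subsumes the separate unique constraint South created
        migrations.AlterField(
            model_name='buildrequest',
            name='build',
            field=models.OneToOneField(to='orm.Build', null=True,
                                       on_delete=models.CASCADE),
        ),
    ]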
Some files were not shown because too many files have changed in this diff.