mirror of
https://git.yoctoproject.org/poky
synced 2026-01-31 05:48:43 +01:00
Compare commits
468 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
73cc31c11a | ||
|
|
444dc2e99b | ||
|
|
bddb60b101 | ||
|
|
1083d90888 | ||
|
|
54e3f82bd7 | ||
|
|
426bc4c357 | ||
|
|
3ca9f90dff | ||
|
|
ccc964cf9f | ||
|
|
50fdd78423 | ||
|
|
3cf0e09348 | ||
|
|
4515fc9529 | ||
|
|
628aea354d | ||
|
|
3565a9697f | ||
|
|
fe7fb00221 | ||
|
|
7241042b70 | ||
|
|
546c0cffca | ||
|
|
224e04d6ce | ||
|
|
172105c1ef | ||
|
|
0fa93e1412 | ||
|
|
d54e1f4ff5 | ||
|
|
b24988bec7 | ||
|
|
a220e2ca34 | ||
|
|
3ac7c847e8 | ||
|
|
80b35ed1a2 | ||
|
|
7b9e031355 | ||
|
|
cb5649cbb8 | ||
|
|
dd20601980 | ||
|
|
d3c0a560a8 | ||
|
|
62685cbff5 | ||
|
|
3d9f6dc163 | ||
|
|
8aea6ad597 | ||
|
|
051883f877 | ||
|
|
0c78f81485 | ||
|
|
98f3e83884 | ||
|
|
819f7c3d03 | ||
|
|
4245995f76 | ||
|
|
0e8fcf8c9c | ||
|
|
13f0eee08d | ||
|
|
6eb266a365 | ||
|
|
577eb635ab | ||
|
|
553d5f65e8 | ||
|
|
47ef871649 | ||
|
|
db0832ead6 | ||
|
|
863bfa81af | ||
|
|
014af27dcb | ||
|
|
ca4703b6cf | ||
|
|
98e368e4b6 | ||
|
|
3c61ee2f68 | ||
|
|
ec00137169 | ||
|
|
11b217d60b | ||
|
|
c71ea3831a | ||
|
|
3428c1db71 | ||
|
|
4cd7b56228 | ||
|
|
5dd02c6db1 | ||
|
|
0ed07f2658 | ||
|
|
c33bac8883 | ||
|
|
c76d565ce2 | ||
|
|
04f04d0d17 | ||
|
|
d8cbc618cc | ||
|
|
1c73e41159 | ||
|
|
212ca3bee1 | ||
|
|
384801e827 | ||
|
|
5c9148ff6a | ||
|
|
cec5e508ec | ||
|
|
ddc6a9f5cd | ||
|
|
8b50a8676b | ||
|
|
12afe3c057 | ||
|
|
f5e807efc7 | ||
|
|
cf7507f8c4 | ||
|
|
eb0dff0c98 | ||
|
|
9a72d46aed | ||
|
|
ad2cce0f1e | ||
|
|
c96936cfd9 | ||
|
|
047e58b4ba | ||
|
|
485e244db8 | ||
|
|
e8676b4f1a | ||
|
|
cef5f86f43 | ||
|
|
1a2ec16ec0 | ||
|
|
035c33c405 | ||
|
|
8aaffcd59a | ||
|
|
73274f258a | ||
|
|
b291829cfc | ||
|
|
1bccc216ee | ||
|
|
8f4b7758b5 | ||
|
|
95dae8b598 | ||
|
|
129060f0b7 | ||
|
|
f69b958176 | ||
|
|
c3c14808dc | ||
|
|
c60a0a51d7 | ||
|
|
e59717e80f | ||
|
|
b4df9df462 | ||
|
|
ae9b341ecf | ||
|
|
3bf928a3b6 | ||
|
|
0742e8a43b | ||
|
|
cca8dd15c8 | ||
|
|
8e4188e274 | ||
|
|
0ad194919f | ||
|
|
49a01fd044 | ||
|
|
0aedf304e5 | ||
|
|
f0f6acac03 | ||
|
|
bae35b3e5f | ||
|
|
2de121703d | ||
|
|
8a12e713f9 | ||
|
|
2b0f105e59 | ||
|
|
c9f172aa5e | ||
|
|
f7e1cd9f85 | ||
|
|
ec240f45ae | ||
|
|
e92679a6eb | ||
|
|
f979c50029 | ||
|
|
a6b8fda00c | ||
|
|
1b9a98f78c | ||
|
|
d72e66f34b | ||
|
|
e2c2d723ed | ||
|
|
478a38187f | ||
|
|
cc811f4992 | ||
|
|
f7ec29ca3f | ||
|
|
0d390bfb5a | ||
|
|
ca9d26a08d | ||
|
|
49de8caab0 | ||
|
|
d672a4cc3c | ||
|
|
be15df5099 | ||
|
|
2cb87d12d2 | ||
|
|
57531002b8 | ||
|
|
c4061a0a68 | ||
|
|
6962ee3689 | ||
|
|
191666022a | ||
|
|
53766fb01f | ||
|
|
3134fb2861 | ||
|
|
b169435134 | ||
|
|
95e3d71080 | ||
|
|
2de1a5cefb | ||
|
|
a7c3e18de0 | ||
|
|
bd2cc670be | ||
|
|
b108f2a6de | ||
|
|
2fcc8d6e52 | ||
|
|
c3c25ac53d | ||
|
|
7343438092 | ||
|
|
8f5becc3ab | ||
|
|
732dd581f3 | ||
|
|
40f4a6d075 | ||
|
|
88b7f1a1e2 | ||
|
|
8e2ab57852 | ||
|
|
204b2bae4a | ||
|
|
e93596fe74 | ||
|
|
56a27c9aad | ||
|
|
4b27738c5e | ||
|
|
529bbe2cc2 | ||
|
|
82641d700d | ||
|
|
118f7a2247 | ||
|
|
5b24e5b39b | ||
|
|
a78dddb624 | ||
|
|
3b3cdfd71a | ||
|
|
ed4ed5313b | ||
|
|
de056577ce | ||
|
|
2ea93e2b1d | ||
|
|
2b330e5439 | ||
|
|
e08094e604 | ||
|
|
5f97311702 | ||
|
|
7026b2b05a | ||
|
|
8e5e92193a | ||
|
|
06ed5c5a10 | ||
|
|
9fa0bc4500 | ||
|
|
82017f2367 | ||
|
|
e1e5b18a5e | ||
|
|
9995a7a144 | ||
|
|
9fd6b093a4 | ||
|
|
b7bb83a4bb | ||
|
|
45bc60015c | ||
|
|
e6c1d03d3d | ||
|
|
d2ca721d31 | ||
|
|
260ff60f93 | ||
|
|
71291ed53e | ||
|
|
5b3af2abd7 | ||
|
|
70c4134e4b | ||
|
|
90dd677528 | ||
|
|
6db9299d9e | ||
|
|
2561b58ac8 | ||
|
|
9e14b83fa4 | ||
|
|
a8ac03fce1 | ||
|
|
8b9b998258 | ||
|
|
76aa0c3d5d | ||
|
|
11c8c8aa15 | ||
|
|
5a8a6a753f | ||
|
|
aa4b7b2257 | ||
|
|
ea62893915 | ||
|
|
990b8e7919 | ||
|
|
db8258864e | ||
|
|
58538b0703 | ||
|
|
96fe15caf6 | ||
|
|
b6e4966874 | ||
|
|
a837c6be8f | ||
|
|
414aad04b6 | ||
|
|
8a7607f470 | ||
|
|
cce2867828 | ||
|
|
0458275013 | ||
|
|
6f60d91adc | ||
|
|
642890f5d0 | ||
|
|
5368cfee9e | ||
|
|
e588da43b0 | ||
|
|
c32c7522e5 | ||
|
|
62696defc0 | ||
|
|
deca0d3736 | ||
|
|
a220c3a1a9 | ||
|
|
ef6ff739c7 | ||
|
|
d9369d1ea0 | ||
|
|
17e4586d6e | ||
|
|
6175bd0930 | ||
|
|
016df260e5 | ||
|
|
5d781f41ff | ||
|
|
d3ee5489c9 | ||
|
|
9a1694e242 | ||
|
|
3cc3ff6244 | ||
|
|
16f046f38f | ||
|
|
7639be6851 | ||
|
|
a10c9109e2 | ||
|
|
6ac72e8be2 | ||
|
|
c594ff73ab | ||
|
|
b9e99832b9 | ||
|
|
440e3cd2c2 | ||
|
|
977dd47c69 | ||
|
|
2b029e56f9 | ||
|
|
b6f4d24fbc | ||
|
|
ab4f42608a | ||
|
|
23aabca217 | ||
|
|
d9d046c28a | ||
|
|
b6bb27c4c9 | ||
|
|
0271b3ab00 | ||
|
|
4cbb398d85 | ||
|
|
66a4366e8f | ||
|
|
b64fa0af89 | ||
|
|
8f300880c4 | ||
|
|
28344dfed4 | ||
|
|
8c69f7d56c | ||
|
|
aad7166704 | ||
|
|
6980d4fa2f | ||
|
|
094a36886f | ||
|
|
7e11efef59 | ||
|
|
8854de1ffd | ||
|
|
cefa06d985 | ||
|
|
ecb5183b9a | ||
|
|
2bb93e3567 | ||
|
|
2a17af9652 | ||
|
|
3831cdc1b1 | ||
|
|
e01993c3d5 | ||
|
|
7d70e67479 | ||
|
|
fc75bea445 | ||
|
|
59ef3c315b | ||
|
|
eef3fb99d0 | ||
|
|
12eb72ee3b | ||
|
|
a8377d1073 | ||
|
|
52e13fb007 | ||
|
|
9f0eaae229 | ||
|
|
cf181cdb52 | ||
|
|
6f9ef13d0a | ||
|
|
4c36d5209e | ||
|
|
e177680fa0 | ||
|
|
365f85179d | ||
|
|
8ffab431a2 | ||
|
|
ebed0191f9 | ||
|
|
a779b36e9c | ||
|
|
ea438b421d | ||
|
|
45f2a20349 | ||
|
|
853db300f5 | ||
|
|
f07fedb2fb | ||
|
|
1969871269 | ||
|
|
a8279122b9 | ||
|
|
9adc11d4ac | ||
|
|
b5a67a2f7b | ||
|
|
bd47f3f3e6 | ||
|
|
1d7983106c | ||
|
|
4cf38836ac | ||
|
|
ffb615a50b | ||
|
|
1931dfc1cb | ||
|
|
2be23abe85 | ||
|
|
9891a867ef | ||
|
|
e1f49c6068 | ||
|
|
046fd3cb83 | ||
|
|
913b4e5910 | ||
|
|
046f1e6b4c | ||
|
|
e5353a9158 | ||
|
|
f5da2a5913 | ||
|
|
e244da150b | ||
|
|
c4d6b277f2 | ||
|
|
e89b6b84d8 | ||
|
|
325515a685 | ||
|
|
c2d93dcf42 | ||
|
|
116ee14fe0 | ||
|
|
b869068751 | ||
|
|
4eeaae772f | ||
|
|
4dc76844a6 | ||
|
|
d85ccb3daf | ||
|
|
4a98ef84b9 | ||
|
|
b6a3c9c298 | ||
|
|
fbe10c86e8 | ||
|
|
c96f149679 | ||
|
|
f73006031e | ||
|
|
c3f90184c7 | ||
|
|
ec0de3b71e | ||
|
|
54086de158 | ||
|
|
6dca3c67c3 | ||
|
|
88023056fe | ||
|
|
37b3e44b9d | ||
|
|
cb7787af8a | ||
|
|
3ee0f6afc8 | ||
|
|
b3acdca9b6 | ||
|
|
7204ed57ed | ||
|
|
7db217e7ac | ||
|
|
553ffcb941 | ||
|
|
ba4d4372b1 | ||
|
|
4f2716218f | ||
|
|
08e0391d9c | ||
|
|
5ddf1463d3 | ||
|
|
5f4369eb2a | ||
|
|
20aae4e5ef | ||
|
|
d4280db033 | ||
|
|
d79f5a98f7 | ||
|
|
22198f07af | ||
|
|
03d9d8e7d3 | ||
|
|
229e3e4e5f | ||
|
|
6ea7b46ef6 | ||
|
|
98c57bb512 | ||
|
|
ae849a348c | ||
|
|
95b2e086cb | ||
|
|
eea30774b4 | ||
|
|
250212eee6 | ||
|
|
e6917603e2 | ||
|
|
320dacf891 | ||
|
|
11ca5f99a7 | ||
|
|
cd0afe151c | ||
|
|
f7b994b752 | ||
|
|
720ae18403 | ||
|
|
ae832446d9 | ||
|
|
ba29029581 | ||
|
|
a4d74c100d | ||
|
|
b8e749ddd6 | ||
|
|
6240f7092e | ||
|
|
b2a6a89a29 | ||
|
|
39d2072ae9 | ||
|
|
8bdaefd8bd | ||
|
|
74f34dc4d2 | ||
|
|
e91e5324d0 | ||
|
|
99f695ac99 | ||
|
|
51e4dabf70 | ||
|
|
ff7c814661 | ||
|
|
ac84a1ce15 | ||
|
|
2bf1e70e3d | ||
|
|
ec2d08375b | ||
|
|
217448b911 | ||
|
|
662840a9ac | ||
|
|
1e583b1eb8 | ||
|
|
682cb00f04 | ||
|
|
726a2bf3bd | ||
|
|
5658888f11 | ||
|
|
6a268a6cf1 | ||
|
|
b2ab8f4321 | ||
|
|
76a4804f2b | ||
|
|
71ee363046 | ||
|
|
28066b3a21 | ||
|
|
4b57c55182 | ||
|
|
073c0ba55e | ||
|
|
727504235c | ||
|
|
6c56ed7b02 | ||
|
|
306cd99e98 | ||
|
|
f2b952fe99 | ||
|
|
b17a009b65 | ||
|
|
426eb13fa9 | ||
|
|
4007b48cf0 | ||
|
|
e506807f54 | ||
|
|
5a70719762 | ||
|
|
7901b12541 | ||
|
|
3674b577f6 | ||
|
|
9f0741a613 | ||
|
|
31351ce146 | ||
|
|
1c89eae86b | ||
|
|
95441efe6e | ||
|
|
5465517c85 | ||
|
|
d38658b784 | ||
|
|
abae515395 | ||
|
|
55b9718bd8 | ||
|
|
97753ee3ad | ||
|
|
fa602e6cc9 | ||
|
|
2806bed309 | ||
|
|
896b2a0a33 | ||
|
|
e576607c2d | ||
|
|
4fe89e0acd | ||
|
|
ff5b6b7eb1 | ||
|
|
83731d04b3 | ||
|
|
5cf3b562c3 | ||
|
|
613a74fcc8 | ||
|
|
4dd2808856 | ||
|
|
f43a689b3c | ||
|
|
9f968375b4 | ||
|
|
b5fcf4ec1b | ||
|
|
386c7c6ff5 | ||
|
|
c29636c369 | ||
|
|
99b89011f1 | ||
|
|
786f7eec78 | ||
|
|
3d6178ed1d | ||
|
|
73761bd8ac | ||
|
|
1ad7757cbb | ||
|
|
720c926271 | ||
|
|
e89f4e531f | ||
|
|
7925e8942f | ||
|
|
3a1b40b685 | ||
|
|
5b6e5ab134 | ||
|
|
613fee3563 | ||
|
|
7b10fd2026 | ||
|
|
5581f5a0f6 | ||
|
|
9825580d79 | ||
|
|
0df81c8485 | ||
|
|
2aae36ea43 | ||
|
|
7691471070 | ||
|
|
d8d2fa887d | ||
|
|
226d54067e | ||
|
|
200f6c8c35 | ||
|
|
7f9a10b861 | ||
|
|
b347363768 | ||
|
|
70918fbf25 | ||
|
|
d089de0d16 | ||
|
|
b3dc50e620 | ||
|
|
64512cbab8 | ||
|
|
8e44c6bff9 | ||
|
|
2fcce54c83 | ||
|
|
1b769a0774 | ||
|
|
be8dfdcb5a | ||
|
|
19f89f76bb | ||
|
|
4376fb8517 | ||
|
|
8f51f6153a | ||
|
|
829706d3c5 | ||
|
|
da4bfbef46 | ||
|
|
eff84a76ac | ||
|
|
e8898f0188 | ||
|
|
5d11ed7162 | ||
|
|
22f8a46d2d | ||
|
|
cc2522771e | ||
|
|
9e01e2ee5c | ||
|
|
f000d11753 | ||
|
|
9930fca92b | ||
|
|
387560a718 | ||
|
|
0e5cbe52b0 | ||
|
|
3261f47b97 | ||
|
|
b275edd305 | ||
|
|
0011368622 | ||
|
|
0461a6d433 | ||
|
|
b1df214a43 | ||
|
|
9c353b0849 | ||
|
|
3be5cc1cb9 | ||
|
|
2956e5ab34 | ||
|
|
de9f4b6982 | ||
|
|
f911299295 | ||
|
|
67454487be | ||
|
|
c0d988e676 | ||
|
|
42a4637a99 | ||
|
|
5b97ffa980 | ||
|
|
93f29f536e | ||
|
|
5a1ac4ea59 | ||
|
|
3aa988ef77 | ||
|
|
75ca532114 | ||
|
|
898a78357e | ||
|
|
4f483c7390 | ||
|
|
bdb4b02a08 | ||
|
|
c914668db2 | ||
|
|
badbddadcd | ||
|
|
5448ecf1a2 | ||
|
|
9b1af2eb0c | ||
|
|
cf88da9ae0 | ||
|
|
f832db4bbd | ||
|
|
9f96ef9d98 |
@@ -5,9 +5,8 @@ The following external components are distributed with this software:
|
||||
* The Toaster Simple UI application is based upon the Django project template, the files of which are covered by the BSD license and are copyright (c) Django Software
|
||||
Foundation and individual contributors.
|
||||
|
||||
* Twitter Bootstrap (including Glyphicons), redistributed under the MIT license
|
||||
* Twitter Bootstrap (including Glyphicons), redistributed under the Apache License 2.0.
|
||||
|
||||
* jQuery is redistributed under the MIT license.
|
||||
|
||||
* Twitter typeahead.js redistributed under the MIT license. Note that the JS source has one small modification, so the full unminified file is currently included to make it obvious where this is.
|
||||
|
||||
* QUnit is redistributed under the MIT license.
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env python3
|
||||
#!/usr/bin/env python
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
@@ -35,10 +35,7 @@ except RuntimeError as exc:
|
||||
from bb import cookerdata
|
||||
from bb.main import bitbake_main, BitBakeConfigParameters, BBMainException
|
||||
|
||||
if sys.getfilesystemencoding() != "utf-8":
|
||||
sys.exit("Please use a locale setting which supports utf-8.\nPython can't change the filesystem locale after loading so we need a utf-8 when python starts or things won't work.")
|
||||
|
||||
__version__ = "1.31.0"
|
||||
__version__ = "1.30.0"
|
||||
|
||||
if __name__ == "__main__":
|
||||
if __version__ != bb.__version__:
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env python3
|
||||
#!/usr/bin/env python
|
||||
|
||||
# bitbake-diffsigs
|
||||
# BitBake task signature data comparison utility
|
||||
@@ -24,7 +24,6 @@ import warnings
|
||||
import fnmatch
|
||||
import optparse
|
||||
import logging
|
||||
import pickle
|
||||
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
|
||||
|
||||
@@ -96,7 +95,7 @@ def find_compare_task(bbhandler, pn, taskname):
|
||||
# Recurse into signature comparison
|
||||
output = bb.siggen.compare_sigfiles(latestfiles[0], latestfiles[1], recursecb)
|
||||
if output:
|
||||
print('\n'.join(output))
|
||||
print '\n'.join(output)
|
||||
sys.exit(0)
|
||||
|
||||
|
||||
@@ -122,6 +121,7 @@ else:
|
||||
if len(args) == 1:
|
||||
parser.print_help()
|
||||
else:
|
||||
import cPickle
|
||||
try:
|
||||
if len(args) == 2:
|
||||
output = bb.siggen.dump_sigfile(sys.argv[1])
|
||||
@@ -130,9 +130,9 @@ else:
|
||||
except IOError as e:
|
||||
logger.error(str(e))
|
||||
sys.exit(1)
|
||||
except (pickle.UnpicklingError, EOFError):
|
||||
except cPickle.UnpicklingError, EOFError:
|
||||
logger.error('Invalid signature data - ensure you are specifying sigdata/siginfo files')
|
||||
sys.exit(1)
|
||||
|
||||
if output:
|
||||
print('\n'.join(output))
|
||||
print '\n'.join(output)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env python3
|
||||
#!/usr/bin/env python
|
||||
|
||||
# bitbake-dumpsig
|
||||
# BitBake task signature dump utility
|
||||
@@ -23,7 +23,6 @@ import sys
|
||||
import warnings
|
||||
import optparse
|
||||
import logging
|
||||
import pickle
|
||||
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
|
||||
|
||||
@@ -52,14 +51,15 @@ options, args = parser.parse_args(sys.argv)
|
||||
if len(args) == 1:
|
||||
parser.print_help()
|
||||
else:
|
||||
import cPickle
|
||||
try:
|
||||
output = bb.siggen.dump_sigfile(args[1])
|
||||
except IOError as e:
|
||||
logger.error(str(e))
|
||||
sys.exit(1)
|
||||
except (pickle.UnpicklingError, EOFError):
|
||||
except cPickle.UnpicklingError, EOFError:
|
||||
logger.error('Invalid signature data - ensure you are specifying a sigdata/siginfo file')
|
||||
sys.exit(1)
|
||||
|
||||
if output:
|
||||
print('\n'.join(output))
|
||||
print '\n'.join(output)
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env python3
|
||||
#!/usr/bin/env python
|
||||
import os
|
||||
import sys,logging
|
||||
import optparse
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env python3
|
||||
#!/usr/bin/env python
|
||||
#
|
||||
# Copyright (C) 2012 Richard Purdie
|
||||
#
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env python3
|
||||
#!/usr/bin/env python
|
||||
|
||||
import os
|
||||
import sys
|
||||
@@ -10,12 +10,8 @@ import bb
|
||||
import select
|
||||
import errno
|
||||
import signal
|
||||
import pickle
|
||||
from multiprocessing import Lock
|
||||
|
||||
if sys.getfilesystemencoding() != "utf-8":
|
||||
sys.exit("Please use a locale setting which supports utf-8.\nPython can't change the filesystem locale after loading so we need a utf-8 when python starts or things won't work.")
|
||||
|
||||
# Users shouldn't be running this code directly
|
||||
if len(sys.argv) != 2 or not sys.argv[1].startswith("decafbad"):
|
||||
print("bitbake-worker is meant for internal execution by bitbake itself, please don't use it standalone.")
|
||||
@@ -34,16 +30,19 @@ if sys.argv[1].startswith("decafbadbad"):
|
||||
# updates to log files for use with tail
|
||||
try:
|
||||
if sys.stdout.name == '<stdout>':
|
||||
import fcntl
|
||||
fl = fcntl.fcntl(sys.stdout.fileno(), fcntl.F_GETFL)
|
||||
fl |= os.O_SYNC
|
||||
fcntl.fcntl(sys.stdout.fileno(), fcntl.F_SETFL, fl)
|
||||
#sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
|
||||
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
|
||||
except:
|
||||
pass
|
||||
|
||||
logger = logging.getLogger("BitBake")
|
||||
|
||||
try:
|
||||
import cPickle as pickle
|
||||
except ImportError:
|
||||
import pickle
|
||||
bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.")
|
||||
|
||||
|
||||
worker_pipe = sys.stdout.fileno()
|
||||
bb.utils.nonblockingfd(worker_pipe)
|
||||
# Need to guard against multiprocessing being used in child processes
|
||||
@@ -63,10 +62,10 @@ if 0:
|
||||
consolelog.setFormatter(conlogformat)
|
||||
logger.addHandler(consolelog)
|
||||
|
||||
worker_queue = b""
|
||||
worker_queue = ""
|
||||
|
||||
def worker_fire(event, d):
|
||||
data = b"<event>" + pickle.dumps(event) + b"</event>"
|
||||
data = "<event>" + pickle.dumps(event) + "</event>"
|
||||
worker_fire_prepickled(data)
|
||||
|
||||
def worker_fire_prepickled(event):
|
||||
@@ -92,7 +91,7 @@ def worker_child_fire(event, d):
|
||||
global worker_pipe
|
||||
global worker_pipe_lock
|
||||
|
||||
data = b"<event>" + pickle.dumps(event) + b"</event>"
|
||||
data = "<event>" + pickle.dumps(event) + "</event>"
|
||||
try:
|
||||
worker_pipe_lock.acquire()
|
||||
worker_pipe.write(data)
|
||||
@@ -160,8 +159,7 @@ def fork_off_task(cfg, data, workerdata, fn, task, taskname, appends, taskdepdat
|
||||
pipeout = os.fdopen(pipeout, 'wb', 0)
|
||||
pid = os.fork()
|
||||
except OSError as e:
|
||||
logger.critical("fork failed: %d (%s)" % (e.errno, e.strerror))
|
||||
sys.exit(1)
|
||||
bb.msg.fatal("RunQueue", "fork failed: %d (%s)" % (e.errno, e.strerror))
|
||||
|
||||
if pid == 0:
|
||||
def child():
|
||||
@@ -209,24 +207,14 @@ def fork_off_task(cfg, data, workerdata, fn, task, taskname, appends, taskdepdat
|
||||
# exported_vars() returns a generator which *cannot* be passed to os.environ.update()
|
||||
# successfully. We also need to unset anything from the environment which shouldn't be there
|
||||
exports = bb.data.exported_vars(the_data)
|
||||
|
||||
bb.utils.empty_environment()
|
||||
for e, v in exports:
|
||||
os.environ[e] = v
|
||||
|
||||
for e in fakeenv:
|
||||
os.environ[e] = fakeenv[e]
|
||||
the_data.setVar(e, fakeenv[e])
|
||||
the_data.setVarFlag(e, 'export', "1")
|
||||
|
||||
task_exports = the_data.getVarFlag(taskname, 'exports', True)
|
||||
if task_exports:
|
||||
for e in task_exports.split():
|
||||
the_data.setVarFlag(e, 'export', '1')
|
||||
v = the_data.getVar(e, True)
|
||||
if v is not None:
|
||||
os.environ[e] = v
|
||||
|
||||
if quieterrors:
|
||||
the_data.setVarFlag(taskname, "quieterrors", "1")
|
||||
|
||||
@@ -252,7 +240,7 @@ def fork_off_task(cfg, data, workerdata, fn, task, taskname, appends, taskdepdat
|
||||
bb.utils.process_profilelog(profname)
|
||||
os._exit(ret)
|
||||
else:
|
||||
for key, value in iter(envbackup.items()):
|
||||
for key, value in envbackup.iteritems():
|
||||
if value is None:
|
||||
del os.environ[key]
|
||||
else:
|
||||
@@ -269,22 +257,22 @@ class runQueueWorkerPipe():
|
||||
if pipeout:
|
||||
pipeout.close()
|
||||
bb.utils.nonblockingfd(self.input)
|
||||
self.queue = b""
|
||||
self.queue = ""
|
||||
|
||||
def read(self):
|
||||
start = len(self.queue)
|
||||
try:
|
||||
self.queue = self.queue + (self.input.read(102400) or b"")
|
||||
self.queue = self.queue + self.input.read(102400)
|
||||
except (OSError, IOError) as e:
|
||||
if e.errno != errno.EAGAIN:
|
||||
raise
|
||||
|
||||
end = len(self.queue)
|
||||
index = self.queue.find(b"</event>")
|
||||
index = self.queue.find("</event>")
|
||||
while index != -1:
|
||||
worker_fire_prepickled(self.queue[:index+8])
|
||||
self.queue = self.queue[index+8:]
|
||||
index = self.queue.find(b"</event>")
|
||||
index = self.queue.find("</event>")
|
||||
return (end > start)
|
||||
|
||||
def close(self):
|
||||
@@ -300,7 +288,7 @@ class BitbakeWorker(object):
|
||||
def __init__(self, din):
|
||||
self.input = din
|
||||
bb.utils.nonblockingfd(self.input)
|
||||
self.queue = b""
|
||||
self.queue = ""
|
||||
self.cookercfg = None
|
||||
self.databuilder = None
|
||||
self.data = None
|
||||
@@ -337,12 +325,12 @@ class BitbakeWorker(object):
|
||||
except (OSError, IOError):
|
||||
pass
|
||||
if len(self.queue):
|
||||
self.handle_item(b"cookerconfig", self.handle_cookercfg)
|
||||
self.handle_item(b"workerdata", self.handle_workerdata)
|
||||
self.handle_item(b"runtask", self.handle_runtask)
|
||||
self.handle_item(b"finishnow", self.handle_finishnow)
|
||||
self.handle_item(b"ping", self.handle_ping)
|
||||
self.handle_item(b"quit", self.handle_quit)
|
||||
self.handle_item("cookerconfig", self.handle_cookercfg)
|
||||
self.handle_item("workerdata", self.handle_workerdata)
|
||||
self.handle_item("runtask", self.handle_runtask)
|
||||
self.handle_item("finishnow", self.handle_finishnow)
|
||||
self.handle_item("ping", self.handle_ping)
|
||||
self.handle_item("quit", self.handle_quit)
|
||||
|
||||
for pipe in self.build_pipes:
|
||||
self.build_pipes[pipe].read()
|
||||
@@ -352,12 +340,12 @@ class BitbakeWorker(object):
|
||||
|
||||
|
||||
def handle_item(self, item, func):
|
||||
if self.queue.startswith(b"<" + item + b">"):
|
||||
index = self.queue.find(b"</" + item + b">")
|
||||
if self.queue.startswith("<" + item + ">"):
|
||||
index = self.queue.find("</" + item + ">")
|
||||
while index != -1:
|
||||
func(self.queue[(len(item) + 2):index])
|
||||
self.queue = self.queue[(index + len(item) + 3):]
|
||||
index = self.queue.find(b"</" + item + b">")
|
||||
index = self.queue.find("</" + item + ">")
|
||||
|
||||
def handle_cookercfg(self, data):
|
||||
self.cookercfg = pickle.loads(data)
|
||||
@@ -376,7 +364,7 @@ class BitbakeWorker(object):
|
||||
def handle_ping(self, _):
|
||||
workerlog_write("Handling ping\n")
|
||||
|
||||
logger.warning("Pong from bitbake-worker!")
|
||||
logger.warn("Pong from bitbake-worker!")
|
||||
|
||||
def handle_quit(self, data):
|
||||
workerlog_write("Handling quit\n")
|
||||
@@ -421,12 +409,12 @@ class BitbakeWorker(object):
|
||||
self.build_pipes[pid].close()
|
||||
del self.build_pipes[pid]
|
||||
|
||||
worker_fire_prepickled(b"<exitcode>" + pickle.dumps((task, status)) + b"</exitcode>")
|
||||
worker_fire_prepickled("<exitcode>" + pickle.dumps((task, status)) + "</exitcode>")
|
||||
|
||||
def handle_finishnow(self, _):
|
||||
if self.build_pids:
|
||||
logger.info("Sending SIGTERM to remaining %s tasks", len(self.build_pids))
|
||||
for k, v in iter(self.build_pids.items()):
|
||||
for k, v in self.build_pids.iteritems():
|
||||
try:
|
||||
os.kill(-k, signal.SIGTERM)
|
||||
os.waitpid(-1, 0)
|
||||
@@ -436,7 +424,6 @@ class BitbakeWorker(object):
|
||||
self.build_pipes[pipe].read()
|
||||
|
||||
try:
|
||||
sys.stdin = sys.stdin.detach()
|
||||
worker = BitbakeWorker(sys.stdin)
|
||||
if not profiling:
|
||||
worker.serve()
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env python3
|
||||
#!/usr/bin/env python
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
|
||||
122
bitbake/bin/image-writer
Executable file
122
bitbake/bin/image-writer
Executable file
@@ -0,0 +1,122 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# Copyright (c) 2012 Wind River Systems, Inc.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
||||
# See the GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
|
||||
import os
|
||||
import sys
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname( \
|
||||
os.path.abspath(__file__))), 'lib'))
|
||||
try:
|
||||
import bb
|
||||
except RuntimeError as exc:
|
||||
sys.exit(str(exc))
|
||||
|
||||
import gtk
|
||||
import optparse
|
||||
import pygtk
|
||||
|
||||
from bb.ui.crumbs.hobwidget import HobAltButton, HobButton
|
||||
from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog
|
||||
from bb.ui.crumbs.hig.deployimagedialog import DeployImageDialog
|
||||
from bb.ui.crumbs.hig.imageselectiondialog import ImageSelectionDialog
|
||||
|
||||
# I put all the fs bitbake supported here. Need more test.
|
||||
DEPLOYABLE_IMAGE_TYPES = ["jffs2", "cramfs", "ext2", "ext3", "ext4", "btrfs", "squashfs", "ubi", "vmdk"]
|
||||
Title = "USB Image Writer"
|
||||
|
||||
class DeployWindow(gtk.Window):
|
||||
def __init__(self, image_path=''):
|
||||
super(DeployWindow, self).__init__()
|
||||
|
||||
if len(image_path) > 0:
|
||||
valid = True
|
||||
if not os.path.exists(image_path):
|
||||
valid = False
|
||||
lbl = "<b>Invalid image file path: %s.</b>\nPress <b>Select Image</b> to select an image." % image_path
|
||||
else:
|
||||
image_path = os.path.abspath(image_path)
|
||||
extend_name = os.path.splitext(image_path)[1][1:]
|
||||
if extend_name not in DEPLOYABLE_IMAGE_TYPES:
|
||||
valid = False
|
||||
lbl = "<b>Undeployable imge type: %s</b>\nPress <b>Select Image</b> to select an image." % extend_name
|
||||
|
||||
if not valid:
|
||||
image_path = ''
|
||||
crumbs_dialog = CrumbsMessageDialog(self, lbl, gtk.STOCK_DIALOG_INFO)
|
||||
button = crumbs_dialog.add_button("Close", gtk.RESPONSE_OK)
|
||||
HobButton.style_button(button)
|
||||
crumbs_dialog.run()
|
||||
crumbs_dialog.destroy()
|
||||
|
||||
self.deploy_dialog = DeployImageDialog(Title, image_path, self,
|
||||
gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT
|
||||
| gtk.DIALOG_NO_SEPARATOR, None, standalone=True)
|
||||
close_button = self.deploy_dialog.add_button("Close", gtk.RESPONSE_NO)
|
||||
HobAltButton.style_button(close_button)
|
||||
close_button.connect('clicked', gtk.main_quit)
|
||||
|
||||
write_button = self.deploy_dialog.add_button("Write USB image", gtk.RESPONSE_YES)
|
||||
HobAltButton.style_button(write_button)
|
||||
|
||||
self.deploy_dialog.connect('select_image_clicked', self.select_image_clicked_cb)
|
||||
self.deploy_dialog.connect('destroy', gtk.main_quit)
|
||||
response = self.deploy_dialog.show()
|
||||
|
||||
def select_image_clicked_cb(self, dialog):
|
||||
cwd = os.getcwd()
|
||||
dialog = ImageSelectionDialog(cwd, DEPLOYABLE_IMAGE_TYPES, Title, self, gtk.FILE_CHOOSER_ACTION_SAVE )
|
||||
button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
|
||||
HobAltButton.style_button(button)
|
||||
button = dialog.add_button("Open", gtk.RESPONSE_YES)
|
||||
HobAltButton.style_button(button)
|
||||
response = dialog.run()
|
||||
|
||||
if response == gtk.RESPONSE_YES:
|
||||
if not dialog.image_names:
|
||||
lbl = "<b>No selections made</b>\nClicked the radio button to select a image."
|
||||
crumbs_dialog = CrumbsMessageDialog(self, lbl, gtk.STOCK_DIALOG_INFO)
|
||||
button = crumbs_dialog.add_button("Close", gtk.RESPONSE_OK)
|
||||
HobButton.style_button(button)
|
||||
crumbs_dialog.run()
|
||||
crumbs_dialog.destroy()
|
||||
dialog.destroy()
|
||||
return
|
||||
|
||||
# get the full path of image
|
||||
image_path = os.path.join(dialog.image_folder, dialog.image_names[0])
|
||||
self.deploy_dialog.set_image_text_buffer(image_path)
|
||||
self.deploy_dialog.set_image_path(image_path)
|
||||
|
||||
dialog.destroy()
|
||||
|
||||
def main():
|
||||
parser = optparse.OptionParser(
|
||||
usage = """%prog [-h] [image_file]
|
||||
|
||||
%prog writes bootable images to USB devices. You can
|
||||
provide the image file on the command line or select it using the GUI.""")
|
||||
|
||||
options, args = parser.parse_args(sys.argv)
|
||||
image_file = args[1] if len(args) > 1 else ''
|
||||
dw = DeployWindow(image_file)
|
||||
|
||||
if __name__ == '__main__':
|
||||
try:
|
||||
main()
|
||||
gtk.main()
|
||||
except Exception:
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
@@ -17,12 +17,10 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see http://www.gnu.org/licenses/.
|
||||
|
||||
HELP="
|
||||
Usage: source toaster start|stop [webport=<address:port>] [noweb]
|
||||
Optional arguments:
|
||||
[noweb] Setup the environment for building with toaster but don't start the development server
|
||||
[webport] Set the development server (default: localhost:8000)
|
||||
"
|
||||
# Usage: source toaster [start|stop]
|
||||
# [webport=<port>] [noui] [noweb]
|
||||
|
||||
# Helper function to kill a background toaster development server
|
||||
|
||||
webserverKillAll()
|
||||
{
|
||||
@@ -69,7 +67,7 @@ webserverStartAll()
|
||||
|
||||
echo "Starting webserver..."
|
||||
|
||||
$MANAGE runserver "$ADDR_PORT" \
|
||||
$MANAGE runserver "0.0.0.0:$WEB_PORT" \
|
||||
</dev/null >>${BUILDDIR}/toaster_web.log 2>&1 \
|
||||
& echo $! >${BUILDDIR}/.toastermain.pid
|
||||
|
||||
@@ -79,8 +77,7 @@ webserverStartAll()
|
||||
retval=1
|
||||
rm "${BUILDDIR}/.toastermain.pid"
|
||||
else
|
||||
echo "Toaster development webserver started at http://$ADDR_PORT"
|
||||
echo -e "\nYou can now run 'bitbake <target>' on the command line and monitor your build in Toaster.\nYou can also use a Toaster project to configure and run a build.\n"
|
||||
echo "Webserver address: http://0.0.0.0:$WEB_PORT/"
|
||||
fi
|
||||
|
||||
return $retval
|
||||
@@ -100,6 +97,7 @@ stop_system()
|
||||
fi
|
||||
webserverKillAll
|
||||
# unset exported variables
|
||||
unset DATABASE_URL
|
||||
unset TOASTER_CONF
|
||||
unset TOASTER_DIR
|
||||
unset BITBAKE_UI
|
||||
@@ -111,11 +109,11 @@ stop_system()
|
||||
|
||||
verify_prereq() {
|
||||
# Verify Django version
|
||||
reqfile=$(python3 -c "import os; print(os.path.realpath('$BBBASEDIR/toaster-requirements.txt'))")
|
||||
reqfile=$(python -c "import os; print os.path.realpath('$BBBASEDIR/toaster-requirements.txt')")
|
||||
exp='s/Django\([><=]\+\)\([^,]\+\),\([><=]\+\)\(.\+\)/'
|
||||
exp=$exp'import sys,django;version=django.get_version().split(".");'
|
||||
exp=$exp'sys.exit(not (version \1 "\2".split(".") and version \3 "\4".split(".")))/p'
|
||||
if ! sed -n "$exp" $reqfile | python3 - ; then
|
||||
if ! sed -n "$exp" $reqfile | python - ; then
|
||||
req=`grep ^Django $reqfile`
|
||||
echo "This program needs $req"
|
||||
echo "Please install with pip install -r $reqfile"
|
||||
@@ -135,7 +133,7 @@ else
|
||||
fi
|
||||
|
||||
export BBBASEDIR=`dirname $TOASTER`/..
|
||||
MANAGE="python3 $BBBASEDIR/lib/toaster/manage.py"
|
||||
MANAGE=$BBBASEDIR/lib/toaster/manage.py
|
||||
OEROOT=`dirname $TOASTER`/../..
|
||||
|
||||
# this is the configuraton file we are using for toaster
|
||||
@@ -162,7 +160,7 @@ fi
|
||||
|
||||
if [ "$TOASTER_CONF" = "" ]; then
|
||||
TOASTER_CONF="$TEMPLATECONF/toasterconf.json"
|
||||
export TOASTER_CONF=$(python3 -c "import os; print(os.path.realpath('$TOASTER_CONF'))")
|
||||
export TOASTER_CONF=$(python -c "import os; print os.path.realpath('$TOASTER_CONF')")
|
||||
fi
|
||||
|
||||
if [ ! -f $TOASTER_CONF ]; then
|
||||
@@ -180,7 +178,7 @@ fi
|
||||
export TOASTER_DIR=`pwd`
|
||||
|
||||
WEBSERVER=1
|
||||
ADDR_PORT="localhost:8000"
|
||||
WEB_PORT="8000"
|
||||
unset CMD
|
||||
for param in $*; do
|
||||
case $param in
|
||||
@@ -194,20 +192,7 @@ for param in $*; do
|
||||
CMD=$param
|
||||
;;
|
||||
webport=*)
|
||||
ADDR_PORT="${param#*=}"
|
||||
# Split the addr:port string
|
||||
ADDR=`echo $ADDR_PORT | cut -f 1 -d ':'`
|
||||
PORT=`echo $ADDR_PORT | cut -f 2 -d ':'`
|
||||
# If only a port has been speified then set address to localhost.
|
||||
if [ $ADDR = $PORT ] ; then
|
||||
ADDR_PORT="localhost:$PORT"
|
||||
fi
|
||||
;;
|
||||
*)
|
||||
echo "$HELP"
|
||||
return 1
|
||||
;;
|
||||
|
||||
WEB_PORT="${param#*=}"
|
||||
esac
|
||||
done
|
||||
|
||||
@@ -241,9 +226,11 @@ if [ "$CMD" = "start" ] ; then
|
||||
return 1
|
||||
fi
|
||||
elif [ "$CMD" = "" ]; then
|
||||
echo "No command specified"
|
||||
echo "$HELP"
|
||||
return 1
|
||||
if [ -z "$BBSERVER" ]; then
|
||||
CMD="start"
|
||||
else
|
||||
CMD="stop"
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "The system will $CMD."
|
||||
@@ -254,9 +241,7 @@ case $CMD in
|
||||
start )
|
||||
# check if addr:port is not in use
|
||||
if [ "$CMD" == 'start' ]; then
|
||||
if [ $WEBSERVER -gt 0 ]; then
|
||||
$MANAGE checksocket "$ADDR_PORT" || return 1
|
||||
fi
|
||||
$MANAGE checksocket "0.0.0.0:$WEB_PORT" || return 1
|
||||
fi
|
||||
|
||||
# kill Toaster web server if it's alive
|
||||
@@ -277,6 +262,7 @@ case $CMD in
|
||||
return 4
|
||||
fi
|
||||
export BITBAKE_UI='toasterui'
|
||||
export DATABASE_URL=`$MANAGE get-dburl`
|
||||
$MANAGE runbuilds & echo $! >${BUILDDIR}/.runbuilds.pid
|
||||
# set fail safe stop system on terminal exit
|
||||
trap stop_system SIGHUP
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env python3
|
||||
#!/usr/bin/env python
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
|
||||
@@ -29,14 +29,14 @@ import warnings
|
||||
sys.path.insert(0, os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), '../lib'))
|
||||
from bb.cache import CoreRecipeInfo
|
||||
|
||||
import pickle as pickle
|
||||
import cPickle as pickle
|
||||
|
||||
def main(argv=None):
|
||||
"""
|
||||
Get the mapping for the target recipe.
|
||||
"""
|
||||
if len(argv) != 1:
|
||||
print("Error, need one argument!", file=sys.stderr)
|
||||
print >>sys.stderr, "Error, need one argument!"
|
||||
return 2
|
||||
|
||||
cachefile = argv[0]
|
||||
@@ -56,7 +56,7 @@ def main(argv=None):
|
||||
continue
|
||||
|
||||
# 1.0 is the default version for a no PV recipe.
|
||||
if "pv" in val.__dict__:
|
||||
if val.__dict__.has_key("pv"):
|
||||
pv = val.pv
|
||||
else:
|
||||
pv = "1.0"
|
||||
|
||||
@@ -134,7 +134,7 @@
|
||||
<ulink url="http://www.mail-archive.com/yocto@yoctoproject.org/msg09379.html">Mailing List post - The BitBake equivalent of "Hello, World!"</ulink>
|
||||
</para></listitem>
|
||||
<listitem><para>
|
||||
<ulink url="http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/">Hambedded Linux blog post - From Bitbake Hello World to an Image</ulink>
|
||||
<ulink url="https://web.archive.org/web/20150325165911/http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/">Hambedded Linux blog post - From Bitbake Hello World to an Image</ulink>
|
||||
</para></listitem>
|
||||
</itemizedlist>
|
||||
</note>
|
||||
@@ -269,7 +269,7 @@
|
||||
and define some key BitBake variables.
|
||||
For more information on the <filename>bitbake.conf</filename>,
|
||||
see
|
||||
<ulink url='http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/#an-overview-of-bitbakeconf'></ulink>
|
||||
<ulink url='https://web.archive.org/web/20150325165911/http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/#an-overview-of-bitbakeconf'></ulink>
|
||||
</para>
|
||||
<para>Use the following commands to create the <filename>conf</filename>
|
||||
directory in the project directory:
|
||||
@@ -354,7 +354,7 @@ ERROR: Unable to parse base: ParseError in configuration INHERITs: Could not inh
|
||||
supporting.
|
||||
For more information on the <filename>base.bbclass</filename> file,
|
||||
you can look at
|
||||
<ulink url='http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/#tasks'></ulink>.
|
||||
<ulink url='https://web.archive.org/web/20150325165911/http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/#tasks'></ulink>.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>Run Bitbake:</emphasis>
|
||||
After making sure that the <filename>classes/base.bbclass</filename>
|
||||
@@ -376,7 +376,7 @@ ERROR: Unable to parse base: ParseError in configuration INHERITs: Could not inh
|
||||
Thus, this example creates and uses a layer called "mylayer".
|
||||
<note>
|
||||
You can find additional information on adding a layer at
|
||||
<ulink url='http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/#adding-an-example-layer'></ulink>.
|
||||
<ulink url='https://web.archive.org/web/20150325165911/http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/#adding-an-example-layer'></ulink>.
|
||||
</note>
|
||||
</para>
|
||||
<para>Minimally, you need a recipe file and a layer configuration
|
||||
@@ -399,7 +399,7 @@ ERROR: Unable to parse base: ParseError in configuration INHERITs: Could not inh
|
||||
<link linkend='var-BBFILES'>BBFILES</link> += "${LAYERDIR}/*.bb"
|
||||
|
||||
<link linkend='var-BBFILE_COLLECTIONS'>BBFILE_COLLECTIONS</link> += "mylayer"
|
||||
<link linkend='var-BBFILE_PATTERN'>BBFILE_PATTERN_mylayer</link> := "^${LAYERDIR_RE}/"
|
||||
<link linkend='var-BBFILE_PATTERN'>BBFILE_PATTERN_mylayer</link> := "^${LAYERDIR}/"
|
||||
</literallayout>
|
||||
For information on these variables, click the links
|
||||
to go to the definitions in the glossary.</para>
|
||||
|
||||
@@ -1636,17 +1636,6 @@
|
||||
</glossdef>
|
||||
</glossentry>
|
||||
|
||||
<glossentry id='var-LAYERDIR_RE'><glossterm>LAYERDIR_RE</glossterm>
|
||||
<glossdef>
|
||||
<para>When used inside the <filename>layer.conf</filename> configuration
|
||||
file, this variable provides the path of the current layer,
|
||||
escaped for use in a regular expression
|
||||
(<link linkend='var-BBFILE_PATTERN'><filename>BBFILE_PATTERN</filename></link>).
|
||||
This variable is not available outside of <filename>layer.conf</filename>
|
||||
and references are expanded immediately when parsing of the file completes.</para>
|
||||
</glossdef>
|
||||
</glossentry>
|
||||
|
||||
<glossentry id='var-LAYERVERSION'><glossterm>LAYERVERSION</glossterm>
|
||||
<glossdef>
|
||||
<para>Optionally specifies the version of a layer as a single number.
|
||||
|
||||
@@ -23,17 +23,19 @@
|
||||
# Assign a file to __warn__ to get warnings about slow operations.
|
||||
#
|
||||
|
||||
|
||||
from __future__ import print_function
|
||||
import copy
|
||||
import types
|
||||
ImmutableTypes = (
|
||||
types.NoneType,
|
||||
bool,
|
||||
complex,
|
||||
float,
|
||||
int,
|
||||
long,
|
||||
tuple,
|
||||
frozenset,
|
||||
str
|
||||
basestring
|
||||
)
|
||||
|
||||
MUTABLE = "__mutable__"
|
||||
@@ -59,7 +61,7 @@ class COWDictMeta(COWMeta):
|
||||
__call__ = cow
|
||||
|
||||
def __setitem__(cls, key, value):
|
||||
if value is not None and not isinstance(value, ImmutableTypes):
|
||||
if not isinstance(value, ImmutableTypes):
|
||||
if not isinstance(value, COWMeta):
|
||||
cls.__hasmutable__ = True
|
||||
key += MUTABLE
|
||||
@@ -114,7 +116,7 @@ class COWDictMeta(COWMeta):
|
||||
cls.__setitem__(key, cls.__marker__)
|
||||
|
||||
def __revertitem__(cls, key):
|
||||
if key not in cls.__dict__:
|
||||
if not cls.__dict__.has_key(key):
|
||||
key += MUTABLE
|
||||
delattr(cls, key)
|
||||
|
||||
@@ -181,7 +183,7 @@ class COWSetMeta(COWDictMeta):
|
||||
COWDictMeta.__delitem__(cls, repr(hash(value)))
|
||||
|
||||
def __in__(cls, value):
|
||||
return repr(hash(value)) in COWDictMeta
|
||||
return COWDictMeta.has_key(repr(hash(value)))
|
||||
|
||||
def iterkeys(cls):
|
||||
raise TypeError("sets don't have keys")
|
||||
@@ -190,10 +192,12 @@ class COWSetMeta(COWDictMeta):
|
||||
raise TypeError("sets don't have 'items'")
|
||||
|
||||
# These are the actual classes you use!
|
||||
class COWDictBase(object, metaclass = COWDictMeta):
|
||||
class COWDictBase(object):
|
||||
__metaclass__ = COWDictMeta
|
||||
__count__ = 0
|
||||
|
||||
class COWSetBase(object, metaclass = COWSetMeta):
|
||||
class COWSetBase(object):
|
||||
__metaclass__ = COWSetMeta
|
||||
__count__ = 0
|
||||
|
||||
if __name__ == "__main__":
|
||||
@@ -213,11 +217,11 @@ if __name__ == "__main__":
|
||||
print()
|
||||
|
||||
print("a", a)
|
||||
for x in a.items():
|
||||
for x in a.iteritems():
|
||||
print(x)
|
||||
print("--")
|
||||
print("b", b)
|
||||
for x in b.items():
|
||||
for x in b.iteritems():
|
||||
print(x)
|
||||
print()
|
||||
|
||||
@@ -225,11 +229,11 @@ if __name__ == "__main__":
|
||||
b['a'] = 'c'
|
||||
|
||||
print("a", a)
|
||||
for x in a.items():
|
||||
for x in a.iteritems():
|
||||
print(x)
|
||||
print("--")
|
||||
print("b", b)
|
||||
for x in b.items():
|
||||
for x in b.iteritems():
|
||||
print(x)
|
||||
print()
|
||||
|
||||
@@ -244,22 +248,22 @@ if __name__ == "__main__":
|
||||
a['set'].add("o2")
|
||||
|
||||
print("a", a)
|
||||
for x in a['set'].values():
|
||||
for x in a['set'].itervalues():
|
||||
print(x)
|
||||
print("--")
|
||||
print("b", b)
|
||||
for x in b['set'].values():
|
||||
for x in b['set'].itervalues():
|
||||
print(x)
|
||||
print()
|
||||
|
||||
b['set'].add('o3')
|
||||
|
||||
print("a", a)
|
||||
for x in a['set'].values():
|
||||
for x in a['set'].itervalues():
|
||||
print(x)
|
||||
print("--")
|
||||
print("b", b)
|
||||
for x in b['set'].values():
|
||||
for x in b['set'].itervalues():
|
||||
print(x)
|
||||
print()
|
||||
|
||||
@@ -269,7 +273,7 @@ if __name__ == "__main__":
|
||||
a['set2'].add("o2")
|
||||
|
||||
print("a", a)
|
||||
for x in a.items():
|
||||
for x in a.iteritems():
|
||||
print(x)
|
||||
print("--")
|
||||
print("b", b)
|
||||
@@ -283,13 +287,13 @@ if __name__ == "__main__":
|
||||
except KeyError:
|
||||
print("Yay! deleted key raises error")
|
||||
|
||||
if 'b' in b:
|
||||
if b.has_key('b'):
|
||||
print("Boo!")
|
||||
else:
|
||||
print("Yay - has_key with delete works!")
|
||||
|
||||
print("a", a)
|
||||
for x in a.items():
|
||||
for x in a.iteritems():
|
||||
print(x)
|
||||
print("--")
|
||||
print("b", b)
|
||||
@@ -300,7 +304,7 @@ if __name__ == "__main__":
|
||||
b.__revertitem__('b')
|
||||
|
||||
print("a", a)
|
||||
for x in a.items():
|
||||
for x in a.iteritems():
|
||||
print(x)
|
||||
print("--")
|
||||
print("b", b)
|
||||
@@ -310,7 +314,7 @@ if __name__ == "__main__":
|
||||
|
||||
b.__revertitem__('dict')
|
||||
print("a", a)
|
||||
for x in a.items():
|
||||
for x in a.iteritems():
|
||||
print(x)
|
||||
print("--")
|
||||
print("b", b)
|
||||
|
||||
@@ -21,11 +21,11 @@
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
__version__ = "1.31.0"
|
||||
__version__ = "1.30.0"
|
||||
|
||||
import sys
|
||||
if sys.version_info < (3, 4, 0):
|
||||
raise RuntimeError("Sorry, python 3.4.0 or later is required for this version of bitbake")
|
||||
if sys.version_info < (2, 7, 3):
|
||||
raise RuntimeError("Sorry, python 2.7.3 or later is required for this version of bitbake")
|
||||
|
||||
|
||||
class BBHandledException(Exception):
|
||||
@@ -84,8 +84,8 @@ def plain(*args):
|
||||
mainlogger.plain(''.join(args))
|
||||
|
||||
def debug(lvl, *args):
|
||||
if isinstance(lvl, str):
|
||||
mainlogger.warning("Passed invalid debug level '%s' to bb.debug", lvl)
|
||||
if isinstance(lvl, basestring):
|
||||
mainlogger.warn("Passed invalid debug level '%s' to bb.debug", lvl)
|
||||
args = (lvl,) + args
|
||||
lvl = 1
|
||||
mainlogger.debug(lvl, ''.join(args))
|
||||
@@ -94,7 +94,7 @@ def note(*args):
|
||||
mainlogger.info(''.join(args))
|
||||
|
||||
def warn(*args):
|
||||
mainlogger.warning(''.join(args))
|
||||
mainlogger.warn(''.join(args))
|
||||
|
||||
def error(*args, **kwargs):
|
||||
mainlogger.error(''.join(args), extra=kwargs)
|
||||
|
||||
@@ -35,7 +35,8 @@ import stat
|
||||
import bb
|
||||
import bb.msg
|
||||
import bb.process
|
||||
from bb import data, event, utils
|
||||
from contextlib import nested
|
||||
from bb import event, utils
|
||||
|
||||
bblogger = logging.getLogger('BitBake')
|
||||
logger = logging.getLogger('BitBake.Build')
|
||||
@@ -60,13 +61,8 @@ def reset_cache():
|
||||
# in all namespaces, hence we add them to __builtins__.
|
||||
# If we do not do this and use the exec globals, they will
|
||||
# not be available to subfunctions.
|
||||
if hasattr(__builtins__, '__setitem__'):
|
||||
builtins = __builtins__
|
||||
else:
|
||||
builtins = __builtins__.__dict__
|
||||
|
||||
builtins['bb'] = bb
|
||||
builtins['os'] = os
|
||||
__builtins__['bb'] = bb
|
||||
__builtins__['os'] = os
|
||||
|
||||
class FuncFailed(Exception):
|
||||
def __init__(self, name = None, logfile = None):
|
||||
@@ -171,7 +167,7 @@ def exec_func(func, d, dirs = None, pythonexception=False):
|
||||
body = d.getVar(func, False)
|
||||
if not body:
|
||||
if body is None:
|
||||
logger.warning("Function %s doesn't exist", func)
|
||||
logger.warn("Function %s doesn't exist", func)
|
||||
return
|
||||
|
||||
flags = d.getVarFlags(func)
|
||||
@@ -327,7 +323,7 @@ trap '' 0
|
||||
exit $ret
|
||||
''')
|
||||
|
||||
os.chmod(runfile, 0o775)
|
||||
os.chmod(runfile, 0775)
|
||||
|
||||
cmd = runfile
|
||||
if d.getVarFlag(func, 'fakeroot', False):
|
||||
@@ -341,12 +337,12 @@ exit $ret
|
||||
logfile = sys.stdout
|
||||
|
||||
def readfifo(data):
|
||||
lines = data.split(b'\0')
|
||||
lines = data.split('\0')
|
||||
for line in lines:
|
||||
splitval = line.split(b' ', 1)
|
||||
splitval = line.split(' ', 1)
|
||||
cmd = splitval[0]
|
||||
if len(splitval) > 1:
|
||||
value = splitval[1].decode("utf-8")
|
||||
value = splitval[1]
|
||||
else:
|
||||
value = ''
|
||||
if cmd == 'bbplain':
|
||||
@@ -374,7 +370,7 @@ exit $ret
|
||||
if os.path.exists(fifopath):
|
||||
os.unlink(fifopath)
|
||||
os.mkfifo(fifopath)
|
||||
with open(fifopath, 'r+b', buffering=0) as fifo:
|
||||
with open(fifopath, 'r+') as fifo:
|
||||
try:
|
||||
bb.debug(2, "Executing shell function %s" % func)
|
||||
|
||||
@@ -650,7 +646,7 @@ def make_stamp(task, d, file_name = None):
|
||||
for mask in cleanmask:
|
||||
for name in glob.glob(mask):
|
||||
# Preserve sigdata files in the stamps directory
|
||||
if "sigdata" in name:
|
||||
if "sigdata" in name or "sigbasedata" in name:
|
||||
continue
|
||||
# Preserve taint files in the stamps directory
|
||||
if name.endswith('.taint'):
|
||||
|
||||
@@ -28,16 +28,22 @@
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
|
||||
import os
|
||||
import sys
|
||||
import logging
|
||||
import pickle
|
||||
from collections import defaultdict
|
||||
import bb.utils
|
||||
|
||||
logger = logging.getLogger("BitBake.Cache")
|
||||
|
||||
__cache_version__ = "150"
|
||||
try:
|
||||
import cPickle as pickle
|
||||
except ImportError:
|
||||
import pickle
|
||||
logger.info("Importing cPickle failed. "
|
||||
"Falling back to a very slow implementation.")
|
||||
|
||||
__cache_version__ = "149"
|
||||
|
||||
def getCacheFile(path, filename, data_hash):
|
||||
return os.path.join(path, filename + "." + data_hash)
|
||||
@@ -74,7 +80,7 @@ class RecipeInfoCommon(object):
|
||||
out_dict = dict((var, metadata.getVarFlag(var, flag, True))
|
||||
for var in varlist)
|
||||
if squash:
|
||||
return dict((k,v) for (k,v) in out_dict.items() if v)
|
||||
return dict((k,v) for (k,v) in out_dict.iteritems() if v)
|
||||
else:
|
||||
return out_dict
|
||||
|
||||
@@ -234,7 +240,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
|
||||
cachedata.universe_target.append(self.pn)
|
||||
|
||||
cachedata.hashfn[fn] = self.hashfilename
|
||||
for task, taskhash in self.basetaskhashes.items():
|
||||
for task, taskhash in self.basetaskhashes.iteritems():
|
||||
identifier = '%s.%s' % (fn, task)
|
||||
cachedata.basetaskhash[identifier] = taskhash
|
||||
|
||||
@@ -333,7 +339,7 @@ class Cache(object):
|
||||
value = pickled.load()
|
||||
except Exception:
|
||||
break
|
||||
if key in self.depends_cache:
|
||||
if self.depends_cache.has_key(key):
|
||||
self.depends_cache[key].append(value)
|
||||
else:
|
||||
self.depends_cache[key] = [value]
|
||||
@@ -398,7 +404,7 @@ class Cache(object):
|
||||
infos = []
|
||||
datastores = cls.load_bbfile(filename, appends, configdata)
|
||||
depends = []
|
||||
for variant, data in sorted(datastores.items(),
|
||||
for variant, data in sorted(datastores.iteritems(),
|
||||
key=lambda i: i[0],
|
||||
reverse=True):
|
||||
virtualfn = cls.realfn2virtual(filename, variant)
|
||||
@@ -610,7 +616,7 @@ class Cache(object):
|
||||
pickler_dict['CoreRecipeInfo'].dump(bb.__version__)
|
||||
|
||||
try:
|
||||
for key, info_array in self.depends_cache.items():
|
||||
for key, info_array in self.depends_cache.iteritems():
|
||||
for info in info_array:
|
||||
if isinstance(info, RecipeInfoCommon):
|
||||
cache_class_name = info.__class__.__name__
|
||||
|
||||
@@ -19,13 +19,20 @@ import glob
|
||||
import operator
|
||||
import os
|
||||
import stat
|
||||
import pickle
|
||||
import bb.utils
|
||||
import logging
|
||||
from bb.cache import MultiProcessCache
|
||||
|
||||
logger = logging.getLogger("BitBake.Cache")
|
||||
|
||||
try:
|
||||
import cPickle as pickle
|
||||
except ImportError:
|
||||
import pickle
|
||||
logger.info("Importing cPickle failed. "
|
||||
"Falling back to a very slow implementation.")
|
||||
|
||||
|
||||
# mtime cache (non-persistent)
|
||||
# based upon the assumption that files do not change during bitbake run
|
||||
class FileMtimeCache(object):
|
||||
@@ -120,13 +127,15 @@ class FileChecksumCache(MultiProcessCache):
|
||||
checksums.extend(checksum_dir(f))
|
||||
else:
|
||||
checksum = checksum_file(f)
|
||||
checksums.append((f, checksum))
|
||||
if checksum:
|
||||
checksums.append((f, checksum))
|
||||
elif os.path.isdir(pth):
|
||||
if not os.path.islink(pth):
|
||||
checksums.extend(checksum_dir(pth))
|
||||
else:
|
||||
checksum = checksum_file(pth)
|
||||
checksums.append((pth, checksum))
|
||||
if checksum:
|
||||
checksums.append((pth, checksum))
|
||||
|
||||
checksums.sort(key=operator.itemgetter(1))
|
||||
return checksums
|
||||
|
||||
@@ -1,20 +1,21 @@
|
||||
import ast
|
||||
import sys
|
||||
import codegen
|
||||
import logging
|
||||
import pickle
|
||||
import bb.pysh as pysh
|
||||
import os.path
|
||||
import bb.utils, bb.data
|
||||
import hashlib
|
||||
from itertools import chain
|
||||
from bb.pysh import pyshyacc, pyshlex, sherrors
|
||||
from pysh import pyshyacc, pyshlex, sherrors
|
||||
from bb.cache import MultiProcessCache
|
||||
|
||||
|
||||
logger = logging.getLogger('BitBake.CodeParser')
|
||||
|
||||
def bbhash(s):
|
||||
return hashlib.md5(s.encode("utf-8")).hexdigest()
|
||||
try:
|
||||
import cPickle as pickle
|
||||
except ImportError:
|
||||
import pickle
|
||||
logger.info('Importing cPickle failed. Falling back to a very slow implementation.')
|
||||
|
||||
|
||||
def check_indent(codestr):
|
||||
"""If the code is indented, add a top level piece of code to 'remove' the indentation"""
|
||||
@@ -67,12 +68,11 @@ class SetCache(object):
|
||||
|
||||
new = []
|
||||
for i in items:
|
||||
new.append(sys.intern(i))
|
||||
new.append(intern(i))
|
||||
s = frozenset(new)
|
||||
h = hash(s)
|
||||
if h in self.setcache:
|
||||
return self.setcache[h]
|
||||
self.setcache[h] = s
|
||||
if hash(s) in self.setcache:
|
||||
return self.setcache[hash(s)]
|
||||
self.setcache[hash(s)] = s
|
||||
return s
|
||||
|
||||
codecache = SetCache()
|
||||
@@ -117,7 +117,7 @@ class shellCacheLine(object):
|
||||
|
||||
class CodeParserCache(MultiProcessCache):
|
||||
cache_file_name = "bb_codeparser.dat"
|
||||
CACHE_VERSION = 8
|
||||
CACHE_VERSION = 7
|
||||
|
||||
def __init__(self):
|
||||
MultiProcessCache.__init__(self)
|
||||
@@ -191,7 +191,6 @@ class BufferedLogger(Logger):
|
||||
|
||||
class PythonParser():
|
||||
getvars = (".getVar", ".appendVar", ".prependVar")
|
||||
getvarflags = (".getVarFlag", ".appendVarFlag", ".prependVarFlag")
|
||||
containsfuncs = ("bb.utils.contains", "base_contains", "bb.utils.contains_any")
|
||||
execfuncs = ("bb.build.exec_func", "bb.build.exec_task")
|
||||
|
||||
@@ -211,20 +210,15 @@ class PythonParser():
|
||||
|
||||
def visit_Call(self, node):
|
||||
name = self.called_node_name(node.func)
|
||||
if name and (name.endswith(self.getvars) or name.endswith(self.getvarflags) or name in self.containsfuncs):
|
||||
if name and name.endswith(self.getvars) or name in self.containsfuncs:
|
||||
if isinstance(node.args[0], ast.Str):
|
||||
varname = node.args[0].s
|
||||
if name in self.containsfuncs and isinstance(node.args[1], ast.Str):
|
||||
if varname not in self.contains:
|
||||
self.contains[varname] = set()
|
||||
self.contains[varname].add(node.args[1].s)
|
||||
elif name.endswith(self.getvarflags):
|
||||
if isinstance(node.args[1], ast.Str):
|
||||
self.references.add('%s[%s]' % (varname, node.args[1].s))
|
||||
else:
|
||||
self.warn(node.func, node.args[1])
|
||||
else:
|
||||
self.references.add(varname)
|
||||
else:
|
||||
self.references.add(node.args[0].s)
|
||||
else:
|
||||
self.warn(node.func, node.args[0])
|
||||
elif name and name.endswith(".expand"):
|
||||
@@ -274,7 +268,7 @@ class PythonParser():
|
||||
if not node or not node.strip():
|
||||
return
|
||||
|
||||
h = bbhash(str(node))
|
||||
h = hash(str(node))
|
||||
|
||||
if h in codeparsercache.pythoncache:
|
||||
self.references = set(codeparsercache.pythoncache[h].refs)
|
||||
@@ -319,7 +313,7 @@ class ShellParser():
|
||||
commands it executes.
|
||||
"""
|
||||
|
||||
h = bbhash(str(value))
|
||||
h = hash(str(value))
|
||||
|
||||
if h in codeparsercache.shellcache:
|
||||
self.execs = set(codeparsercache.shellcache[h].execs)
|
||||
|
||||
@@ -110,7 +110,7 @@ class Command:
|
||||
return False
|
||||
except SystemExit as exc:
|
||||
arg = exc.args[0]
|
||||
if isinstance(arg, str):
|
||||
if isinstance(arg, basestring):
|
||||
self.finishAsyncCommand(arg)
|
||||
else:
|
||||
self.finishAsyncCommand("Exited with %s" % arg)
|
||||
|
||||
@@ -22,7 +22,7 @@
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
|
||||
from __future__ import print_function
|
||||
import sys, os, glob, os.path, re, time
|
||||
import atexit
|
||||
import itertools
|
||||
@@ -30,21 +30,18 @@ import logging
|
||||
import multiprocessing
|
||||
import sre_constants
|
||||
import threading
|
||||
from io import StringIO
|
||||
from cStringIO import StringIO
|
||||
from contextlib import closing
|
||||
from functools import wraps
|
||||
from collections import defaultdict, namedtuple
|
||||
from collections import defaultdict
|
||||
import bb, bb.exceptions, bb.command
|
||||
from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
|
||||
import queue
|
||||
import Queue
|
||||
import signal
|
||||
import subprocess
|
||||
import errno
|
||||
import prserv.serv
|
||||
import pyinotify
|
||||
import json
|
||||
import pickle
|
||||
import codecs
|
||||
|
||||
logger = logging.getLogger("BitBake")
|
||||
collectlog = logging.getLogger("BitBake.Collection")
|
||||
@@ -68,7 +65,7 @@ class CollectionError(bb.BBHandledException):
|
||||
"""
|
||||
|
||||
class state:
|
||||
initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))
|
||||
initial, parsing, running, shutdown, forceshutdown, stopped, error = range(7)
|
||||
|
||||
@classmethod
|
||||
def get_name(cls, code):
|
||||
@@ -96,7 +93,7 @@ class SkippedPackage:
|
||||
|
||||
|
||||
class CookerFeatures(object):
|
||||
_feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3))
|
||||
_feature_list = [HOB_EXTRA_CACHES, SEND_DEPENDS_TREE, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = range(4)
|
||||
|
||||
def __init__(self):
|
||||
self._features=set()
|
||||
@@ -113,50 +110,10 @@ class CookerFeatures(object):
|
||||
def __iter__(self):
|
||||
return self._features.__iter__()
|
||||
|
||||
def __next__(self):
|
||||
return next(self._features)
|
||||
def next(self):
|
||||
return self._features.next()
|
||||
|
||||
|
||||
class EventWriter:
|
||||
def __init__(self, cooker, eventfile):
|
||||
self.file_inited = None
|
||||
self.cooker = cooker
|
||||
self.eventfile = eventfile
|
||||
self.event_queue = []
|
||||
|
||||
def write_event(self, event):
|
||||
with open(self.eventfile, "a") as f:
|
||||
try:
|
||||
str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
|
||||
f.write("%s\n" % json.dumps({"class": event.__module__ + "." + event.__class__.__name__,
|
||||
"vars": str_event}))
|
||||
except Exception as err:
|
||||
import traceback
|
||||
print(err, traceback.format_exc())
|
||||
|
||||
def send(self, event):
|
||||
if self.file_inited:
|
||||
# we have the file, just write the event
|
||||
self.write_event(event)
|
||||
else:
|
||||
# init on bb.event.BuildStarted
|
||||
name = "%s.%s" % (event.__module__, event.__class__.__name__)
|
||||
if name == "bb.event.BuildStarted":
|
||||
with open(self.eventfile, "w") as f:
|
||||
f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))
|
||||
|
||||
self.file_inited = True
|
||||
|
||||
# write pending events
|
||||
for evt in self.event_queue:
|
||||
self.write_event(evt)
|
||||
|
||||
# also write the current event
|
||||
self.write_event(event)
|
||||
else:
|
||||
# queue all events until the file is inited
|
||||
self.event_queue.append(event)
|
||||
|
||||
#============================================================================#
|
||||
# BBCooker
|
||||
#============================================================================#
|
@@ -194,13 +151,6 @@ class BBCooker:

        self.initConfigurationData()

        # we log all events to a file if so directed
        if self.configuration.writeeventlog:
            # register the log file writer as UI Handler
            writer = EventWriter(self, self.configuration.writeeventlog)
            EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
            bb.event.register_UIHhandler(EventLogWriteHandler(writer))

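Editor's note: a small illustrative sketch (not from the patch) of why the namedtuple works here: the UI-handler dispatch expects an object with an .event attribute whose send() receives each fired event, so the namedtuple simply gives EventWriter that shape. PrintingWriter is a hypothetical stand-in.

# Sketch only: the shape register_UIHhandler expects from a UI client.
from collections import namedtuple

class PrintingWriter:
    """Hypothetical stand-in for EventWriter: just prints event class names."""
    def send(self, event):
        print(type(event).__name__)

EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
handler = EventLogWriteHandler(PrintingWriter())
handler.event.send(object())   # roughly what the event dispatch does per event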
        self.inotify_modified_files = []

    def _process_inotify_updates(server, notifier_list, abort):
||||
@@ -351,6 +301,74 @@ class BBCooker:
|
||||
if consolelog:
|
||||
self.data.setVar("BB_CONSOLELOG", consolelog)
|
||||
|
||||
# we log all events to a file if so directed
|
||||
if self.configuration.writeeventlog:
|
||||
import json, pickle
|
||||
DEFAULT_EVENTFILE = self.configuration.writeeventlog
|
||||
class EventLogWriteHandler():
|
||||
|
||||
class EventWriter():
|
||||
def __init__(self, cooker):
|
||||
self.file_inited = None
|
||||
self.cooker = cooker
|
||||
self.event_queue = []
|
||||
|
||||
def init_file(self):
|
||||
try:
|
||||
# delete the old log
|
||||
os.remove(DEFAULT_EVENTFILE)
|
||||
except:
|
||||
pass
|
||||
|
||||
# write current configuration data
|
||||
with open(DEFAULT_EVENTFILE, "w") as f:
|
||||
f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))
|
||||
|
||||
def write_event(self, event):
|
||||
with open(DEFAULT_EVENTFILE, "a") as f:
|
||||
try:
|
||||
f.write("%s\n" % json.dumps({"class":event.__module__ + "." + event.__class__.__name__, "vars":json.dumps(pickle.dumps(event)) }))
|
||||
except Exception as e:
|
||||
import traceback
|
||||
print(e, traceback.format_exc(e))
|
||||
|
||||
|
||||
def send(self, event):
|
||||
event_class = event.__module__ + "." + event.__class__.__name__
|
||||
|
||||
# init on bb.event.BuildStarted
|
||||
if self.file_inited is None:
|
||||
if event_class == "bb.event.BuildStarted":
|
||||
self.init_file()
|
||||
self.file_inited = True
|
||||
|
||||
# write pending events
|
||||
for e in self.event_queue:
|
||||
self.write_event(e)
|
||||
|
||||
# also write the current event
|
||||
self.write_event(event)
|
||||
|
||||
else:
|
||||
# queue all events until the file is inited
|
||||
self.event_queue.append(event)
|
||||
|
||||
else:
|
||||
# we have the file, just write the event
|
||||
self.write_event(event)
|
||||
|
||||
# set our handler's event processor
|
||||
event = EventWriter(self) # self is the cooker here
|
||||
|
||||
|
||||
# set up cooker features for this mock UI handler
|
||||
|
||||
# we need to write the dependency tree in the log
|
||||
self.featureset.setFeature(CookerFeatures.SEND_DEPENDS_TREE)
|
||||
# register the log file writer as UI Handler
|
||||
bb.event.register_UIHhandler(EventLogWriteHandler())
|
||||
|
||||
|
||||
#
|
||||
# Copy of the data store which has been expanded.
|
||||
# Used for firing events and accessing variables where expansion needs to be accounted for
|
||||
@@ -611,7 +629,9 @@ class BBCooker:
|
||||
|
||||
taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True)
|
||||
|
||||
fn = taskdata.build_targets[pkgs_to_build[0]][0]
|
||||
targetid = taskdata.getbuild_id(pkgs_to_build[0])
|
||||
fnid = taskdata.build_targets[targetid][0]
|
||||
fn = taskdata.fn_index[fnid]
|
||||
else:
|
||||
envdata = self.data
|
||||
|
||||
@@ -636,7 +656,7 @@ class BBCooker:
|
||||
# emit the metadata which isnt valid shell
|
||||
data.expandKeys(envdata)
|
||||
for e in envdata.keys():
|
||||
if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
|
||||
if data.getVarFlag( e, 'python', envdata ):
|
||||
logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))
|
||||
|
||||
|
||||
@@ -700,7 +720,7 @@ class BBCooker:
|
||||
|
||||
|
||||
def buildDependTree(self, rq, taskdata):
|
||||
seen_fns = []
|
||||
seen_fnids = []
|
||||
depend_tree = {}
|
||||
depend_tree["depends"] = {}
|
||||
depend_tree["tdepends"] = {}
|
||||
@@ -712,15 +732,16 @@ class BBCooker:
|
||||
depend_tree['providermap'] = {}
|
||||
depend_tree["layer-priorities"] = self.recipecache.bbfile_config_priorities
|
||||
|
||||
for name, fn in list(taskdata.get_providermap().items()):
|
||||
for name, fn in taskdata.get_providermap().iteritems():
|
||||
pn = self.recipecache.pkg_fn[fn]
|
||||
if name != pn:
|
||||
version = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn]
|
||||
depend_tree['providermap'][name] = (pn, version)
|
||||
|
||||
for tid in rq.rqdata.runtaskentries:
|
||||
taskname = bb.runqueue.taskname_from_tid(tid)
|
||||
fn = bb.runqueue.fn_from_tid(tid)
|
||||
for task in xrange(len(rq.rqdata.runq_fnid)):
|
||||
taskname = rq.rqdata.runq_task[task]
|
||||
fnid = rq.rqdata.runq_fnid[task]
|
||||
fn = taskdata.fn_index[fnid]
|
||||
pn = self.recipecache.pkg_fn[fn]
|
||||
version = "%s:%s-%s" % self.recipecache.pkg_pepvpr[fn]
|
||||
if pn not in depend_tree["pn"]:
|
||||
@@ -741,24 +762,24 @@ class BBCooker:
|
||||
depend_tree["pn"][pn][ei] = vars(self.recipecache)[ei][fn]
|
||||
|
||||
|
||||
for dep in rq.rqdata.runtaskentries[tid].depends:
|
||||
depfn = bb.runqueue.fn_from_tid(dep)
|
||||
for dep in rq.rqdata.runq_depends[task]:
|
||||
depfn = taskdata.fn_index[rq.rqdata.runq_fnid[dep]]
|
||||
deppn = self.recipecache.pkg_fn[depfn]
|
||||
dotname = "%s.%s" % (pn, bb.runqueue.taskname_from_tid(dep))
|
||||
dotname = "%s.%s" % (pn, rq.rqdata.runq_task[task])
|
||||
if not dotname in depend_tree["tdepends"]:
|
||||
depend_tree["tdepends"][dotname] = []
|
||||
depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep)))
|
||||
if fn not in seen_fns:
|
||||
seen_fns.append(fn)
|
||||
depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, rq.rqdata.runq_task[dep]))
|
||||
if fnid not in seen_fnids:
|
||||
seen_fnids.append(fnid)
|
||||
packages = []
|
||||
|
||||
depend_tree["depends"][pn] = []
|
||||
for dep in taskdata.depids[fn]:
|
||||
depend_tree["depends"][pn].append(dep)
|
||||
for dep in taskdata.depids[fnid]:
|
||||
depend_tree["depends"][pn].append(taskdata.build_names_index[dep])
|
||||
|
||||
depend_tree["rdepends-pn"][pn] = []
|
||||
for rdep in taskdata.rdepids[fn]:
|
||||
depend_tree["rdepends-pn"][pn].append(rdep)
|
||||
for rdep in taskdata.rdepids[fnid]:
|
||||
depend_tree["rdepends-pn"][pn].append(taskdata.run_names_index[rdep])
|
||||
|
||||
rdepends = self.recipecache.rundeps[fn]
|
||||
for package in rdepends:
|
||||
@@ -790,8 +811,12 @@ class BBCooker:
|
||||
Create a dependency tree of pkgs_to_build, returning the data.
|
||||
"""
|
||||
_, taskdata = self.prepareTreeData(pkgs_to_build, task)
|
||||
tasks_fnid = []
|
||||
if len(taskdata.tasks_name) != 0:
|
||||
for task in xrange(len(taskdata.tasks_name)):
|
||||
tasks_fnid.append(taskdata.tasks_fnid[task])
|
||||
|
||||
seen_fns = []
|
||||
seen_fnids = []
|
||||
depend_tree = {}
|
||||
depend_tree["depends"] = {}
|
||||
depend_tree["pn"] = {}
|
||||
@@ -806,8 +831,9 @@ class BBCooker:
|
||||
cachefields = getattr(cache_class, 'cachefields', [])
|
||||
extra_info = extra_info + cachefields
|
||||
|
||||
for tid in taskdata.taskentries:
|
||||
fn = bb.runqueue.fn_from_tid(tid)
|
||||
for task in xrange(len(tasks_fnid)):
|
||||
fnid = tasks_fnid[task]
|
||||
fn = taskdata.fn_index[fnid]
|
||||
pn = self.recipecache.pkg_fn[fn]
|
||||
|
||||
if pn not in depend_tree["pn"]:
|
||||
@@ -823,27 +849,33 @@ class BBCooker:
|
||||
for ei in extra_info:
|
||||
depend_tree["pn"][pn][ei] = vars(self.recipecache)[ei][fn]
|
||||
|
||||
if fn not in seen_fns:
|
||||
seen_fns.append(fn)
|
||||
if fnid not in seen_fnids:
|
||||
seen_fnids.append(fnid)
|
||||
|
||||
depend_tree["depends"][pn] = []
|
||||
for item in taskdata.depids[fn]:
|
||||
for dep in taskdata.depids[fnid]:
|
||||
item = taskdata.build_names_index[dep]
|
||||
pn_provider = ""
|
||||
if dep in taskdata.build_targets and taskdata.build_targets[dep]:
|
||||
fn_provider = taskdata.build_targets[dep][0]
|
||||
targetid = taskdata.getbuild_id(item)
|
||||
if targetid in taskdata.build_targets and taskdata.build_targets[targetid]:
|
||||
id = taskdata.build_targets[targetid][0]
|
||||
fn_provider = taskdata.fn_index[id]
|
||||
pn_provider = self.recipecache.pkg_fn[fn_provider]
|
||||
else:
|
||||
pn_provider = item
|
||||
depend_tree["depends"][pn].append(pn_provider)
|
||||
|
||||
depend_tree["rdepends-pn"][pn] = []
|
||||
for rdep in taskdata.rdepids[fn]:
|
||||
for rdep in taskdata.rdepids[fnid]:
|
||||
item = taskdata.run_names_index[rdep]
|
||||
pn_rprovider = ""
|
||||
if rdep in taskdata.run_targets and taskdata.run_targets[rdep]:
|
||||
fn_rprovider = taskdata.run_targets[rdep][0]
|
||||
targetid = taskdata.getrun_id(item)
|
||||
if targetid in taskdata.run_targets and taskdata.run_targets[targetid]:
|
||||
id = taskdata.run_targets[targetid][0]
|
||||
fn_rprovider = taskdata.fn_index[id]
|
||||
pn_rprovider = self.recipecache.pkg_fn[fn_rprovider]
|
||||
else:
|
||||
pn_rprovider = rdep
|
||||
pn_rprovider = item
|
||||
depend_tree["rdepends-pn"][pn].append(pn_rprovider)
|
||||
|
||||
depend_tree["rdepends-pkg"].update(rdepends)
|
||||
@@ -868,8 +900,8 @@ class BBCooker:
|
||||
depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
|
||||
|
||||
# Prints a flattened form of package-depends below where subpackages of a package are merged into the main pn
|
||||
depends_file = open('pn-depends.dot', 'w' )
|
||||
buildlist_file = open('pn-buildlist', 'w' )
|
||||
depends_file = file('pn-depends.dot', 'w' )
|
||||
buildlist_file = file('pn-buildlist', 'w' )
|
||||
print("digraph depends {", file=depends_file)
|
||||
for pn in depgraph["pn"]:
|
||||
fn = depgraph["pn"][pn]["filename"]
|
||||
@@ -885,10 +917,9 @@ class BBCooker:
|
||||
for rdepend in depgraph["rdepends-pn"][pn]:
|
||||
print('"%s" -> "%s" [style=dashed]' % (pn, rdepend), file=depends_file)
|
||||
print("}", file=depends_file)
|
||||
depends_file.close()
|
||||
logger.info("PN dependencies saved to 'pn-depends.dot'")
|
||||
|
||||
depends_file = open('package-depends.dot', 'w' )
|
||||
depends_file = file('package-depends.dot', 'w' )
|
||||
print("digraph depends {", file=depends_file)
|
||||
for package in depgraph["packages"]:
|
||||
pn = depgraph["packages"][package]["pn"]
|
||||
@@ -907,10 +938,9 @@ class BBCooker:
|
||||
for rdepend in depgraph["rrecs-pkg"][package]:
|
||||
print('"%s" -> "%s" [style=dotted]' % (package, rdepend), file=depends_file)
|
||||
print("}", file=depends_file)
|
||||
depends_file.close()
|
||||
logger.info("Package dependencies saved to 'package-depends.dot'")
|
||||
|
||||
tdepends_file = open('task-depends.dot', 'w' )
|
||||
tdepends_file = file('task-depends.dot', 'w' )
|
||||
print("digraph depends {", file=tdepends_file)
|
||||
for task in depgraph["tdepends"]:
|
||||
(pn, taskname) = task.rsplit(".", 1)
|
||||
@@ -920,14 +950,13 @@ class BBCooker:
|
||||
for dep in depgraph["tdepends"][task]:
|
||||
print('"%s" -> "%s"' % (task, dep), file=tdepends_file)
|
||||
print("}", file=tdepends_file)
|
||||
tdepends_file.close()
|
||||
logger.info("Task dependencies saved to 'task-depends.dot'")
|
||||
|
||||
def show_appends_with_no_recipes(self):
|
||||
# Determine which bbappends haven't been applied
|
||||
|
||||
# First get list of recipes, including skipped
|
||||
recipefns = list(self.recipecache.pkg_fn.keys())
|
||||
recipefns = self.recipecache.pkg_fn.keys()
|
||||
recipefns.extend(self.skiplist.keys())
|
||||
|
||||
# Work out list of bbappends that have been applied
|
||||
@@ -1126,7 +1155,7 @@ class BBCooker:
|
||||
deplist = bb.utils.explode_dep_versions2(deps)
|
||||
except bb.utils.VersionStringException as vse:
|
||||
bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
|
||||
for dep, oplist in list(deplist.items()):
|
||||
for dep, oplist in deplist.iteritems():
|
||||
if dep in collection_list:
|
||||
for opstr in oplist:
|
||||
layerver = self.data.getVar("LAYERVERSION_%s" % dep, True)
|
||||
@@ -1324,7 +1353,7 @@ class BBCooker:
|
||||
return False
|
||||
|
||||
if not retval:
|
||||
bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.expanded_data)
|
||||
bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, item, failures, interrupted), self.expanded_data)
|
||||
self.command.finishAsyncCommand(msg)
|
||||
return False
|
||||
if retval is True:
|
||||
@@ -1360,7 +1389,7 @@ class BBCooker:
|
||||
return False
|
||||
|
||||
if not retval:
|
||||
bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.data)
|
||||
bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, targets, failures, interrupted), self.data)
|
||||
self.command.finishAsyncCommand(msg)
|
||||
return False
|
||||
if retval is True:
|
||||
@@ -1550,7 +1579,7 @@ class BBCooker:
|
||||
ignore = (self.expanded_data.getVar("ASSUME_PROVIDED", True) or "").split()
|
||||
for pkg in pkgs_to_build:
|
||||
if pkg in ignore:
|
||||
parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
|
||||
parselog.warn("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
|
||||
|
||||
if 'world' in pkgs_to_build:
|
||||
bb.providers.buildWorldTargetList(self.recipecache)
|
||||
@@ -1559,7 +1588,7 @@ class BBCooker:
|
||||
pkgs_to_build.append(t)
|
||||
|
||||
if 'universe' in pkgs_to_build:
|
||||
parselog.warning("The \"universe\" target is only intended for testing and may produce errors.")
|
||||
parselog.warn("The \"universe\" target is only intended for testing and may produce errors.")
|
||||
parselog.debug(1, "collating packages for \"universe\"")
|
||||
pkgs_to_build.remove('universe')
|
||||
for t in self.recipecache.universe_target:
|
||||
@@ -1841,7 +1870,7 @@ class CookerCollectFiles(object):
|
||||
for collection, pattern, regex, _ in self.bbfile_config_priorities:
|
||||
if regex in unmatched:
|
||||
if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection, True) != '1':
|
||||
collectlog.warning("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
|
||||
collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
|
||||
|
||||
return priorities
|
||||
|
||||
@@ -1862,7 +1891,7 @@ class Feeder(multiprocessing.Process):
|
||||
while True:
|
||||
try:
|
||||
quit = self.quit.get_nowait()
|
||||
except queue.Empty:
|
||||
except Queue.Empty:
|
||||
pass
|
||||
else:
|
||||
if quit == 'cancel':
|
||||
@@ -1876,7 +1905,7 @@ class Feeder(multiprocessing.Process):
|
||||
|
||||
try:
|
||||
self.to_parsers.put(job, timeout=0.5)
|
||||
except queue.Full:
|
||||
except Queue.Full:
|
||||
self.jobs.insert(0, job)
|
||||
continue
|
||||
|
||||
@@ -1916,7 +1945,7 @@ class Parser(multiprocessing.Process):
|
||||
while True:
|
||||
try:
|
||||
self.quit.get_nowait()
|
||||
except queue.Empty:
|
||||
except Queue.Empty:
|
||||
pass
|
||||
else:
|
||||
self.results.cancel_join_thread()
|
||||
@@ -1927,7 +1956,7 @@ class Parser(multiprocessing.Process):
|
||||
else:
|
||||
try:
|
||||
job = self.jobs.get(timeout=0.25)
|
||||
except queue.Empty:
|
||||
except Queue.Empty:
|
||||
continue
|
||||
|
||||
if job is None:
|
||||
@@ -1936,7 +1965,7 @@ class Parser(multiprocessing.Process):
|
||||
|
||||
try:
|
||||
self.results.put(result, timeout=0.25)
|
||||
except queue.Full:
|
||||
except Queue.Full:
|
||||
pending.append(result)
|
||||
|
||||
def parse(self, filename, appends, caches_array):
|
||||
@@ -1994,7 +2023,7 @@ class CookerParser(object):
|
||||
else:
|
||||
self.fromcache.append((filename, appends))
|
||||
self.toparse = self.total - len(self.fromcache)
|
||||
self.progress_chunk = int(max(self.toparse / 100, 1))
|
||||
self.progress_chunk = max(self.toparse / 100, 1)
|
||||
|
||||
self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or
|
||||
multiprocessing.cpu_count()), len(self.willparse))
|
||||
@@ -2089,7 +2118,7 @@ class CookerParser(object):
|
||||
|
||||
try:
|
||||
result = self.result_queue.get(timeout=0.25)
|
||||
except queue.Empty:
|
||||
except Queue.Empty:
|
||||
pass
|
||||
else:
|
||||
value = result[1]
|
||||
@@ -2102,7 +2131,7 @@ class CookerParser(object):
|
||||
result = []
|
||||
parsed = None
|
||||
try:
|
||||
parsed, result = next(self.results)
|
||||
parsed, result = self.results.next()
|
||||
except StopIteration:
|
||||
self.shutdown()
|
||||
return False
|
||||
@@ -2124,18 +2153,15 @@ class CookerParser(object):
|
||||
return False
|
||||
except bb.data_smart.ExpansionError as exc:
|
||||
self.error += 1
|
||||
bbdir = os.path.dirname(__file__) + os.sep
|
||||
etype, value, _ = sys.exc_info()
|
||||
tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
|
||||
logger.error('ExpansionError during parsing %s', value.recipe,
|
||||
exc_info=(etype, value, tb))
|
||||
_, value, _ = sys.exc_info()
|
||||
logger.error('ExpansionError during parsing %s: %s', value.recipe, str(exc))
|
||||
self.shutdown(clean=False)
|
||||
return False
|
||||
except Exception as exc:
|
||||
self.error += 1
|
||||
etype, value, tb = sys.exc_info()
|
||||
if hasattr(value, "recipe"):
|
||||
logger.error('Unable to parse %s' % value.recipe,
|
||||
logger.error('Unable to parse %s', value.recipe,
|
||||
exc_info=(etype, value, exc.traceback))
|
||||
else:
|
||||
# Most likely, an exception occurred during raising an exception
|
||||
|
||||
@@ -22,11 +22,9 @@
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import os, sys
|
||||
from functools import wraps
|
||||
import logging
|
||||
import bb
|
||||
from bb import data
|
||||
import bb.parse
|
||||
@@ -194,8 +192,7 @@ def catch_parse_error(func):
|
||||
fn, _, _, _ = traceback.extract_tb(tb, 1)[0]
|
||||
if not fn.startswith(bbdir):
|
||||
break
|
||||
parselog.critical("Unable to parse %s" % fn, exc_info=(exc_class, exc, tb))
|
||||
sys.exit(1)
|
||||
parselog.critical("Unable to parse %s", fn, exc_info=(exc_class, exc, tb))
|
||||
except bb.parse.ParseError as exc:
|
||||
parselog.critical(str(exc))
|
||||
sys.exit(1)
|
||||
@@ -292,22 +289,15 @@ class CookerDataBuilder(object):
|
||||
data = bb.data.createCopy(data)
|
||||
approved = bb.utils.approved_variables()
|
||||
for layer in layers:
|
||||
if not os.path.isdir(layer):
|
||||
parselog.critical("Layer directory '%s' does not exist! "
|
||||
"Please check BBLAYERS in %s" % (layer, layerconf))
|
||||
sys.exit(1)
|
||||
parselog.debug(2, "Adding layer %s", layer)
|
||||
if 'HOME' in approved and '~' in layer:
|
||||
layer = os.path.expanduser(layer)
|
||||
if layer.endswith('/'):
|
||||
layer = layer.rstrip('/')
|
||||
data.setVar('LAYERDIR', layer)
|
||||
data.setVar('LAYERDIR_RE', re.escape(layer))
|
||||
data = parse_config_file(os.path.join(layer, "conf", "layer.conf"), data)
|
||||
data.expandVarref('LAYERDIR')
|
||||
data.expandVarref('LAYERDIR_RE')
|
||||
|
||||
data.delVar('LAYERDIR_RE')
|
||||
data.delVar('LAYERDIR')
|
||||
|
||||
if not data.getVar("BBPATH", True):
|
||||
|
||||
@@ -178,8 +178,8 @@ def createDaemon(function, logfile):
|
||||
# os.dup2(0, 2) # standard error (2)
|
||||
|
||||
|
||||
si = open('/dev/null', 'r')
|
||||
so = open(logfile, 'w')
|
||||
si = file('/dev/null', 'r')
|
||||
so = file(logfile, 'w')
|
||||
se = so
|
||||
|
||||
|
||||
|
||||
@@ -182,12 +182,12 @@ def inheritFromOS(d, savedenv, permitted):
|
||||
|
||||
def emit_var(var, o=sys.__stdout__, d = init(), all=False):
|
||||
"""Emit a variable to be sourced by a shell."""
|
||||
func = d.getVarFlag(var, "func", False)
|
||||
if d.getVarFlag(var, 'python', False) and func:
|
||||
if d.getVarFlag(var, "python", False):
|
||||
return False
|
||||
|
||||
export = d.getVarFlag(var, "export", False)
|
||||
unexport = d.getVarFlag(var, "unexport", False)
|
||||
func = d.getVarFlag(var, "func", False)
|
||||
if not all and not export and not unexport and not func:
|
||||
return False
|
||||
|
||||
@@ -339,7 +339,7 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
|
||||
deps |= parser.references
|
||||
deps = deps | (keys & parser.execs)
|
||||
return deps, value
|
||||
varflags = d.getVarFlags(key, ["vardeps", "vardepvalue", "vardepsexclude", "vardepvalueexclude", "exports", "postfuncs", "prefuncs", "lineno", "filename"]) or {}
|
||||
varflags = d.getVarFlags(key, ["vardeps", "vardepvalue", "vardepsexclude", "vardepvalueexclude", "postfuncs", "prefuncs", "lineno", "filename"]) or {}
|
||||
vardeps = varflags.get("vardeps")
|
||||
value = d.getVar(key, False)
|
||||
|
||||
@@ -364,7 +364,7 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
|
||||
if varflags.get("python"):
|
||||
parser = bb.codeparser.PythonParser(key, logger)
|
||||
if value and "\t" in value:
|
||||
logger.warning("Variable %s contains tabs, please remove these (%s)" % (key, d.getVar("FILE", True)))
|
||||
logger.warn("Variable %s contains tabs, please remove these (%s)" % (key, d.getVar("FILE", True)))
|
||||
parser.parse_python(value, filename=varflags.get("filename"), lineno=varflags.get("lineno"))
|
||||
deps = deps | parser.references
|
||||
deps = deps | (keys & parser.execs)
|
||||
@@ -383,8 +383,6 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
|
||||
deps = deps | set(varflags["prefuncs"].split())
|
||||
if "postfuncs" in varflags:
|
||||
deps = deps | set(varflags["postfuncs"].split())
|
||||
if "exports" in varflags:
|
||||
deps = deps | set(varflags["exports"].split())
|
||||
else:
|
||||
parser = d.expandWithRefs(value, key)
|
||||
deps |= parser.references
|
||||
|
||||
@@ -135,7 +135,7 @@ class VariableParse:
|
||||
self.contains[k] = parser.contains[k].copy()
|
||||
else:
|
||||
self.contains[k].update(parser.contains[k])
|
||||
value = utils.better_eval(codeobj, DataContext(self.d), {'d' : self.d})
|
||||
value = utils.better_eval(codeobj, DataContext(self.d))
|
||||
return str(value)
|
||||
|
||||
|
||||
@@ -372,7 +372,7 @@ class DataSmart(MutableMapping):
|
||||
|
||||
def expandWithRefs(self, s, varname):
|
||||
|
||||
if not isinstance(s, str): # sanity check
|
||||
if not isinstance(s, basestring): # sanity check
|
||||
return VariableParse(varname, self, s)
|
||||
|
||||
if varname and varname in self.expand_cache:
|
||||
@@ -397,7 +397,8 @@ class DataSmart(MutableMapping):
|
||||
except bb.parse.SkipRecipe:
|
||||
raise
|
||||
except Exception as exc:
|
||||
raise ExpansionError(varname, s, exc) from exc
|
||||
exc_class, exc, tb = sys.exc_info()
|
||||
raise ExpansionError, ExpansionError(varname, s, exc), tb
|
||||
|
||||
varparse.value = s
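Editor's note: a short sketch (not from the patch) showing what the "raise ... from exc" form above does; the ExpansionError here is a simplified stand-in for bb.data_smart.ExpansionError, not the real class.

# Sketch only: Python 3 exception chaining replacing the old three-argument raise.
class ExpansionError(Exception):
    """Simplified stand-in for bb.data_smart.ExpansionError."""
    def __init__(self, varname, expression, exception):
        self.exception = exception
        super().__init__("Failure expanding %s (%s): %s" % (varname, expression, exception))

def expand(varname, expression):
    try:
        return eval(expression)   # stand-in for the real variable expansion
    except Exception as exc:
        raise ExpansionError(varname, expression, exc) from exc

try:
    expand("FOO", "1 / 0")
except ExpansionError as err:
    assert isinstance(err.__cause__, ZeroDivisionError)   # original error preserved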
|
||||
|
||||
@@ -916,7 +917,7 @@ class DataSmart(MutableMapping):
|
||||
yield k
|
||||
|
||||
def __len__(self):
|
||||
return len(frozenset(iter(self)))
|
||||
return len(frozenset(self))
|
||||
|
||||
def __getitem__(self, item):
|
||||
value = self.getVar(item, False)
|
||||
@@ -965,4 +966,4 @@ class DataSmart(MutableMapping):
|
||||
data.update({i:value})
|
||||
|
||||
data_str = str([(k, data[k]) for k in sorted(data.keys())])
|
||||
return hashlib.md5(data_str.encode("utf-8")).hexdigest()
|
||||
return hashlib.md5(data_str).hexdigest()
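Editor's note: a one-line sketch (not from the patch) of why the .encode("utf-8") is added above: hashlib in Python 3 only accepts bytes, not str.

# Sketch only: hashing a string representation under Python 3.
import hashlib
data_str = str([("A", "1"), ("B", "2")])
print(hashlib.md5(data_str.encode("utf-8")).hexdigest())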
|
||||
|
||||
@@ -24,7 +24,10 @@ BitBake build tools.
|
||||
|
||||
import os, sys
|
||||
import warnings
|
||||
import pickle
|
||||
try:
|
||||
import cPickle as pickle
|
||||
except ImportError:
|
||||
import pickle
|
||||
import logging
|
||||
import atexit
|
||||
import traceback
|
||||
@@ -69,16 +72,11 @@ _catchall_handlers = {}
|
||||
_eventfilter = None
|
||||
_uiready = False
|
||||
|
||||
if hasattr(__builtins__, '__setitem__'):
|
||||
builtins = __builtins__
|
||||
else:
|
||||
builtins = __builtins__.__dict__
|
||||
|
||||
def execute_handler(name, handler, event, d):
|
||||
event.data = d
|
||||
addedd = False
|
||||
if 'd' not in builtins:
|
||||
builtins['d'] = d
|
||||
if 'd' not in __builtins__:
|
||||
__builtins__['d'] = d
|
||||
addedd = True
|
||||
try:
|
||||
ret = handler(event)
|
||||
@@ -96,7 +94,7 @@ def execute_handler(name, handler, event, d):
|
||||
finally:
|
||||
del event.data
|
||||
if addedd:
|
||||
del builtins['d']
|
||||
del __builtins__['d']
|
||||
|
||||
def fire_class_handlers(event, d):
|
||||
if isinstance(event, logging.LogRecord):
|
||||
@@ -104,7 +102,7 @@ def fire_class_handlers(event, d):
|
||||
|
||||
eid = str(event.__class__)[8:-2]
|
||||
evt_hmap = _event_handler_map.get(eid, {})
|
||||
for name, handler in list(_handlers.items()):
|
||||
for name, handler in _handlers.iteritems():
|
||||
if name in _catchall_handlers or name in evt_hmap:
|
||||
if _eventfilter:
|
||||
if not _eventfilter(name, handler, event, d):
|
||||
@@ -119,22 +117,29 @@ def print_ui_queue():
|
||||
logger = logging.getLogger("BitBake")
|
||||
if not _uiready:
|
||||
from bb.msg import BBLogFormatter
|
||||
console = logging.StreamHandler(sys.stdout)
|
||||
console.setFormatter(BBLogFormatter("%(levelname)s: %(message)s"))
|
||||
logger.handlers = [console]
|
||||
stdout = logging.StreamHandler(sys.stdout)
|
||||
stderr = logging.StreamHandler(sys.stderr)
|
||||
formatter = BBLogFormatter("%(levelname)s: %(message)s")
|
||||
stdout.setFormatter(formatter)
|
||||
stderr.setFormatter(formatter)
|
||||
|
||||
# First check to see if we have any proper messages
|
||||
msgprint = False
|
||||
for event in ui_queue:
|
||||
for event in ui_queue[:]:
|
||||
if isinstance(event, logging.LogRecord):
|
||||
if event.levelno > logging.DEBUG:
|
||||
if event.levelno >= logging.WARNING:
|
||||
logger.addHandler(stderr)
|
||||
else:
|
||||
logger.addHandler(stdout)
|
||||
logger.handle(event)
|
||||
msgprint = True
|
||||
if msgprint:
|
||||
return
|
||||
|
||||
# Nope, so just print all of the messages we have (including debug messages)
|
||||
for event in ui_queue:
|
||||
logger.addHandler(stdout)
|
||||
for event in ui_queue[:]:
|
||||
if isinstance(event, logging.LogRecord):
|
||||
logger.handle(event)
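Editor's note: an illustrative sketch (not the patch's code) of one common way to get the same warnings-to-stderr, everything-else-to-stdout split using logging filters instead of attaching handlers per record; the logger name is a placeholder.

# Sketch only: level-based routing with two StreamHandlers and a filter.
import logging
import sys

stdout = logging.StreamHandler(sys.stdout)
stdout.addFilter(lambda record: record.levelno < logging.WARNING)
stderr = logging.StreamHandler(sys.stderr)
stderr.setLevel(logging.WARNING)

log = logging.getLogger("BitBake.example")
log.setLevel(logging.DEBUG)
log.addHandler(stdout)
log.addHandler(stderr)
log.info("goes to stdout")
log.warning("goes to stderr")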
|
||||
|
||||
@@ -189,7 +194,7 @@ def register(name, handler, mask=None, filename=None, lineno=None):
|
||||
|
||||
if handler is not None:
|
||||
# handle string containing python code
|
||||
if isinstance(handler, str):
|
||||
if isinstance(handler, basestring):
|
||||
tmp = "def %s(e):\n%s" % (name, handler)
|
||||
try:
|
||||
code = bb.methodpool.compile_cache(tmp)
|
||||
@@ -227,13 +232,6 @@ def remove(name, handler):
|
||||
"""Remove an Event handler"""
|
||||
_handlers.pop(name)
|
||||
|
||||
def get_handlers():
|
||||
return _handlers
|
||||
|
||||
def set_handlers(handlers):
|
||||
global _handlers
|
||||
_handlers = handlers
|
||||
|
||||
def set_eventfilter(func):
|
||||
global _eventfilter
|
||||
_eventfilter = func
|
||||
@@ -614,9 +612,8 @@ class LogHandler(logging.Handler):
|
||||
if hasattr(tb, 'tb_next'):
|
||||
tb = list(bb.exceptions.extract_traceback(tb, context=3))
|
||||
# Need to turn the value into something the logging system can pickle
|
||||
record.bb_exc_info = (etype, value, tb)
|
||||
record.bb_exc_formatted = bb.exceptions.format_exception(etype, value, tb, limit=5)
|
||||
value = str(value)
|
||||
record.bb_exc_info = (etype, value, tb)
|
||||
record.exc_info = None
|
||||
fire(record, None)
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
|
||||
from __future__ import absolute_import
|
||||
import inspect
|
||||
import traceback
|
||||
import bb.namedtuple_with_abc
|
||||
@@ -86,6 +86,6 @@ def format_exception(etype, value, tb, context=1, limit=None, formatter=None):
|
||||
|
||||
def to_string(exc):
|
||||
if isinstance(exc, SystemExit):
|
||||
if not isinstance(exc.code, str):
|
||||
if not isinstance(exc.code, basestring):
|
||||
return 'Exited with "%d"' % exc.code
|
||||
return str(exc)
|
||||
|
||||
@@ -25,26 +25,31 @@ BitBake build tools.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig

from __future__ import absolute_import
from __future__ import print_function
import os, re
import signal
import logging
import urllib.request, urllib.parse, urllib.error
if 'git' not in urllib.parse.uses_netloc:
    urllib.parse.uses_netloc.append('git')
import operator
import collections
import subprocess
import pickle
import urllib
import urlparse
import bb.persist_data, bb.utils
import bb.checksum
from bb import data
import bb.process
import subprocess

__version__ = "2"
_checksum_cache = bb.checksum.FileChecksumCache()

logger = logging.getLogger("BitBake.Fetcher")

try:
    import cPickle as pickle
except ImportError:
    import pickle
    logger.info("Importing cPickle failed. "
                "Falling back to a very slow implementation.")

||||
class BBFetchException(Exception):
|
||||
"""Class all fetch exceptions inherit from"""
|
||||
def __init__(self, message):
|
||||
@@ -226,14 +231,14 @@ class URI(object):
|
||||
# them are not quite RFC compliant.
|
||||
uri, param_str = (uri.split(";", 1) + [None])[:2]
|
||||
|
||||
urlp = urllib.parse.urlparse(uri)
|
||||
urlp = urlparse.urlparse(uri)
|
||||
self.scheme = urlp.scheme
|
||||
|
||||
reparse = 0
|
||||
|
||||
# Coerce urlparse to make URI scheme use netloc
|
||||
if not self.scheme in urllib.parse.uses_netloc:
|
||||
urllib.parse.uses_params.append(self.scheme)
|
||||
if not self.scheme in urlparse.uses_netloc:
|
||||
urlparse.uses_params.append(self.scheme)
|
||||
reparse = 1
|
||||
|
||||
# Make urlparse happy(/ier) by converting local resources
|
||||
@@ -244,7 +249,7 @@ class URI(object):
|
||||
reparse = 1
|
||||
|
||||
if reparse:
|
||||
urlp = urllib.parse.urlparse(uri)
|
||||
urlp = urlparse.urlparse(uri)
|
||||
|
||||
# Identify if the URI is relative or not
|
||||
if urlp.scheme in self._relative_schemes and \
|
||||
@@ -260,7 +265,7 @@ class URI(object):
|
||||
if urlp.password:
|
||||
self.userinfo += ':%s' % urlp.password
|
||||
|
||||
self.path = urllib.parse.unquote(urlp.path)
|
||||
self.path = urllib.unquote(urlp.path)
|
||||
|
||||
if param_str:
|
||||
self.params = self._param_str_split(param_str, ";")
|
||||
@@ -292,7 +297,7 @@ class URI(object):
|
||||
if self.query else '')
|
||||
|
||||
def _param_str_split(self, string, elmdelim, kvdelim="="):
|
||||
ret = collections.OrderedDict()
|
||||
ret = {}
|
||||
for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim)]:
|
||||
ret[k] = v
|
||||
return ret
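Editor's note: a small sketch (not from the patch) of why an ordered mapping is used for URL parameters above: it lets a decode/encode round trip preserve the parameter order. The SRC_URI-style string is made up; on Python 3.7+ a plain dict would also keep insertion order, but OrderedDict makes the intent explicit.

# Sketch only: parameter order survives the round trip with an ordered mapping.
import collections

params = collections.OrderedDict()
for k, v in (x.split("=", 1) for x in "protocol=https;branch=master;tag=v1".split(";")):
    params[k] = v
print(";".join("%s=%s" % (k, v) for k, v in params.items()))
# -> protocol=https;branch=master;tag=v1 (same order as the input)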
|
||||
@@ -308,11 +313,11 @@ class URI(object):
|
||||
|
||||
@property
|
||||
def path_quoted(self):
|
||||
return urllib.parse.quote(self.path)
|
||||
return urllib.quote(self.path)
|
||||
|
||||
@path_quoted.setter
|
||||
def path_quoted(self, path):
|
||||
self.path = urllib.parse.unquote(path)
|
||||
self.path = urllib.unquote(path)
|
||||
|
||||
@property
|
||||
def path(self):
|
||||
@@ -385,7 +390,7 @@ def decodeurl(url):
|
||||
user = ''
|
||||
pswd = ''
|
||||
|
||||
p = collections.OrderedDict()
|
||||
p = {}
|
||||
if parm:
|
||||
for s in parm.split(';'):
|
||||
if s:
|
||||
@@ -394,7 +399,7 @@ def decodeurl(url):
|
||||
s1, s2 = s.split('=')
|
||||
p[s1] = s2
|
||||
|
||||
return type, host, urllib.parse.unquote(path), user, pswd, p
|
||||
return type, host, urllib.unquote(path), user, pswd, p
|
||||
|
||||
def encodeurl(decoded):
|
||||
"""Encodes a URL from tokens (scheme, network location, path,
|
||||
@@ -418,7 +423,7 @@ def encodeurl(decoded):
|
||||
# Standardise path to ensure comparisons work
|
||||
while '//' in path:
|
||||
path = path.replace("//", "/")
|
||||
url += "%s" % urllib.parse.quote(path)
|
||||
url += "%s" % urllib.quote(path)
|
||||
if p:
|
||||
for parm in p:
|
||||
url += ";%s=%s" % (parm, p[parm])
|
||||
@@ -581,12 +586,12 @@ def verify_checksum(ud, d, precomputed={}):
|
||||
raise NoChecksumError('Missing SRC_URI checksum', ud.url)
|
||||
|
||||
# Log missing sums so user can more easily add them
|
||||
logger.warning('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n'
|
||||
'SRC_URI[%s] = "%s"',
|
||||
ud.localpath, ud.md5_name, md5data)
|
||||
logger.warning('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n'
|
||||
'SRC_URI[%s] = "%s"',
|
||||
ud.localpath, ud.sha256_name, sha256data)
|
||||
logger.warn('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n'
|
||||
'SRC_URI[%s] = "%s"',
|
||||
ud.localpath, ud.md5_name, md5data)
|
||||
logger.warn('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n'
|
||||
'SRC_URI[%s] = "%s"',
|
||||
ud.localpath, ud.sha256_name, sha256data)
|
||||
|
||||
# We want to alert the user if a checksum is defined in the recipe but
|
||||
# it does not match.
|
||||
@@ -654,9 +659,9 @@ def verify_donestamp(ud, d, origud=None):
|
||||
# files to those containing the checksums.
|
||||
if not isinstance(e, EOFError):
|
||||
# Ignore errors, they aren't fatal
|
||||
logger.warning("Couldn't load checksums from donestamp %s: %s "
|
||||
"(msg: %s)" % (ud.donestamp, type(e).__name__,
|
||||
str(e)))
|
||||
logger.warn("Couldn't load checksums from donestamp %s: %s "
|
||||
"(msg: %s)" % (ud.donestamp, type(e).__name__,
|
||||
str(e)))
|
||||
|
||||
try:
|
||||
checksums = verify_checksum(ud, d, precomputed_checksums)
|
||||
@@ -670,8 +675,8 @@ def verify_donestamp(ud, d, origud=None):
|
||||
except ChecksumError as e:
|
||||
# Checksums failed to verify, trigger re-download and remove the
|
||||
# incorrect stamp file.
|
||||
logger.warning("Checksum mismatch for local file %s\n"
|
||||
"Cleaning and trying again." % ud.localpath)
|
||||
logger.warn("Checksum mismatch for local file %s\n"
|
||||
"Cleaning and trying again." % ud.localpath)
|
||||
if os.path.exists(ud.localpath):
|
||||
rename_bad_checksum(ud, e.checksum)
|
||||
bb.utils.remove(ud.donestamp)
|
||||
@@ -703,8 +708,8 @@ def update_stamp(ud, d):
|
||||
except ChecksumError as e:
|
||||
# Checksums failed to verify, trigger re-download and remove the
|
||||
# incorrect stamp file.
|
||||
logger.warning("Checksum mismatch for local file %s\n"
|
||||
"Cleaning and trying again." % ud.localpath)
|
||||
logger.warn("Checksum mismatch for local file %s\n"
|
||||
"Cleaning and trying again." % ud.localpath)
|
||||
if os.path.exists(ud.localpath):
|
||||
rename_bad_checksum(ud, e.checksum)
|
||||
bb.utils.remove(ud.donestamp)
|
||||
@@ -803,8 +808,7 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None):
|
||||
'GIT_SMART_HTTP',
|
||||
'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
|
||||
'SOCKS5_USER', 'SOCKS5_PASSWD',
|
||||
'DBUS_SESSION_BUS_ADDRESS',
|
||||
'P4CONFIG']
|
||||
'DBUS_SESSION_BUS_ADDRESS']
|
||||
|
||||
if not cleanup:
|
||||
cleanup = []
|
||||
@@ -980,8 +984,8 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
|
||||
|
||||
except bb.fetch2.BBFetchException as e:
|
||||
if isinstance(e, ChecksumError):
|
||||
logger.warning("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (ud.url, origud.url))
|
||||
logger.warning(str(e))
|
||||
logger.warn("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (ud.url, origud.url))
|
||||
logger.warn(str(e))
|
||||
if os.path.exists(ud.localpath):
|
||||
rename_bad_checksum(ud, e.checksum)
|
||||
elif isinstance(e, NoChecksumError):
|
||||
@@ -1196,7 +1200,7 @@ class FetchData(object):
|
||||
raise NonLocalMethod()
|
||||
|
||||
if self.parm.get("proto", None) and "protocol" not in self.parm:
|
||||
logger.warning('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN', True))
|
||||
logger.warn('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN', True))
|
||||
self.parm["protocol"] = self.parm.get("proto", None)
|
||||
|
||||
if hasattr(self.method, "urldata_init"):
|
||||
@@ -1393,18 +1397,7 @@ class FetchMethod(object):
|
||||
else:
|
||||
cmd = 'rpm2cpio.sh %s | cpio -id' % (file)
|
||||
elif file.endswith('.deb') or file.endswith('.ipk'):
|
||||
output = subprocess.check_output('ar -t %s' % file, preexec_fn=subprocess_setup, shell=True)
|
||||
datafile = None
|
||||
if output:
|
||||
for line in output.decode().splitlines():
|
||||
if line.startswith('data.tar.'):
|
||||
datafile = line
|
||||
break
|
||||
else:
|
||||
raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar.* file", urldata.url)
|
||||
else:
|
||||
raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url)
|
||||
cmd = 'ar x %s %s && tar --no-same-owner -xpf %s && rm %s' % (file, datafile, datafile, datafile)
|
||||
cmd = 'ar -p %s data.tar.gz | zcat | tar --no-same-owner -xpf -' % file
|
||||
elif file.endswith('.tar.7z'):
|
||||
cmd = '7z x -so %s | tar xf - ' % file
|
||||
elif file.endswith('.7z'):
|
||||
@@ -1431,7 +1424,7 @@ class FetchMethod(object):
|
||||
if urlpath.find("/") != -1:
|
||||
destdir = urlpath.rsplit("/", 1)[0] + '/'
|
||||
bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir))
|
||||
cmd = 'cp -fpPR %s %s' % (file, destdir)
|
||||
cmd = 'cp -fpPRH %s %s' % (file, destdir)
|
||||
|
||||
if not cmd:
|
||||
return
|
||||
@@ -1603,14 +1596,14 @@ class Fetch(object):
|
||||
|
||||
except BBFetchException as e:
|
||||
if isinstance(e, ChecksumError):
|
||||
logger.warning("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
|
||||
logger.warn("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
|
||||
logger.debug(1, str(e))
|
||||
if os.path.exists(ud.localpath):
|
||||
rename_bad_checksum(ud, e.checksum)
|
||||
elif isinstance(e, NoChecksumError):
|
||||
raise
|
||||
else:
|
||||
logger.warning('Failed to fetch URL %s, attempting MIRRORS if available' % u)
|
||||
logger.warn('Failed to fetch URL %s, attempting MIRRORS if available' % u)
|
||||
logger.debug(1, str(e))
|
||||
firsterr = e
|
||||
# Remove any incomplete fetch
|
||||
@@ -1743,7 +1736,7 @@ class FetchConnectionCache(object):
|
||||
del self.cache[cn]
|
||||
|
||||
def close_connections(self):
|
||||
for cn in list(self.cache.keys()):
|
||||
for cn in self.cache.keys():
|
||||
self.cache[cn].close()
|
||||
del self.cache[cn]
|
||||
|
||||
|
||||
@@ -274,7 +274,7 @@ class Git(FetchMethod):
|
||||
branchname = ud.branches[ud.names[0]]
|
||||
runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \
|
||||
ud.revisions[ud.names[0]]), d)
|
||||
runfetchcmd("%s branch --set-upstream %s origin/%s" % (ud.basecmd, branchname, \
|
||||
runfetchcmd("%s branch %s --set-upstream-to origin/%s" % (ud.basecmd, branchname, \
|
||||
branchname), d)
|
||||
else:
|
||||
runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d)
|
||||
@@ -350,10 +350,9 @@ class Git(FetchMethod):
|
||||
head = "refs/heads/%s" % ud.unresolvedrev[name]
|
||||
tag = "refs/tags/%s" % ud.unresolvedrev[name]
|
||||
for s in [head, tag + "^{}", tag]:
|
||||
for l in output.strip().split('\n'):
|
||||
sha1, ref = l.split()
|
||||
if s == ref:
|
||||
return sha1
|
||||
for l in output.split('\n'):
|
||||
if s in l:
|
||||
return l.split()[0]
|
||||
raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output for %s" % \
|
||||
(ud.unresolvedrev[name], ud.host+ud.path))
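Editor's note: a standalone sketch (not the patch itself) of the exact-ref matching done in the new code above, instead of the old substring test; sample_output is made-up git ls-remote output.

# Sketch only: resolve a branch or tag name to a SHA1 from ls-remote output.
sample_output = """3f4b2c1d0a9e8f7a6b5c4d3e2f1a0b9c8d7e6f5a\trefs/heads/master
9c8d7e6f5a4b3c2d1e0f9a8b7c6d5e4f3a2b1c0d\trefs/tags/v1.0"""

def resolve(output, rev):
    for want in ("refs/heads/%s" % rev, "refs/tags/%s^{}" % rev, "refs/tags/%s" % rev):
        for line in output.strip().split('\n'):
            sha1, ref = line.split()
            if ref == want:          # exact match, not a substring test
                return sha1
    raise KeyError(rev)

print(resolve(sample_output, "master"))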
|
||||
|
||||
|
||||
@@ -26,7 +26,7 @@ BitBake build tools.
|
||||
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
|
||||
|
||||
import os
|
||||
import urllib.request, urllib.parse, urllib.error
|
||||
import urllib
|
||||
import bb
|
||||
import bb.utils
|
||||
from bb import data
|
||||
@@ -42,7 +42,7 @@ class Local(FetchMethod):
|
||||
|
||||
def urldata_init(self, ud, d):
|
||||
# We don't set localfile as for this fetcher the file is already local!
|
||||
ud.decodedurl = urllib.parse.unquote(ud.url.split("://")[1].split(";")[0])
|
||||
ud.decodedurl = urllib.unquote(ud.url.split("://")[1].split(";")[0])
|
||||
ud.basename = os.path.basename(ud.decodedurl)
|
||||
ud.basepath = ud.decodedurl
|
||||
ud.needdonestamp = False
|
||||
|
||||
@@ -20,7 +20,7 @@ Usage in the recipe:
|
||||
|
||||
import os
|
||||
import sys
|
||||
import urllib.request, urllib.parse, urllib.error
|
||||
import urllib
|
||||
import json
|
||||
import subprocess
|
||||
import signal
|
||||
@@ -196,9 +196,9 @@ class Npm(FetchMethod):
|
||||
optdepsfound[dep] = dependencies[dep]
|
||||
else:
|
||||
depsfound[dep] = dependencies[dep]
|
||||
for dep, version in optdepsfound.items():
|
||||
for dep, version in optdepsfound.iteritems():
|
||||
self._getdependencies(dep, data[pkg]['deps'], version, d, ud, optional=True)
|
||||
for dep, version in depsfound.items():
|
||||
for dep, version in depsfound.iteritems():
|
||||
self._getdependencies(dep, data[pkg]['deps'], version, d, ud)
|
||||
|
||||
def _getshrinkeddependencies(self, pkg, data, version, d, ud, lockdown, manifest):
|
||||
@@ -251,14 +251,14 @@ class Npm(FetchMethod):
|
||||
with open(shwrf) as datafile:
|
||||
shrinkobj = json.load(datafile)
|
||||
except:
|
||||
logger.warning('Missing shrinkwrap file in NPM_SHRINKWRAP for %s, this will lead to unreliable builds!' % ud.pkgname)
|
||||
logger.warn('Missing shrinkwrap file in NPM_SHRINKWRAP for %s, this will lead to unreliable builds!' % ud.pkgname)
|
||||
lckdf = d.getVar('NPM_LOCKDOWN', True)
|
||||
logger.debug(2, "NPM lockdown file is %s" % lckdf)
|
||||
try:
|
||||
with open(lckdf) as datafile:
|
||||
lockdown = json.load(datafile)
|
||||
except:
|
||||
logger.warning('Missing lockdown file in NPM_LOCKDOWN for %s, this will lead to unreproducible builds!' % ud.pkgname)
|
||||
logger.warn('Missing lockdown file in NPM_LOCKDOWN for %s, this will lead to unreproducible builds!' % ud.pkgname)
|
||||
|
||||
if ('name' not in shrinkobj):
|
||||
self._getdependencies(ud.pkgname, jsondepobj, ud.version, d, ud)
|
||||
|
||||
@@ -1,12 +1,14 @@
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
"""
|
||||
BitBake 'Fetch' implementation for perforce
|
||||
BitBake 'Fetch' implementations
|
||||
|
||||
Classes for obtaining upstream sources for the
|
||||
BitBake build tools.
|
||||
|
||||
"""
|
||||
|
||||
# Copyright (C) 2003, 2004 Chris Larson
|
||||
# Copyright (C) 2016 Kodak Alaris, Inc.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
@@ -23,7 +25,9 @@ BitBake 'Fetch' implementation for perforce
|
||||
#
|
||||
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
|
||||
|
||||
from future_builtins import zip
|
||||
import os
|
||||
import subprocess
|
||||
import logging
|
||||
import bb
|
||||
from bb import data
|
||||
@@ -33,180 +37,151 @@ from bb.fetch2 import logger
|
||||
from bb.fetch2 import runfetchcmd
|
||||
|
||||
class Perforce(FetchMethod):
|
||||
""" Class to fetch from perforce repositories """
|
||||
def supports(self, ud, d):
|
||||
""" Check to see if a given url can be fetched with perforce. """
|
||||
return ud.type in ['p4']
|
||||
|
||||
def urldata_init(self, ud, d):
|
||||
"""
|
||||
Initialize perforce specific variables within url data. If P4CONFIG is
|
||||
provided by the env, use it. If P4PORT is specified by the recipe, use
|
||||
its values, which may override the settings in P4CONFIG.
|
||||
"""
|
||||
ud.basecmd = d.getVar('FETCHCMD_p4', True)
|
||||
if not ud.basecmd:
|
||||
ud.basecmd = "/usr/bin/env p4"
|
||||
|
||||
ud.dldir = d.getVar('P4DIR', True)
|
||||
if not ud.dldir:
|
||||
ud.dldir = '%s/%s' % (d.getVar('DL_DIR', True), 'p4')
|
||||
|
||||
path = ud.url.split('://')[1]
|
||||
path = path.split(';')[0]
|
||||
delim = path.find('@');
|
||||
def doparse(url, d):
|
||||
parm = {}
|
||||
path = url.split("://")[1]
|
||||
delim = path.find("@");
|
||||
if delim != -1:
|
||||
(ud.user, ud.pswd) = path.split('@')[0].split(':')
|
||||
ud.path = path.split('@')[1]
|
||||
(user, pswd, host, port) = path.split('@')[0].split(":")
|
||||
path = path.split('@')[1]
|
||||
else:
|
||||
ud.path = path
|
||||
(host, port) = d.getVar('P4PORT', False).split(':')
|
||||
user = ""
|
||||
pswd = ""
|
||||
|
||||
ud.usingp4config = False
|
||||
p4port = d.getVar('P4PORT', True)
|
||||
if path.find(";") != -1:
|
||||
keys=[]
|
||||
values=[]
|
||||
plist = path.split(';')
|
||||
for item in plist:
|
||||
if item.count('='):
|
||||
(key, value) = item.split('=')
|
||||
keys.append(key)
|
||||
values.append(value)
|
||||
|
||||
if p4port:
|
||||
logger.debug(1, 'Using recipe provided P4PORT: %s' % p4port)
|
||||
ud.host = p4port
|
||||
else:
|
||||
logger.debug(1, 'Trying to use P4CONFIG to automatically set P4PORT...')
|
||||
ud.usingp4config = True
|
||||
p4cmd = '%s info | grep "Server address"' % ud.basecmd
|
||||
bb.fetch2.check_network_access(d, p4cmd)
|
||||
ud.host = runfetchcmd(p4cmd, d, True)
|
||||
ud.host = ud.host.split(': ')[1].strip()
|
||||
logger.debug(1, 'Determined P4PORT to be: %s' % ud.host)
|
||||
if not ud.host:
|
||||
raise FetchError('Could not determine P4PORT from P4CONFIG')
|
||||
|
||||
if ud.path.find('/...') >= 0:
|
||||
ud.pathisdir = True
|
||||
else:
|
||||
ud.pathisdir = False
|
||||
parm = dict(zip(keys, values))
|
||||
path = "//" + path.split(';')[0]
|
||||
host += ":%s" % (port)
|
||||
parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm)
|
||||
|
||||
cleanedpath = ud.path.replace('/...', '').replace('/', '.')
|
||||
cleanedhost = ud.host.replace(':', '.')
|
||||
ud.pkgdir = os.path.join(ud.dldir, cleanedhost, cleanedpath)
|
||||
return host, path, user, pswd, parm
|
||||
doparse = staticmethod(doparse)
|
||||
|
||||
ud.setup_revisons(d)
|
||||
|
||||
ud.localfile = data.expand('%s_%s_%s.tar.gz' % (cleanedhost, cleanedpath, ud.revision), d)
|
||||
|
||||
def _buildp4command(self, ud, d, command, depot_filename=None):
|
||||
"""
|
||||
Build a p4 commandline. Valid commands are "changes", "print", and
|
||||
"files". depot_filename is the full path to the file in the depot
|
||||
including the trailing '#rev' value.
|
||||
"""
|
||||
def getcset(d, depot, host, user, pswd, parm):
|
||||
p4opt = ""
|
||||
if "cset" in parm:
|
||||
return parm["cset"];
|
||||
if user:
|
||||
p4opt += " -u %s" % (user)
|
||||
if pswd:
|
||||
p4opt += " -P %s" % (pswd)
|
||||
if host:
|
||||
p4opt += " -p %s" % (host)
|
||||
|
||||
if ud.user:
|
||||
p4opt += ' -u "%s"' % (ud.user)
|
||||
p4date = d.getVar("P4DATE", True)
|
||||
if "revision" in parm:
|
||||
depot += "#%s" % (parm["revision"])
|
||||
elif "label" in parm:
|
||||
depot += "@%s" % (parm["label"])
|
||||
elif p4date:
|
||||
depot += "@%s" % (p4date)
|
||||
|
||||
if ud.pswd:
|
||||
p4opt += ' -P "%s"' % (ud.pswd)
|
||||
p4cmd = d.getVar('FETCHCMD_p4', True) or "p4"
|
||||
logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
|
||||
p4file, errors = bb.process.run("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
|
||||
cset = p4file.strip()
|
||||
logger.debug(1, "READ %s", cset)
|
||||
if not cset:
|
||||
return -1
|
||||
|
||||
if ud.host and not ud.usingp4config:
|
||||
p4opt += ' -p %s' % (ud.host)
|
||||
return cset.split(' ')[1]
|
||||
getcset = staticmethod(getcset)
|
||||
|
||||
if hasattr(ud, 'revision') and ud.revision:
|
||||
pathnrev = '%s@%s' % (ud.path, ud.revision)
|
||||
def urldata_init(self, ud, d):
|
||||
(host, path, user, pswd, parm) = Perforce.doparse(ud.url, d)
|
||||
|
||||
base_path = path.replace('/...', '')
|
||||
base_path = self._strip_leading_slashes(base_path)
|
||||
|
||||
if "label" in parm:
|
||||
version = parm["label"]
|
||||
else:
|
||||
pathnrev = '%s' % (ud.path)
|
||||
version = Perforce.getcset(d, path, host, user, pswd, parm)
|
||||
|
||||
if depot_filename:
|
||||
if ud.pathisdir: # Remove leading path to obtain filename
|
||||
filename = depot_filename[len(ud.path)-1:]
|
||||
else:
|
||||
filename = depot_filename[depot_filename.rfind('/'):]
|
||||
filename = filename[:filename.find('#')] # Remove trailing '#rev'
|
||||
|
||||
if command == 'changes':
|
||||
p4cmd = '%s%s changes -m 1 //%s' % (ud.basecmd, p4opt, pathnrev)
|
||||
elif command == 'print':
|
||||
if depot_filename != None:
|
||||
p4cmd = '%s%s print -o "p4/%s" "%s"' % (ud.basecmd, p4opt, filename, depot_filename)
|
||||
else:
|
||||
raise FetchError('No depot file name provided to p4 %s' % command, ud.url)
|
||||
elif command == 'files':
|
||||
p4cmd = '%s%s files //%s' % (ud.basecmd, p4opt, pathnrev)
|
||||
else:
|
||||
raise FetchError('Invalid p4 command %s' % command, ud.url)
|
||||
|
||||
return p4cmd
|
||||
|
||||
def _p4listfiles(self, ud, d):
|
||||
"""
|
||||
Return a list of the file names which are present in the depot using the
|
||||
'p4 files' command, including trailing '#rev' file revision indicator
|
||||
"""
|
||||
p4cmd = self._buildp4command(ud, d, 'files')
|
||||
bb.fetch2.check_network_access(d, p4cmd)
|
||||
p4fileslist = runfetchcmd(p4cmd, d, True)
|
||||
p4fileslist = [f.rstrip() for f in p4fileslist.splitlines()]
|
||||
|
||||
if not p4fileslist:
|
||||
raise FetchError('Unable to fetch listing of p4 files from %s@%s' % (ud.host, ud.path))
|
||||
|
||||
count = 0
|
||||
filelist = []
|
||||
|
||||
for filename in p4fileslist:
|
||||
item = filename.split(' - ')
|
||||
lastaction = item[1].split()
|
||||
logger.debug(1, 'File: %s Last Action: %s' % (item[0], lastaction[0]))
|
||||
if lastaction[0] == 'delete':
|
||||
continue
|
||||
filelist.append(item[0])
|
||||
|
||||
return filelist
|
||||
ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base_path.replace('/', '.'), version), d)
|
||||
|
||||
def download(self, ud, d):
|
||||
""" Get the list of files, fetch each one """
|
||||
filelist = self._p4listfiles(ud, d)
|
||||
if not filelist:
|
||||
raise FetchError('No files found in depot %s@%s' % (ud.host, ud.path))
|
||||
"""
|
||||
Fetch urls
|
||||
"""
|
||||
|
||||
bb.utils.remove(ud.pkgdir, True)
|
||||
bb.utils.mkdirhier(ud.pkgdir)
|
||||
os.chdir(ud.pkgdir)
|
||||
(host, depot, user, pswd, parm) = Perforce.doparse(ud.url, d)
|
||||
|
||||
for afile in filelist:
|
||||
p4fetchcmd = self._buildp4command(ud, d, 'print', afile)
|
||||
bb.fetch2.check_network_access(d, p4fetchcmd)
|
||||
runfetchcmd(p4fetchcmd, d)
|
||||
if depot.find('/...') != -1:
|
||||
path = depot[:depot.find('/...')]
|
||||
else:
|
||||
path = depot[:depot.rfind('/')]
|
||||
|
||||
os.chdir(ud.pkgdir)
|
||||
runfetchcmd('tar -czf %s p4' % (ud.localpath), d, cleanup = [ud.localpath])
|
||||
module = parm.get('module', os.path.basename(path))
|
||||
|
||||
def clean(self, ud, d):
|
||||
""" Cleanup p4 specific files and dirs"""
|
||||
bb.utils.remove(ud.localpath)
|
||||
bb.utils.remove(ud.pkgdir, True)
|
||||
# Get the p4 command
|
||||
p4opt = ""
|
||||
if user:
|
||||
p4opt += " -u %s" % (user)
|
||||
|
||||
def supports_srcrev(self):
|
||||
return True
|
||||
if pswd:
|
||||
p4opt += " -P %s" % (pswd)
|
||||
|
||||
def _revision_key(self, ud, d, name):
|
||||
""" Return a unique key for the url """
|
||||
return 'p4:%s' % ud.pkgdir
|
||||
if host:
|
||||
p4opt += " -p %s" % (host)
|
||||
|
||||
def _latest_revision(self, ud, d, name):
|
||||
""" Return the latest upstream scm revision number """
|
||||
p4cmd = self._buildp4command(ud, d, "changes")
|
||||
bb.fetch2.check_network_access(d, p4cmd)
|
||||
tip = runfetchcmd(p4cmd, d, True)
|
||||
p4cmd = d.getVar('FETCHCMD_p4', True) or "p4"
|
||||
|
||||
if not tip:
|
||||
raise FetchError('Could not determine the latest perforce changelist')
|
||||
# create temp directory
|
||||
logger.debug(2, "Fetch: creating temporary directory")
|
||||
bb.utils.mkdirhier(d.expand('${WORKDIR}'))
|
||||
mktemp = d.getVar("FETCHCMD_p4mktemp", True) or d.expand("mktemp -d -q '${WORKDIR}/oep4.XXXXXX'")
|
||||
tmpfile, errors = bb.process.run(mktemp)
|
||||
tmpfile = tmpfile.strip()
|
||||
if not tmpfile:
|
||||
raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", ud.url)
|
||||
|
||||
tipcset = tip.split(' ')[1]
|
||||
logger.debug(1, 'p4 tip found to be changelist %s' % tipcset)
|
||||
return tipcset
|
||||
if "label" in parm:
|
||||
depot = "%s@%s" % (depot, parm["label"])
|
||||
else:
|
||||
cset = Perforce.getcset(d, depot, host, user, pswd, parm)
|
||||
depot = "%s@%s" % (depot, cset)
|
||||
|
||||
def sortable_revision(self, ud, d, name):
|
||||
""" Return a sortable revision number """
|
||||
return False, self._build_revision(ud, d)
|
||||
os.chdir(tmpfile)
|
||||
logger.info("Fetch " + ud.url)
|
||||
logger.info("%s%s files %s", p4cmd, p4opt, depot)
|
||||
p4file, errors = bb.process.run("%s%s files %s" % (p4cmd, p4opt, depot))
|
||||
p4file = [f.rstrip() for f in p4file.splitlines()]
|
||||
|
||||
def _build_revision(self, ud, d):
|
||||
return ud.revision
|
||||
if not p4file:
|
||||
raise FetchError("Fetch: unable to get the P4 files from %s" % depot, ud.url)
|
||||
|
||||
count = 0
|
||||
|
||||
for file in p4file:
|
||||
list = file.split()
|
||||
|
||||
if list[2] == "delete":
|
||||
continue
|
||||
|
||||
dest = list[0][len(path)+1:]
|
||||
where = dest.find("#")
|
||||
|
||||
subprocess.call("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]), shell=True)
|
||||
count = count + 1
|
||||
|
||||
if count == 0:
|
||||
logger.error()
|
||||
raise FetchError("Fetch: No files gathered from the P4 fetch", ud.url)
|
||||
|
||||
runfetchcmd("tar -czf %s %s" % (ud.localpath, module), d, cleanup = [ud.localpath])
|
||||
# cleanup
|
||||
bb.utils.prunedir(tmpfile)
|
||||
|
||||
@@ -61,7 +61,8 @@ SRC_URI = "sftp://user@host.example.com/dir/path.file.txt"
|
||||
|
||||
import os
|
||||
import bb
|
||||
import urllib.request, urllib.parse, urllib.error
|
||||
import urllib
|
||||
import commands
|
||||
from bb import data
|
||||
from bb.fetch2 import URI
|
||||
from bb.fetch2 import FetchMethod
|
||||
@@ -92,7 +93,7 @@ class SFTP(FetchMethod):
|
||||
else:
|
||||
ud.basename = os.path.basename(ud.path)
|
||||
|
||||
ud.localfile = data.expand(urllib.parse.unquote(ud.basename), d)
|
||||
ud.localfile = data.expand(urllib.unquote(ud.basename), d)
|
||||
|
||||
def download(self, ud, d):
|
||||
"""Fetch urls"""
|
||||
@@ -120,7 +121,8 @@ class SFTP(FetchMethod):
|
||||
|
||||
remote = '%s%s:%s' % (user, urlo.hostname, path)
|
||||
|
||||
cmd = '%s %s %s %s' % (basecmd, port, remote, lpath)
|
||||
cmd = '%s %s %s %s' % (basecmd, port, commands.mkarg(remote),
|
||||
commands.mkarg(lpath))
|
||||
|
||||
bb.fetch2.check_network_access(d, cmd, ud.url)
|
||||
runfetchcmd(cmd, d)
|
||||
|
||||
@@ -114,10 +114,12 @@ class SSH(FetchMethod):
|
||||
fr = host
|
||||
fr += ':%s' % path
|
||||
|
||||
|
||||
import commands
|
||||
cmd = 'scp -B -r %s %s %s/' % (
|
||||
portarg,
|
||||
fr,
|
||||
dldir
|
||||
commands.mkarg(fr),
|
||||
commands.mkarg(dldir)
|
||||
)
|
||||
|
||||
bb.fetch2.check_network_access(d, cmd, urldata.url)
|
||||
|
||||
@@ -31,7 +31,7 @@ import subprocess
|
||||
import os
|
||||
import logging
|
||||
import bb
|
||||
import urllib.request, urllib.parse, urllib.error
|
||||
import urllib
|
||||
from bb import data
|
||||
from bb.fetch2 import FetchMethod
|
||||
from bb.fetch2 import FetchError
|
||||
@@ -62,9 +62,9 @@ class Wget(FetchMethod):
|
||||
else:
|
||||
ud.basename = os.path.basename(ud.path)
|
||||
|
||||
ud.localfile = data.expand(urllib.parse.unquote(ud.basename), d)
|
||||
ud.localfile = data.expand(urllib.unquote(ud.basename), d)
|
||||
if not ud.localfile:
|
||||
ud.localfile = data.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", "."), d)
|
||||
ud.localfile = data.expand(urllib.unquote(ud.host + ud.path).replace("/", "."), d)
|
||||
|
||||
self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate"
|
||||
|
||||
@@ -104,12 +104,12 @@ class Wget(FetchMethod):
|
||||
|
||||
return True
|
||||
|
||||
def checkstatus(self, fetch, ud, d):
|
||||
import urllib.request, urllib.error, urllib.parse, socket, http.client
|
||||
from urllib.response import addinfourl
|
||||
def checkstatus(self, fetch, ud, d, try_again=True):
|
||||
import urllib2, socket, httplib
|
||||
from urllib import addinfourl
|
||||
from bb.fetch2 import FetchConnectionCache
|
||||
|
||||
class HTTPConnectionCache(http.client.HTTPConnection):
|
||||
class HTTPConnectionCache(httplib.HTTPConnection):
|
||||
if fetch.connection_cache:
|
||||
def connect(self):
|
||||
"""Connect to the host and port specified in __init__."""
|
||||
@@ -125,7 +125,7 @@ class Wget(FetchMethod):
|
||||
if self._tunnel_host:
|
||||
self._tunnel()
|
||||
|
||||
class CacheHTTPHandler(urllib.request.HTTPHandler):
|
||||
class CacheHTTPHandler(urllib2.HTTPHandler):
|
||||
def http_open(self, req):
|
||||
return self.do_open(HTTPConnectionCache, req)
|
||||
|
||||
@@ -139,7 +139,7 @@ class Wget(FetchMethod):
|
||||
- geturl(): return the original request URL
|
||||
- code: HTTP status code
|
||||
"""
|
||||
host = req.host
|
||||
host = req.get_host()
|
||||
if not host:
|
||||
raise urlllib2.URLError('no host given')
|
||||
|
||||
@@ -147,7 +147,7 @@ class Wget(FetchMethod):
|
||||
h.set_debuglevel(self._debuglevel)
|
||||
|
||||
headers = dict(req.unredirected_hdrs)
|
||||
headers.update(dict((k, v) for k, v in list(req.headers.items())
|
||||
headers.update(dict((k, v) for k, v in req.headers.items()
|
||||
if k not in headers))
|
||||
|
||||
# We want to make an HTTP/1.1 request, but the addinfourl
|
||||
@@ -164,7 +164,7 @@ class Wget(FetchMethod):
|
||||
headers["Connection"] = "Keep-Alive" # Works for HTTP/1.0
|
||||
|
||||
headers = dict(
|
||||
(name.title(), val) for name, val in list(headers.items()))
|
||||
(name.title(), val) for name, val in headers.items())
|
||||
|
||||
if req._tunnel_host:
|
||||
tunnel_headers = {}
|
||||
@@ -177,12 +177,12 @@ class Wget(FetchMethod):
|
||||
h.set_tunnel(req._tunnel_host, headers=tunnel_headers)
|
||||
|
||||
try:
|
||||
h.request(req.get_method(), req.selector, req.data, headers)
|
||||
except socket.error as err: # XXX what error?
|
||||
h.request(req.get_method(), req.get_selector(), req.data, headers)
|
||||
except socket.error, err: # XXX what error?
|
||||
# Don't close connection when cache is enabled.
|
||||
if fetch.connection_cache is None:
|
||||
h.close()
|
||||
raise urllib.error.URLError(err)
|
||||
raise urllib2.URLError(err)
|
||||
else:
|
||||
try:
|
||||
r = h.getresponse(buffering=True)
|
||||
@@ -222,7 +222,7 @@ class Wget(FetchMethod):
|
||||
|
||||
return resp
|
||||
|
||||
class HTTPMethodFallback(urllib.request.BaseHandler):
|
||||
class HTTPMethodFallback(urllib2.BaseHandler):
|
||||
"""
|
||||
Fallback to GET if HEAD is not allowed (405 HTTP error)
|
||||
"""
|
||||
@@ -230,11 +230,11 @@ class Wget(FetchMethod):
|
||||
fp.read()
|
||||
fp.close()
|
||||
|
||||
newheaders = dict((k,v) for k,v in list(req.headers.items())
|
||||
newheaders = dict((k,v) for k,v in req.headers.items()
|
||||
if k.lower() not in ("content-length", "content-type"))
|
||||
return self.parent.open(urllib.request.Request(req.get_full_url(),
|
||||
return self.parent.open(urllib2.Request(req.get_full_url(),
|
||||
headers=newheaders,
|
||||
origin_req_host=req.origin_req_host,
|
||||
origin_req_host=req.get_origin_req_host(),
|
||||
unverifiable=True))
|
||||
|
||||
"""
|
||||
@@ -249,38 +249,42 @@ class Wget(FetchMethod):
|
||||
"""
|
||||
http_error_406 = http_error_405
|
||||
|
||||
class FixedHTTPRedirectHandler(urllib.request.HTTPRedirectHandler):
|
||||
class FixedHTTPRedirectHandler(urllib2.HTTPRedirectHandler):
|
||||
"""
|
||||
urllib2.HTTPRedirectHandler resets the method to GET on redirect,
|
||||
when we want to follow redirects using the original method.
|
||||
"""
|
||||
def redirect_request(self, req, fp, code, msg, headers, newurl):
|
||||
newreq = urllib.request.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl)
|
||||
newreq = urllib2.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl)
|
||||
newreq.get_method = lambda: req.get_method()
|
||||
return newreq
|
||||
exported_proxies = export_proxies(d)
|
||||
|
||||
handlers = [FixedHTTPRedirectHandler, HTTPMethodFallback]
|
||||
if export_proxies:
|
||||
handlers.append(urllib.request.ProxyHandler())
|
||||
handlers.append(urllib2.ProxyHandler())
|
||||
handlers.append(CacheHTTPHandler())
|
||||
# XXX: Since Python 2.7.9 ssl cert validation is enabled by default
|
||||
# see PEP-0476, this causes verification errors on some https servers
|
||||
# so disable by default.
|
||||
import ssl
|
||||
if hasattr(ssl, '_create_unverified_context'):
|
||||
handlers.append(urllib.request.HTTPSHandler(context=ssl._create_unverified_context()))
|
||||
opener = urllib.request.build_opener(*handlers)
|
||||
handlers.append(urllib2.HTTPSHandler(context=ssl._create_unverified_context()))
|
||||
opener = urllib2.build_opener(*handlers)
|
||||
|
||||
try:
|
||||
uri = ud.url.split(";")[0]
|
||||
r = urllib.request.Request(uri)
|
||||
r = urllib2.Request(uri)
|
||||
r.get_method = lambda: "HEAD"
|
||||
opener.open(r)
|
||||
except urllib.error.URLError as e:
|
||||
# debug for now to avoid spamming the logs in e.g. remote sstate searches
|
||||
logger.debug(2, "checkstatus() urlopen failed: %s" % e)
|
||||
return False
|
||||
except urllib2.URLError as e:
|
||||
if try_again:
|
||||
logger.debug(2, "checkstatus: trying again")
|
||||
return self.checkstatus(fetch, ud, d, False)
|
||||
else:
|
||||
# debug for now to avoid spamming the logs in e.g. remote sstate searches
|
||||
logger.debug(2, "checkstatus() urlopen failed: %s" % e)
|
||||
return False
|
||||
return True
|
||||
|
||||
def _parse_path(self, regex, s):
|
||||
|
||||
@@ -27,7 +27,6 @@ import sys
import logging
import optparse
import warnings
import fcntl

import bb
from bb import event
@@ -101,12 +100,11 @@ def import_extension_module(pkg, modulename, checkattr):
# Dynamically load the UI based on the ui name. Although we
# suggest a fixed set this allows you to have flexibility in which
# ones are available.
module = __import__(pkg.__name__, fromlist=[modulename])
module = __import__(pkg.__name__, fromlist = [modulename])
return getattr(module, modulename)
except AttributeError:
modules = present_options(list_extension_modules(pkg, checkattr))
raise BBMainException('FATAL: Unable to import extension module "%s" from %s. '
'Valid extension modules: %s' % (modulename, pkg.__name__, modules))
raise BBMainException('FATAL: Unable to import extension module "%s" from %s. Valid extension modules: %s' % (modulename, pkg.__name__, present_options(list_extension_modules(pkg, checkattr))))


# Display bitbake/OE warnings via the BitBake.Warnings logger, ignoring others"""
warnlog = logging.getLogger("BitBake.Warnings")
@@ -117,7 +115,7 @@ def _showwarning(message, category, filename, lineno, file=None, line=None):
_warnings_showwarning(message, category, filename, lineno, file, line)
else:
s = warnings.formatwarning(message, category, filename, lineno)
warnlog.warning(s)
warnlog.warn(s)

warnings.showwarning = _showwarning
warnings.filterwarnings("ignore")
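The main.py hunk above keeps the hook that forwards Python warnings into the "BitBake.Warnings" logger (only warnlog.warn() becomes warnlog.warning()). The general pattern, sketched independently of bitbake with an illustrative logger name:

    import logging
    import warnings

    logging.basicConfig(level=logging.WARNING)
    warnlog = logging.getLogger("Example.Warnings")

    _warnings_showwarning = warnings.showwarning

    def _showwarning(message, category, filename, lineno, file=None, line=None):
        # Warnings destined for a file keep the default behaviour; everything
        # else is formatted and routed through the logger instead of stderr.
        if file is not None:
            _warnings_showwarning(message, category, filename, lineno, file, line)
        else:
            warnlog.warning(warnings.formatwarning(message, category, filename, lineno))

    warnings.showwarning = _showwarning
    warnings.warn("this now shows up as a log record")

For simpler cases, the standard library's logging.captureWarnings(True) achieves much the same routing without a custom hook.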
@@ -131,166 +129,131 @@ class BitBakeConfigParameters(cookerdata.ConfigParameters):
|
||||
|
||||
def parseCommandLine(self, argv=sys.argv):
|
||||
parser = optparse.OptionParser(
|
||||
formatter=BitbakeHelpFormatter(),
|
||||
version="BitBake Build Tool Core version %s" % bb.__version__,
|
||||
usage="""%prog [options] [recipename/target recipe:do_task ...]
|
||||
formatter = BitbakeHelpFormatter(),
|
||||
version = "BitBake Build Tool Core version %s" % bb.__version__,
|
||||
usage = """%prog [options] [recipename/target recipe:do_task ...]
|
||||
|
||||
Executes the specified task (default is 'build') for a given set of target recipes (.bb files).
|
||||
It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which
|
||||
will provide the layer, BBFILES and other configuration information.""")
|
||||
|
||||
parser.add_option("-b", "--buildfile", action="store", dest="buildfile", default=None,
|
||||
help="Execute tasks from a specific .bb recipe directly. WARNING: Does "
|
||||
"not handle any dependencies from other recipes.")
|
||||
parser.add_option("-b", "--buildfile", help = "Execute tasks from a specific .bb recipe directly. WARNING: Does not handle any dependencies from other recipes.",
|
||||
action = "store", dest = "buildfile", default = None)
|
||||
|
||||
parser.add_option("-k", "--continue", action="store_false", dest="abort", default=True,
|
||||
help="Continue as much as possible after an error. While the target that "
|
||||
"failed and anything depending on it cannot be built, as much as "
|
||||
"possible will be built before stopping.")
|
||||
parser.add_option("-k", "--continue", help = "Continue as much as possible after an error. While the target that failed and anything depending on it cannot be built, as much as possible will be built before stopping.",
|
||||
action = "store_false", dest = "abort", default = True)
|
||||
|
||||
parser.add_option("-a", "--tryaltconfigs", action="store_true",
|
||||
dest="tryaltconfigs", default=False,
|
||||
help="Continue with builds by trying to use alternative providers "
|
||||
"where possible.")
|
||||
parser.add_option("-a", "--tryaltconfigs", help = "Continue with builds by trying to use alternative providers where possible.",
|
||||
action = "store_true", dest = "tryaltconfigs", default = False)
|
||||
|
||||
parser.add_option("-f", "--force", action="store_true", dest="force", default=False,
|
||||
help="Force the specified targets/task to run (invalidating any "
|
||||
"existing stamp file).")
|
||||
parser.add_option("-f", "--force", help = "Force the specified targets/task to run (invalidating any existing stamp file).",
|
||||
action = "store_true", dest = "force", default = False)
|
||||
|
||||
parser.add_option("-c", "--cmd", action="store", dest="cmd",
|
||||
help="Specify the task to execute. The exact options available "
|
||||
"depend on the metadata. Some examples might be 'compile'"
|
||||
" or 'populate_sysroot' or 'listtasks' may give a list of "
|
||||
"the tasks available.")
|
||||
parser.add_option("-c", "--cmd", help = "Specify the task to execute. The exact options available depend on the metadata. Some examples might be 'compile' or 'populate_sysroot' or 'listtasks' may give a list of the tasks available.",
|
||||
action = "store", dest = "cmd")
|
||||
|
||||
parser.add_option("-C", "--clear-stamp", action="store", dest="invalidate_stamp",
|
||||
help="Invalidate the stamp for the specified task such as 'compile' "
|
||||
"and then run the default task for the specified target(s).")
|
||||
parser.add_option("-C", "--clear-stamp", help = "Invalidate the stamp for the specified task such as 'compile' and then run the default task for the specified target(s).",
|
||||
action = "store", dest = "invalidate_stamp")
|
||||
|
||||
parser.add_option("-r", "--read", action="append", dest="prefile", default=[],
|
||||
help="Read the specified file before bitbake.conf.")
|
||||
parser.add_option("-r", "--read", help = "Read the specified file before bitbake.conf.",
|
||||
action = "append", dest = "prefile", default = [])
|
||||
|
||||
parser.add_option("-R", "--postread", action="append", dest="postfile", default=[],
|
||||
help="Read the specified file after bitbake.conf.")
|
||||
parser.add_option("-R", "--postread", help = "Read the specified file after bitbake.conf.",
|
||||
action = "append", dest = "postfile", default = [])
|
||||
|
||||
parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False,
|
||||
help="Output more log message data to the terminal.")
|
||||
parser.add_option("-v", "--verbose", help = "Output more log message data to the terminal.",
|
||||
action = "store_true", dest = "verbose", default = False)
|
||||
|
||||
parser.add_option("-D", "--debug", action="count", dest="debug", default=0,
|
||||
help="Increase the debug level. You can specify this more than once.")
|
||||
parser.add_option("-D", "--debug", help = "Increase the debug level. You can specify this more than once.",
|
||||
action = "count", dest="debug", default = 0)
|
||||
|
||||
parser.add_option("-n", "--dry-run", action="store_true", dest="dry_run", default=False,
|
||||
help="Don't execute, just go through the motions.")
|
||||
parser.add_option("-n", "--dry-run", help = "Don't execute, just go through the motions.",
|
||||
action = "store_true", dest = "dry_run", default = False)
|
||||
|
||||
parser.add_option("-S", "--dump-signatures", action="append", dest="dump_signatures",
|
||||
default=[], metavar="SIGNATURE_HANDLER",
|
||||
help="Dump out the signature construction information, with no task "
|
||||
"execution. The SIGNATURE_HANDLER parameter is passed to the "
|
||||
"handler. Two common values are none and printdiff but the handler "
|
||||
"may define more/less. none means only dump the signature, printdiff"
|
||||
" means compare the dumped signature with the cached one.")
|
||||
parser.add_option("-S", "--dump-signatures", help = "Dump out the signature construction information, with no task execution. The SIGNATURE_HANDLER parameter is passed to the handler. Two common values are none and printdiff but the handler may define more/less. none means only dump the signature, printdiff means compare the dumped signature with the cached one.",
|
||||
action = "append", dest = "dump_signatures", default = [], metavar="SIGNATURE_HANDLER")
|
||||
|
||||
parser.add_option("-p", "--parse-only", action="store_true",
|
||||
dest="parse_only", default=False,
|
||||
help="Quit after parsing the BB recipes.")
|
||||
parser.add_option("-p", "--parse-only", help = "Quit after parsing the BB recipes.",
|
||||
action = "store_true", dest = "parse_only", default = False)
|
||||
|
||||
parser.add_option("-s", "--show-versions", action="store_true",
|
||||
dest="show_versions", default=False,
|
||||
help="Show current and preferred versions of all recipes.")
|
||||
parser.add_option("-s", "--show-versions", help = "Show current and preferred versions of all recipes.",
|
||||
action = "store_true", dest = "show_versions", default = False)
|
||||
|
||||
parser.add_option("-e", "--environment", action="store_true",
|
||||
dest="show_environment", default=False,
|
||||
help="Show the global or per-recipe environment complete with information"
|
||||
" about where variables were set/changed.")
|
||||
parser.add_option("-e", "--environment", help = "Show the global or per-recipe environment complete with information about where variables were set/changed.",
|
||||
action = "store_true", dest = "show_environment", default = False)
|
||||
|
||||
parser.add_option("-g", "--graphviz", action="store_true", dest="dot_graph", default=False,
|
||||
help="Save dependency tree information for the specified "
|
||||
"targets in the dot syntax.")
|
||||
parser.add_option("-g", "--graphviz", help = "Save dependency tree information for the specified targets in the dot syntax.",
|
||||
action = "store_true", dest = "dot_graph", default = False)
|
||||
|
||||
parser.add_option("-I", "--ignore-deps", action="append",
|
||||
dest="extra_assume_provided", default=[],
|
||||
help="Assume these dependencies don't exist and are already provided "
|
||||
"(equivalent to ASSUME_PROVIDED). Useful to make dependency "
|
||||
"graphs more appealing")
|
||||
parser.add_option("-I", "--ignore-deps", help = """Assume these dependencies don't exist and are already provided (equivalent to ASSUME_PROVIDED). Useful to make dependency graphs more appealing""",
|
||||
action = "append", dest = "extra_assume_provided", default = [])
|
||||
|
||||
parser.add_option("-l", "--log-domains", action="append", dest="debug_domains", default=[],
|
||||
help="Show debug logging for the specified logging domains")
|
||||
parser.add_option("-l", "--log-domains", help = """Show debug logging for the specified logging domains""",
|
||||
action = "append", dest = "debug_domains", default = [])
|
||||
|
||||
parser.add_option("-P", "--profile", action="store_true", dest="profile", default=False,
|
||||
help="Profile the command and save reports.")
|
||||
parser.add_option("-P", "--profile", help = "Profile the command and save reports.",
|
||||
action = "store_true", dest = "profile", default = False)
|
||||
|
||||
env_ui = os.environ.get('BITBAKE_UI', None)
|
||||
default_ui = env_ui or 'knotty'
|
||||
# @CHOICES@ is substituted out by BitbakeHelpFormatter above
|
||||
parser.add_option("-u", "--ui", help = "The user interface to use (@CHOICES@ - default %default).",
|
||||
action="store", dest="ui", default=default_ui)
|
||||
|
||||
# @CHOICES@ is substituted out by BitbakeHelpFormatter above
|
||||
parser.add_option("-u", "--ui", action="store", dest="ui",
|
||||
default=os.environ.get('BITBAKE_UI', 'knotty'),
|
||||
help="The user interface to use (@CHOICES@ - default %default).")
|
||||
parser.add_option("-t", "--servertype", help = "Choose which server type to use (@CHOICES@ - default %default).",
|
||||
action = "store", dest = "servertype", default = "process")
|
||||
|
||||
# @CHOICES@ is substituted out by BitbakeHelpFormatter above
|
||||
parser.add_option("-t", "--servertype", action="store", dest="servertype",
|
||||
default=["process", "xmlrpc"]["BBSERVER" in os.environ],
|
||||
help="Choose which server type to use (@CHOICES@ - default %default).")
|
||||
parser.add_option("", "--token", help = "Specify the connection token to be used when connecting to a remote server.",
|
||||
action = "store", dest = "xmlrpctoken")
|
||||
|
||||
parser.add_option("", "--token", action="store", dest="xmlrpctoken",
|
||||
default=os.environ.get("BBTOKEN"),
|
||||
help="Specify the connection token to be used when connecting "
|
||||
"to a remote server.")
|
||||
parser.add_option("", "--revisions-changed", help = "Set the exit code depending on whether upstream floating revisions have changed or not.",
|
||||
action = "store_true", dest = "revisions_changed", default = False)
|
||||
|
||||
parser.add_option("", "--revisions-changed", action="store_true",
|
||||
dest="revisions_changed", default=False,
|
||||
help="Set the exit code depending on whether upstream floating "
|
||||
"revisions have changed or not.")
|
||||
parser.add_option("", "--server-only", help = "Run bitbake without a UI, only starting a server (cooker) process.",
|
||||
action = "store_true", dest = "server_only", default = False)
|
||||
|
||||
parser.add_option("", "--server-only", action="store_true",
|
||||
dest="server_only", default=False,
|
||||
help="Run bitbake without a UI, only starting a server "
|
||||
"(cooker) process.")
|
||||
parser.add_option("-B", "--bind", help = "The name/address for the bitbake server to bind to.",
|
||||
action = "store", dest = "bind", default = False)
|
||||
|
||||
parser.add_option("-B", "--bind", action="store", dest="bind", default=False,
|
||||
help="The name/address for the bitbake server to bind to.")
|
||||
parser.add_option("", "--no-setscene", help = "Do not run any setscene tasks. sstate will be ignored and everything needed, built.",
|
||||
action = "store_true", dest = "nosetscene", default = False)
|
||||
|
||||
parser.add_option("", "--no-setscene", action="store_true",
|
||||
dest="nosetscene", default=False,
|
||||
help="Do not run any setscene tasks. sstate will be ignored and "
|
||||
"everything needed, built.")
|
||||
parser.add_option("", "--setscene-only", help = "Only run setscene tasks, don't run any real tasks.",
|
||||
action = "store_true", dest = "setsceneonly", default = False)
|
||||
|
||||
parser.add_option("", "--setscene-only", action="store_true",
|
||||
dest="setsceneonly", default=False,
|
||||
help="Only run setscene tasks, don't run any real tasks.")
|
||||
parser.add_option("", "--remote-server", help = "Connect to the specified server.",
|
||||
action = "store", dest = "remote_server", default = False)
|
||||
|
||||
parser.add_option("", "--remote-server", action="store", dest="remote_server",
|
||||
default=os.environ.get("BBSERVER"),
|
||||
help="Connect to the specified server.")
|
||||
parser.add_option("-m", "--kill-server", help = "Terminate the remote server.",
|
||||
action = "store_true", dest = "kill_server", default = False)
|
||||
|
||||
parser.add_option("-m", "--kill-server", action="store_true",
|
||||
dest="kill_server", default=False,
|
||||
help="Terminate the remote server.")
|
||||
parser.add_option("", "--observe-only", help = "Connect to a server as an observing-only client.",
|
||||
action = "store_true", dest = "observe_only", default = False)
|
||||
|
||||
parser.add_option("", "--observe-only", action="store_true",
|
||||
dest="observe_only", default=False,
|
||||
help="Connect to a server as an observing-only client.")
|
||||
parser.add_option("", "--status-only", help = "Check the status of the remote bitbake server.",
|
||||
action = "store_true", dest = "status_only", default = False)
|
||||
|
||||
parser.add_option("", "--status-only", action="store_true",
|
||||
dest="status_only", default=False,
|
||||
help="Check the status of the remote bitbake server.")
|
||||
|
||||
parser.add_option("-w", "--write-log", action="store", dest="writeeventlog",
|
||||
default=os.environ.get("BBEVENTLOG"),
|
||||
help="Writes the event log of the build to a bitbake event json file. "
|
||||
"Use '' (empty string) to assign the name automatically.")
|
||||
parser.add_option("-w", "--write-log", help = "Writes the event log of the build to a bitbake event json file. Use '' (empty string) to assign the name automatically.",
|
||||
action = "store", dest = "writeeventlog")
|
||||
|
||||
options, targets = parser.parse_args(argv)
|
||||
|
||||
# use configuration files from environment variables
|
||||
if "BBPRECONF" in os.environ:
|
||||
options.prefile.append(os.environ["BBPRECONF"])
|
||||
# some environmental variables set also configuration options
|
||||
if "BBSERVER" in os.environ:
|
||||
options.servertype = "xmlrpc"
|
||||
options.remote_server = os.environ["BBSERVER"]
|
||||
|
||||
if "BBPOSTCONF" in os.environ:
|
||||
options.postfile.append(os.environ["BBPOSTCONF"])
|
||||
if "BBTOKEN" in os.environ:
|
||||
options.xmlrpctoken = os.environ["BBTOKEN"]
|
||||
|
||||
if "BBEVENTLOG" in os.environ:
|
||||
options.writeeventlog = os.environ["BBEVENTLOG"]
|
||||
|
||||
# fill in proper log name if not supplied
|
||||
if options.writeeventlog is not None and len(options.writeeventlog) == 0:
|
||||
from datetime import datetime
|
||||
eventlog = "bitbake_eventlog_%s.json" % datetime.now().strftime("%Y%m%d%H%M%S")
|
||||
options.writeeventlog = eventlog
|
||||
import datetime
|
||||
options.writeeventlog = "bitbake_eventlog_%s.json" % datetime.datetime.now().strftime("%Y%m%d%H%M%S")
|
||||
|
||||
# if BBSERVER says to autodetect, let's do that
|
||||
if options.remote_server:
|
||||
@@ -323,7 +286,7 @@ def start_server(servermodule, configParams, configuration, features):
|
||||
if configParams.bind:
|
||||
(host, port) = configParams.bind.split(':')
|
||||
server.initServer((host, int(port)), single_use)
|
||||
configuration.interface = [server.serverImpl.host, server.serverImpl.port]
|
||||
configuration.interface = [ server.serverImpl.host, server.serverImpl.port ]
|
||||
else:
|
||||
server.initServer(single_use=single_use)
|
||||
configuration.interface = []
|
||||
@@ -336,15 +299,19 @@ def start_server(servermodule, configParams, configuration, features):
|
||||
server.addcooker(cooker)
|
||||
server.saveConnectionDetails()
|
||||
except Exception as e:
|
||||
exc_info = sys.exc_info()
|
||||
while hasattr(server, "event_queue"):
|
||||
import queue
|
||||
try:
|
||||
import queue
|
||||
except ImportError:
|
||||
import Queue as queue
|
||||
try:
|
||||
event = server.event_queue.get(block=False)
|
||||
except (queue.Empty, IOError):
|
||||
break
|
||||
if isinstance(event, logging.LogRecord):
|
||||
logger.handle(event)
|
||||
raise
|
||||
raise exc_info[1], None, exc_info[2]
|
||||
server.detach()
|
||||
cooker.lock.close()
|
||||
return server
|
||||
@@ -361,10 +328,7 @@ def bitbake_main(configParams, configuration):
|
||||
# updates to log files for use with tail
|
||||
try:
|
||||
if sys.stdout.name == '<stdout>':
|
||||
# Reopen with O_SYNC (unbuffered)
|
||||
fl = fcntl.fcntl(sys.stdout.fileno(), fcntl.F_GETFL)
|
||||
fl |= os.O_SYNC
|
||||
fcntl.fcntl(sys.stdout.fileno(), fcntl.F_SETFL, fl)
|
||||
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
|
||||
except:
|
||||
pass
|
||||
|
||||
@@ -384,7 +348,7 @@ def bitbake_main(configParams, configuration):
|
||||
if configParams.remote_server:
|
||||
raise BBMainException("FATAL: The '--server-only' option conflicts with %s.\n" %
|
||||
("the BBSERVER environment variable" if "BBSERVER" in os.environ \
|
||||
else "the '--remote-server' option"))
|
||||
else "the '--remote-server' option" ))
|
||||
|
||||
if configParams.bind and configParams.servertype != "xmlrpc":
|
||||
raise BBMainException("FATAL: If '-B' or '--bind' is defined, we must "
|
||||
@@ -399,8 +363,7 @@ def bitbake_main(configParams, configuration):
|
||||
"connecting to a server.\n")
|
||||
|
||||
if configParams.kill_server and not configParams.remote_server:
|
||||
raise BBMainException("FATAL: '--kill-server' can only be used to "
|
||||
"terminate a remote server")
|
||||
raise BBMainException("FATAL: '--kill-server' can only be used to terminate a remote server")
|
||||
|
||||
if "BBDEBUG" in os.environ:
|
||||
level = int(os.environ["BBDEBUG"])
|
||||
@@ -408,7 +371,7 @@ def bitbake_main(configParams, configuration):
|
||||
configuration.debug = level
|
||||
|
||||
bb.msg.init_msgconfig(configParams.verbose, configuration.debug,
|
||||
configuration.debug_domains)
|
||||
configuration.debug_domains)
|
||||
|
||||
# Ensure logging messages get sent to the UI as events
|
||||
handler = bb.event.LogHandler()
|
||||
@@ -437,8 +400,7 @@ def bitbake_main(configParams, configuration):
|
||||
bb.event.ui_queue = []
|
||||
else:
|
||||
# we start a stub server that is actually a XMLRPClient that connects to a real server
|
||||
server = servermodule.BitBakeXMLRPCClient(configParams.observe_only,
|
||||
configParams.xmlrpctoken)
|
||||
server = servermodule.BitBakeXMLRPCClient(configParams.observe_only, configParams.xmlrpctoken)
|
||||
server.saveConnectionDetails(configParams.remote_server)
|
||||
|
||||
|
||||
@@ -467,14 +429,12 @@ def bitbake_main(configParams, configuration):
|
||||
return 0
|
||||
|
||||
try:
|
||||
return ui_module.main(server_connection.connection, server_connection.events,
|
||||
configParams)
|
||||
return ui_module.main(server_connection.connection, server_connection.events, configParams)
|
||||
finally:
|
||||
bb.event.ui_queue = []
|
||||
server_connection.terminate()
|
||||
else:
|
||||
print("Bitbake server address: %s, server port: %s" % (server.serverImpl.host,
|
||||
server.serverImpl.port))
|
||||
print("Bitbake server address: %s, server port: %s" % (server.serverImpl.host, server.serverImpl.port))
|
||||
return 0
|
||||
|
||||
return 1
|
||||
|
||||
@@ -220,7 +220,7 @@ class diskMonitor:
if minSpace and freeSpace < minSpace:
# Always show warning, the self.checked would always be False if the action is WARN
if self.preFreeS[k] == 0 or self.preFreeS[k] - freeSpace > self.spaceInterval and not self.checked[k]:
logger.warning("The free space of %s (%s) is running low (%.3fGB left)" % \
logger.warn("The free space of %s (%s) is running low (%.3fGB left)" % \
(path, dev, freeSpace / 1024 / 1024 / 1024.0))
self.preFreeS[k] = freeSpace

@@ -246,7 +246,7 @@ class diskMonitor:
continue
# Always show warning, the self.checked would always be False if the action is WARN
if self.preFreeI[k] == 0 or self.preFreeI[k] - freeInode > self.inodeInterval and not self.checked[k]:
logger.warning("The free inode of %s (%s) is running low (%.3fK left)" % \
logger.warn("The free inode of %s (%s) is running low (%.3fK left)" % \
(path, dev, freeInode / 1024.0))
self.preFreeI[k] = freeInode


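The diskMonitor hunks only swap the deprecated logger.warn() for logger.warning(), but the surrounding logic is a useful pattern: poll free space and inodes and warn when they fall below a threshold. A minimal sketch for a Unix-like host, with invented thresholds and logger name:

    import logging
    import os

    logging.basicConfig(level=logging.WARNING)
    logger = logging.getLogger("DiskMonitor")

    def check_free(path, min_gbytes=1.0, min_kinodes=100.0):
        st = os.statvfs(path)
        free_bytes = st.f_bavail * st.f_frsize   # space available to non-root users
        free_inodes = st.f_favail
        if free_bytes / 1024 / 1024 / 1024.0 < min_gbytes:
            logger.warning("The free space of %s is running low (%.3fGB left)",
                           path, free_bytes / 1024 / 1024 / 1024.0)
        if free_inodes / 1024.0 < min_kinodes:
            logger.warning("The free inode count of %s is running low (%.3fK left)",
                           path, free_inodes / 1024.0)

    check_free("/tmp")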
@@ -57,7 +57,7 @@ class BBLogFormatter(logging.Formatter):
}

color_enabled = False
BASECOLOR, BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = list(range(29,38))
BASECOLOR, BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(29,38)

COLORS = {
DEBUG3 : CYAN,
@@ -90,9 +90,8 @@ class BBLogFormatter(logging.Formatter):
if self.color_enabled:
record = self.colorize(record)
msg = logging.Formatter.format(self, record)
if hasattr(record, 'bb_exc_formatted'):
msg += '\n' + ''.join(record.bb_exc_formatted)
elif hasattr(record, 'bb_exc_info'):

if hasattr(record, 'bb_exc_info'):
etype, value, tb = record.bb_exc_info
formatted = bb.exceptions.format_exception(etype, value, tb, limit=5)
msg += '\n' + ''.join(formatted)

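The BBLogFormatter hunk adds a fast path for records that already carry pre-formatted exception text (bb_exc_formatted) before falling back to formatting bb_exc_info; pre-formatting matters when records travel between processes, since traceback objects do not pickle. The underlying technique, a Formatter that appends extra per-record attributes, can be sketched like this (attribute and logger names are chosen for illustration):

    import logging
    import sys
    import traceback

    class ExcFormatter(logging.Formatter):
        def format(self, record):
            msg = logging.Formatter.format(self, record)
            # Prefer text that was formatted elsewhere and shipped with the
            # record; otherwise format the raw (etype, value, tb) tuple here.
            if hasattr(record, 'exc_formatted'):
                msg += '\n' + ''.join(record.exc_formatted)
            elif hasattr(record, 'exc_triple'):
                etype, value, tb = record.exc_triple
                msg += '\n' + ''.join(traceback.format_exception(etype, value, tb, limit=5))
            return msg

    handler = logging.StreamHandler()
    handler.setFormatter(ExcFormatter("%(levelname)s: %(message)s"))
    log = logging.getLogger("Example")
    log.addHandler(handler)

    try:
        1 / 0
    except ZeroDivisionError:
        log.error("division failed", extra={"exc_triple": sys.exc_info()})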
@@ -21,7 +21,8 @@
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
|
||||
from __future__ import absolute_import
|
||||
from future_builtins import filter
|
||||
import re
|
||||
import string
|
||||
import logging
|
||||
@@ -138,7 +139,7 @@ class DataNode(AstNode):
|
||||
data.setVar(key, val, parsing=True, **loginfo)
|
||||
|
||||
class MethodNode(AstNode):
|
||||
tr_tbl = str.maketrans('/.+-@%&', '_______')
|
||||
tr_tbl = string.maketrans('/.+-@%&', '_______')
|
||||
|
||||
def __init__(self, filename, lineno, func_name, body, python, fakeroot):
|
||||
AstNode.__init__(self, filename, lineno)
|
||||
@@ -306,8 +307,7 @@ def handleInherit(statements, filename, lineno, m):
|
||||
statements.append(InheritNode(filename, lineno, classes))
|
||||
|
||||
def finalize(fn, d, variant = None):
|
||||
saved_handlers = bb.event.get_handlers().copy()
|
||||
|
||||
all_handlers = {}
|
||||
for var in d.getVar('__BBHANDLERS', False) or []:
|
||||
# try to add the handler
|
||||
handlerfn = d.getVarFlag(var, "filename", False)
|
||||
@@ -332,7 +332,6 @@ def finalize(fn, d, variant = None):
|
||||
d.setVar('BBINCLUDED', bb.parse.get_file_depends(d))
|
||||
|
||||
bb.event.fire(bb.event.RecipeParsed(fn), d)
|
||||
bb.event.set_handlers(saved_handlers)
|
||||
|
||||
def _create_variants(datastores, names, function, onlyfinalise):
|
||||
def create_variant(name, orig_d, arg = None):
|
||||
@@ -342,17 +341,17 @@ def _create_variants(datastores, names, function, onlyfinalise):
|
||||
function(arg or name, new_d)
|
||||
datastores[name] = new_d
|
||||
|
||||
for variant in list(datastores.keys()):
|
||||
for variant, variant_d in datastores.items():
|
||||
for name in names:
|
||||
if not variant:
|
||||
# Based on main recipe
|
||||
create_variant(name, datastores[""])
|
||||
create_variant(name, variant_d)
|
||||
else:
|
||||
create_variant("%s-%s" % (variant, name), datastores[variant], name)
|
||||
create_variant("%s-%s" % (variant, name), variant_d, name)
|
||||
|
||||
def _expand_versions(versions):
|
||||
def expand_one(version, start, end):
|
||||
for i in range(start, end + 1):
|
||||
for i in xrange(start, end + 1):
|
||||
ver = _bbversions_re.sub(str(i), version, 1)
|
||||
yield ver
|
||||
|
||||
@@ -461,16 +460,16 @@ def multi_finalize(fn, d):
|
||||
safe_d.setVar("BBCLASSEXTEND", extended)
|
||||
_create_variants(datastores, extendedmap.keys(), extendfunc, onlyfinalise)
|
||||
|
||||
for variant in datastores.keys():
|
||||
for variant, variant_d in datastores.iteritems():
|
||||
if variant:
|
||||
try:
|
||||
if not onlyfinalise or variant in onlyfinalise:
|
||||
finalize(fn, datastores[variant], variant)
|
||||
finalize(fn, variant_d, variant)
|
||||
except bb.parse.SkipRecipe as e:
|
||||
datastores[variant].setVar("__SKIPPED", e.args[0])
|
||||
variant_d.setVar("__SKIPPED", e.args[0])
|
||||
|
||||
if len(datastores) > 1:
|
||||
variants = filter(None, datastores.keys())
|
||||
variants = filter(None, datastores.iterkeys())
|
||||
safe_d.setVar("__VARIANTS", " ".join(variants))
|
||||
|
||||
datastores[""] = d
|
||||
|
||||
@@ -25,7 +25,7 @@
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
|
||||
from __future__ import absolute_import
|
||||
import re, bb, os
|
||||
import logging
|
||||
import bb.build, bb.utils
|
||||
|
||||
@@ -84,13 +84,13 @@ def include(parentfn, fn, lineno, data, error_out):
|
||||
bbpath = "%s:%s" % (dname, data.getVar("BBPATH", True))
|
||||
abs_fn, attempts = bb.utils.which(bbpath, fn, history=True)
|
||||
if abs_fn and bb.parse.check_dependency(data, abs_fn):
|
||||
logger.warning("Duplicate inclusion for %s in %s" % (abs_fn, data.getVar('FILE', True)))
|
||||
logger.warn("Duplicate inclusion for %s in %s" % (abs_fn, data.getVar('FILE', True)))
|
||||
for af in attempts:
|
||||
bb.parse.mark_dependency(data, af)
|
||||
if abs_fn:
|
||||
fn = abs_fn
|
||||
elif bb.parse.check_dependency(data, fn):
|
||||
logger.warning("Duplicate inclusion for %s in %s" % (fn, data.getVar('FILE', True)))
|
||||
logger.warn("Duplicate inclusion for %s in %s" % (fn, data.getVar('FILE', True)))
|
||||
|
||||
try:
|
||||
bb.parse.handle(fn, data, True)
|
||||
|
||||
@@ -92,9 +92,9 @@ class SQLTable(collections.MutableMapping):
|
||||
self._execute("DELETE from %s where key=?;" % self.table, [key])
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
if not isinstance(key, str):
|
||||
if not isinstance(key, basestring):
|
||||
raise TypeError('Only string keys are supported')
|
||||
elif not isinstance(value, str):
|
||||
elif not isinstance(value, basestring):
|
||||
raise TypeError('Only string values are supported')
|
||||
|
||||
data = self._execute("SELECT * from %s where key=?;" %
|
||||
@@ -131,14 +131,14 @@ class SQLTable(collections.MutableMapping):
|
||||
return [row[1] for row in data]
|
||||
|
||||
def values(self):
|
||||
return list(self.values())
|
||||
return list(self.itervalues())
|
||||
|
||||
def itervalues(self):
|
||||
data = self._execute("SELECT value FROM %s;" % self.table)
|
||||
return (row[0] for row in data)
|
||||
|
||||
def items(self):
|
||||
return list(self.items())
|
||||
return list(self.iteritems())
|
||||
|
||||
def iteritems(self):
|
||||
return self._execute("SELECT * FROM %s;" % self.table)
|
||||
@@ -178,7 +178,7 @@ class PersistData(object):
|
||||
"""
|
||||
Return a list of key + value pairs for a domain
|
||||
"""
|
||||
return list(self.data[domain].items())
|
||||
return self.data[domain].items()
|
||||
|
||||
def getValue(self, domain, key):
|
||||
"""
|
||||
|
||||
@@ -17,7 +17,7 @@ class CmdError(RuntimeError):
|
||||
self.msg = msg
|
||||
|
||||
def __str__(self):
|
||||
if not isinstance(self.command, str):
|
||||
if not isinstance(self.command, basestring):
|
||||
cmd = subprocess.list2cmdline(self.command)
|
||||
else:
|
||||
cmd = self.command
|
||||
@@ -97,8 +97,6 @@ def _logged_communicate(pipe, log, input, extrafiles):
|
||||
try:
|
||||
while pipe.poll() is None:
|
||||
rlist = rin
|
||||
stdoutbuf = b""
|
||||
stderrbuf = b""
|
||||
try:
|
||||
r,w,e = select.select (rlist, [], [], 1)
|
||||
except OSError as e:
|
||||
@@ -106,26 +104,16 @@ def _logged_communicate(pipe, log, input, extrafiles):
|
||||
raise
|
||||
|
||||
if pipe.stdout in r:
|
||||
data = stdoutbuf + pipe.stdout.read()
|
||||
if data is not None and len(data) > 0:
|
||||
try:
|
||||
data = data.decode("utf-8")
|
||||
outdata.append(data)
|
||||
log.write(data)
|
||||
stdoutbuf = b""
|
||||
except UnicodeDecodeError:
|
||||
stdoutbuf = data
|
||||
data = pipe.stdout.read()
|
||||
if data is not None:
|
||||
outdata.append(data)
|
||||
log.write(data)
|
||||
|
||||
if pipe.stderr in r:
|
||||
data = stderrbuf + pipe.stderr.read()
|
||||
if data is not None and len(data) > 0:
|
||||
try:
|
||||
data = data.decode("utf-8")
|
||||
errdata.append(data)
|
||||
log.write(data)
|
||||
stderrbuf = b""
|
||||
except UnicodeDecodeError:
|
||||
stderrbuf = data
|
||||
data = pipe.stderr.read()
|
||||
if data is not None:
|
||||
errdata.append(data)
|
||||
log.write(data)
|
||||
|
||||
readextras(r)
|
||||
|
||||
@@ -147,7 +135,7 @@ def run(cmd, input=None, log=None, extrafiles=None, **options):
|
||||
if not extrafiles:
|
||||
extrafiles = []
|
||||
|
||||
if isinstance(cmd, str) and not "shell" in options:
|
||||
if isinstance(cmd, basestring) and not "shell" in options:
|
||||
options["shell"] = True
|
||||
|
||||
try:
|
||||
@@ -162,10 +150,6 @@ def run(cmd, input=None, log=None, extrafiles=None, **options):
|
||||
stdout, stderr = _logged_communicate(pipe, log, input, extrafiles)
|
||||
else:
|
||||
stdout, stderr = pipe.communicate(input)
|
||||
if stdout:
|
||||
stdout = stdout.decode("utf-8")
|
||||
if stderr:
|
||||
stderr = stderr.decode("utf-8")
|
||||
|
||||
if pipe.returncode != 0:
|
||||
raise ExecutionError(cmd, pipe.returncode, stdout, stderr)
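The process.py hunk above handles reading raw bytes from a pipe under Python 3: a read may stop in the middle of a multi-byte UTF-8 sequence, so undecodable data is kept in a buffer (stdoutbuf/stderrbuf) and retried once more bytes arrive. The same buffering idea in isolation:

    def decode_chunks(chunks):
        """Decode a stream of byte chunks, carrying partial UTF-8 sequences over."""
        buf = b""
        out = []
        for chunk in chunks:
            data = buf + chunk
            try:
                out.append(data.decode("utf-8"))
                buf = b""
            except UnicodeDecodeError:
                # Keep the bytes until the rest of the character arrives.
                buf = data
        return "".join(out)

    # "é" is 0xc3 0xa9 in UTF-8; here it is split across two reads.
    print(decode_chunks([b"caf\xc3", b"\xa9 latte"]))   # -> café latte

The standard library's codecs.getincrementaldecoder("utf-8") offers the same behaviour in a ready-made form.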
|
||||
|
||||
@@ -245,7 +245,7 @@ def _filterProviders(providers, item, cfgData, dataCache):
pkg_pn[pn] = []
pkg_pn[pn].append(p)

logger.debug(1, "providers for %s are: %s", item, list(pkg_pn.keys()))
logger.debug(1, "providers for %s are: %s", item, pkg_pn.keys())

# First add PREFERRED_VERSIONS
for pn in pkg_pn:

@@ -527,7 +527,7 @@ def utility_sed(name, args, interp, env, stdin, stdout, stderr, debugflags):
|
||||
print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
|
||||
|
||||
# Scan pattern arguments and append a space if necessary
|
||||
for i in range(len(args)):
|
||||
for i in xrange(len(args)):
|
||||
if not RE_SED.search(args[i]):
|
||||
continue
|
||||
args[i] = args[i] + ' '
|
||||
|
||||
@@ -474,7 +474,7 @@ class Environment:
|
||||
"""
|
||||
# Save and remove previous arguments
|
||||
prevargs = []
|
||||
for i in range(int(self._env['#'])):
|
||||
for i in xrange(int(self._env['#'])):
|
||||
i = str(i+1)
|
||||
prevargs.append(self._env[i])
|
||||
del self._env[i]
|
||||
@@ -488,7 +488,7 @@ class Environment:
|
||||
return prevargs
|
||||
|
||||
def get_positional_args(self):
|
||||
return [self._env[str(i+1)] for i in range(int(self._env['#']))]
|
||||
return [self._env[str(i+1)] for i in xrange(int(self._env['#']))]
|
||||
|
||||
def get_variables(self):
|
||||
return dict(self._env)
|
||||
|
||||
@@ -20,7 +20,7 @@ except NameError:
|
||||
from Set import Set as set
|
||||
|
||||
from ply import lex
|
||||
from bb.pysh.sherrors import *
|
||||
from sherrors import *
|
||||
|
||||
class NeedMore(Exception):
|
||||
pass
|
||||
|
||||
@@ -10,11 +10,11 @@
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
import bb.pysh.pyshlex as pyshlex
|
||||
import pyshlex
|
||||
tokens = pyshlex.tokens
|
||||
|
||||
from ply import yacc
|
||||
import bb.pysh.sherrors as sherrors
|
||||
import sherrors
|
||||
|
||||
class IORedirect:
|
||||
def __init__(self, op, filename, io_number=None):
|
||||
|
||||
@@ -30,7 +30,7 @@ import signal
|
||||
import sys
|
||||
import time
|
||||
import select
|
||||
from queue import Empty
|
||||
from Queue import Empty
|
||||
from multiprocessing import Event, Process, util, Queue, Pipe, queues, Manager
|
||||
|
||||
from . import BitBakeBaseServer, BitBakeBaseServerConnection, BaseImplServer
|
||||
@@ -137,7 +137,7 @@ class ProcessServer(Process, BaseImplServer):
|
||||
if not fds:
|
||||
fds = []
|
||||
|
||||
for function, data in list(self._idlefuns.items()):
|
||||
for function, data in self._idlefuns.items():
|
||||
try:
|
||||
retval = function(self, data, False)
|
||||
if retval is False:
|
||||
@@ -145,7 +145,7 @@ class ProcessServer(Process, BaseImplServer):
|
||||
nextsleep = None
|
||||
elif retval is True:
|
||||
nextsleep = None
|
||||
elif isinstance(retval, float) and nextsleep:
|
||||
elif isinstance(retval, float):
|
||||
if (retval < nextsleep):
|
||||
nextsleep = retval
|
||||
elif nextsleep is None:
|
||||
@@ -213,7 +213,7 @@ class BitBakeProcessServerConnection(BitBakeBaseServerConnection):
|
||||
# Wrap Queue to provide API which isn't server implementation specific
|
||||
class ProcessEventQueue(multiprocessing.queues.Queue):
|
||||
def __init__(self, maxsize):
|
||||
multiprocessing.queues.Queue.__init__(self, maxsize, ctx=multiprocessing.get_context())
|
||||
multiprocessing.queues.Queue.__init__(self, maxsize)
|
||||
self.exit = False
|
||||
bb.utils.set_process_name("ProcessEQueue")
|
||||
|
||||
@@ -222,10 +222,11 @@ class ProcessEventQueue(multiprocessing.queues.Queue):
|
||||
|
||||
def waitEvent(self, timeout):
|
||||
if self.exit:
|
||||
return self.getEvent()
|
||||
sys.exit(1)
|
||||
try:
|
||||
if not self.server.is_alive():
|
||||
return self.getEvent()
|
||||
self.setexit()
|
||||
return None
|
||||
return self.get(True, timeout)
|
||||
except Empty:
|
||||
return None
|
||||
@@ -234,10 +235,9 @@ class ProcessEventQueue(multiprocessing.queues.Queue):
|
||||
try:
|
||||
if not self.server.is_alive():
|
||||
self.setexit()
|
||||
return None
|
||||
return self.get(False)
|
||||
except Empty:
|
||||
if self.exit:
|
||||
sys.exit(1)
|
||||
return None
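Both versions of ProcessEventQueue above poll a multiprocessing queue and translate queue.Empty into "no event"; the import simply moves from Python 2's Queue module to Python 3's queue. A cut-down sketch of that wait pattern, with illustrative names:

    from multiprocessing import Queue
    from queue import Empty   # spelled "Queue" (capital Q) on Python 2

    def wait_event(q, timeout):
        """Return the next event from q, or None if nothing arrives in time."""
        try:
            return q.get(True, timeout)
        except Empty:
            return None

    q = Queue()
    q.put("hello")
    print(wait_event(q, 1.0))   # -> hello
    print(wait_event(q, 0.1))   # -> None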
|
||||
|
||||
|
||||
|
||||
@@ -31,33 +31,31 @@
|
||||
in the server's main loop.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
import hashlib
|
||||
import time
|
||||
import socket
|
||||
import signal
|
||||
import threading
|
||||
import pickle
|
||||
import inspect
|
||||
import select
|
||||
import http.client
|
||||
import xmlrpc.client
|
||||
from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
|
||||
|
||||
import bb
|
||||
import xmlrpclib, sys
|
||||
from bb import daemonize
|
||||
from bb.ui import uievent
|
||||
from . import BitBakeBaseServer, BitBakeBaseServerConnection, BaseImplServer
|
||||
import hashlib, time
|
||||
import socket
|
||||
import os, signal
|
||||
import threading
|
||||
try:
|
||||
import cPickle as pickle
|
||||
except ImportError:
|
||||
import pickle
|
||||
|
||||
DEBUG = False
|
||||
|
||||
class BBTransport(xmlrpc.client.Transport):
|
||||
from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
|
||||
import inspect, select, httplib
|
||||
|
||||
from . import BitBakeBaseServer, BitBakeBaseServerConnection, BaseImplServer
|
||||
|
||||
class BBTransport(xmlrpclib.Transport):
|
||||
def __init__(self, timeout):
|
||||
self.timeout = timeout
|
||||
self.connection_token = None
|
||||
xmlrpc.client.Transport.__init__(self)
|
||||
xmlrpclib.Transport.__init__(self)
|
||||
|
||||
# Modified from default to pass timeout to HTTPConnection
|
||||
def make_connection(self, host):
|
||||
@@ -69,7 +67,7 @@ class BBTransport(xmlrpc.client.Transport):
|
||||
# create a HTTP connection object from a host descriptor
|
||||
chost, self._extra_headers, x509 = self.get_host_info(host)
|
||||
#store the host argument along with the connection object
|
||||
self._connection = host, http.client.HTTPConnection(chost, timeout=self.timeout)
|
||||
self._connection = host, httplib.HTTPConnection(chost, timeout=self.timeout)
|
||||
return self._connection[1]
|
||||
|
||||
def set_connection_token(self, token):
|
||||
@@ -78,11 +76,11 @@ class BBTransport(xmlrpc.client.Transport):
|
||||
def send_content(self, h, body):
|
||||
if self.connection_token:
|
||||
h.putheader("Bitbake-token", self.connection_token)
|
||||
xmlrpc.client.Transport.send_content(self, h, body)
|
||||
xmlrpclib.Transport.send_content(self, h, body)
|
||||
|
||||
def _create_server(host, port, timeout = 60):
|
||||
t = BBTransport(timeout)
|
||||
s = xmlrpc.client.ServerProxy("http://%s:%d/" % (host, port), transport=t, allow_none=True, use_builtin_types=True)
|
||||
s = xmlrpclib.ServerProxy("http://%s:%d/" % (host, port), transport=t, allow_none=True)
|
||||
return s, t
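The xmlrpc hunk keeps the same trick on both sides of the port: subclass the XML-RPC Transport so every HTTP connection gets an explicit timeout (bitbake's version also adds a connection-token header). A minimal Python 3 sketch of the timeout part, with a placeholder address left commented out:

    import http.client
    import xmlrpc.client

    class TimeoutTransport(xmlrpc.client.Transport):
        def __init__(self, timeout):
            self.timeout = timeout
            xmlrpc.client.Transport.__init__(self)

        # Same override as make_connection() in the hunk above: build the
        # HTTPConnection ourselves so the timeout can be passed through.
        def make_connection(self, host):
            if self._connection and host == self._connection[0]:
                return self._connection[1]
            chost, self._extra_headers, x509 = self.get_host_info(host)
            self._connection = host, http.client.HTTPConnection(chost, timeout=self.timeout)
            return self._connection[1]

    def create_server(host, port, timeout=60):
        t = TimeoutTransport(timeout)
        s = xmlrpc.client.ServerProxy("http://%s:%d/" % (host, port),
                                      transport=t, allow_none=True)
        return s, t

    # proxy, transport = create_server("localhost", 8888)  # placeholder address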
|
||||
|
||||
class BitBakeServerCommands():
|
||||
@@ -130,7 +128,7 @@ class BitBakeServerCommands():
|
||||
def addClient(self):
|
||||
if self.has_client:
|
||||
return None
|
||||
token = hashlib.md5(str(time.time()).encode("utf-8")).hexdigest()
|
||||
token = hashlib.md5(str(time.time())).hexdigest()
|
||||
self.server.set_connection_token(token)
|
||||
self.has_client = True
|
||||
return token
|
||||
@@ -180,7 +178,7 @@ class XMLRPCProxyServer(BaseImplServer):
|
||||
""" not a real working server, but a stub for a proxy server connection
|
||||
|
||||
"""
|
||||
def __init__(self, host, port, use_builtin_types=True):
|
||||
def __init__(self, host, port):
|
||||
self.host = host
|
||||
self.port = port
|
||||
|
||||
@@ -234,7 +232,7 @@ class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer):
|
||||
while not self.quit:
|
||||
fds = [self]
|
||||
nextsleep = 0.1
|
||||
for function, data in list(self._idlefuns.items()):
|
||||
for function, data in self._idlefuns.items():
|
||||
retval = None
|
||||
try:
|
||||
retval = function(self, data, False)
|
||||
@@ -269,7 +267,7 @@ class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer):
|
||||
pass
|
||||
|
||||
# Tell idle functions we're exiting
|
||||
for function, data in list(self._idlefuns.items()):
|
||||
for function, data in self._idlefuns.items():
|
||||
try:
|
||||
retval = function(self, data, True)
|
||||
except:
|
||||
@@ -381,7 +379,7 @@ class BitBakeXMLRPCClient(BitBakeBaseServer):
|
||||
bb.warn("Could not create socket for %s:%s (%s)" % (host, port, str(e)))
|
||||
raise e
|
||||
try:
|
||||
self.serverImpl = XMLRPCProxyServer(host, port, use_builtin_types=True)
|
||||
self.serverImpl = XMLRPCProxyServer(host, port)
|
||||
self.connection = BitBakeXMLRPCServerConnection(self.serverImpl, (ip, 0), self.observer_only, featureset)
|
||||
return self.connection.connect(self.token)
|
||||
except Exception as e:
|
||||
|
||||
@@ -3,14 +3,19 @@ import logging
|
||||
import os
|
||||
import re
|
||||
import tempfile
|
||||
import pickle
|
||||
import bb.data
|
||||
from bb.checksum import FileChecksumCache
|
||||
|
||||
logger = logging.getLogger('BitBake.SigGen')
|
||||
|
||||
try:
|
||||
import cPickle as pickle
|
||||
except ImportError:
|
||||
import pickle
|
||||
logger.info('Importing cPickle failed. Falling back to a very slow implementation.')
|
||||
|
||||
def init(d):
|
||||
siggens = [obj for obj in globals().values()
|
||||
siggens = [obj for obj in globals().itervalues()
|
||||
if type(obj) is type and issubclass(obj, SignatureGenerator)]
|
||||
|
||||
desired = d.getVar("BB_SIGNATURE_HANDLER", True) or "noop"
|
||||
@@ -30,6 +35,7 @@ class SignatureGenerator(object):
|
||||
name = "noop"
|
||||
|
||||
def __init__(self, data):
|
||||
self.basehash = {}
|
||||
self.taskhash = {}
|
||||
self.runtaskdeps = {}
|
||||
self.file_checksum_values = {}
|
||||
@@ -61,11 +67,10 @@ class SignatureGenerator(object):
|
||||
return
|
||||
|
||||
def get_taskdata(self):
|
||||
return (self.runtaskdeps, self.taskhash, self.file_checksum_values, self.taints)
|
||||
return (self.runtaskdeps, self.taskhash, self.file_checksum_values, self.taints, self.basehash)
|
||||
|
||||
def set_taskdata(self, data):
|
||||
self.runtaskdeps, self.taskhash, self.file_checksum_values, self.taints = data
|
||||
|
||||
self.runtaskdeps, self.taskhash, self.file_checksum_values, self.taints, self.basehash = data
|
||||
|
||||
class SignatureGeneratorBasic(SignatureGenerator):
|
||||
"""
|
||||
@@ -133,7 +138,11 @@ class SignatureGeneratorBasic(SignatureGenerator):
|
||||
var = lookupcache[dep]
|
||||
if var is not None:
|
||||
data = data + str(var)
|
||||
self.basehash[fn + "." + task] = hashlib.md5(data.encode("utf-8")).hexdigest()
|
||||
datahash = hashlib.md5(data).hexdigest()
|
||||
k = fn + "." + task
|
||||
if k in self.basehash and self.basehash[k] != datahash:
|
||||
bb.error("When reparsing %s, the basehash value changed from %s to %s. The metadata is not deterministic and this needs to be fixed." % (k, self.basehash[k], datahash))
|
||||
self.basehash[k] = datahash
|
||||
taskdeps[task] = alldeps
|
||||
|
||||
self.taskdeps[fn] = taskdeps
|
||||
@@ -181,6 +190,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
|
||||
def get_taskhash(self, fn, task, deps, dataCache):
|
||||
k = fn + "." + task
|
||||
data = dataCache.basetaskhash[k]
|
||||
self.basehash[k] = data
|
||||
self.runtaskdeps[k] = []
|
||||
self.file_checksum_values[k] = []
|
||||
recipename = dataCache.pkg_fn[fn]
|
||||
@@ -216,9 +226,9 @@ class SignatureGeneratorBasic(SignatureGenerator):
|
||||
if taint:
|
||||
data = data + taint
|
||||
self.taints[k] = taint
|
||||
logger.warning("%s is tainted from a forced run" % k)
|
||||
logger.warn("%s is tainted from a forced run" % k)
|
||||
|
||||
h = hashlib.md5(data.encode("utf-8")).hexdigest()
|
||||
h = hashlib.md5(data).hexdigest()
|
||||
self.taskhash[k] = h
|
||||
#d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task])
|
||||
return h
|
||||
@@ -277,12 +287,21 @@ class SignatureGeneratorBasic(SignatureGenerator):
|
||||
if 'nostamp:' in self.taints[k]:
|
||||
data['taint'] = self.taints[k]
|
||||
|
||||
computed_basehash = calc_basehash(data)
|
||||
if computed_basehash != self.basehash[k]:
|
||||
bb.error("Basehash mismatch %s versus %s for %s" % (computed_basehash, self.basehash[k], k))
|
||||
if runtime and k in self.taskhash:
|
||||
computed_taskhash = calc_taskhash(data)
|
||||
if computed_taskhash != self.taskhash[k]:
|
||||
bb.error("Taskhash mismatch %s versus %s for %s" % (computed_taskhash, self.taskhash[k], k))
|
||||
sigfile = sigfile.replace(self.taskhash[k], computed_taskhash)
|
||||
|
||||
fd, tmpfile = tempfile.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.")
|
||||
try:
|
||||
with os.fdopen(fd, "wb") as stream:
|
||||
p = pickle.dump(data, stream, -1)
|
||||
stream.flush()
|
||||
os.chmod(tmpfile, 0o664)
|
||||
os.chmod(tmpfile, 0664)
|
||||
os.rename(tmpfile, sigfile)
|
||||
except (OSError, IOError) as err:
|
||||
try:
|
||||
@@ -291,15 +310,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
|
||||
pass
|
||||
raise err
|
||||
|
||||
computed_basehash = calc_basehash(data)
|
||||
if computed_basehash != self.basehash[k]:
|
||||
bb.error("Basehash mismatch %s verses %s for %s" % (computed_basehash, self.basehash[k], k))
|
||||
if runtime and k in self.taskhash:
|
||||
computed_taskhash = calc_taskhash(data)
|
||||
if computed_taskhash != self.taskhash[k]:
|
||||
bb.error("Taskhash mismatch %s verses %s for %s" % (computed_taskhash, self.taskhash[k], k))
|
||||
|
||||
|
||||
def dump_sigs(self, dataCache, options):
|
||||
for fn in self.taskdeps:
|
||||
for task in self.taskdeps[fn]:
|
||||
@@ -448,11 +458,6 @@ def compare_sigfiles(a, b, recursecb = None):
|
||||
for dep in changed:
|
||||
output.append("Variable %s value changed from '%s' to '%s'" % (dep, a_data['varvals'][dep], b_data['varvals'][dep]))
|
||||
|
||||
if not 'file_checksum_values' in a_data:
|
||||
a_data['file_checksum_values'] = {}
|
||||
if not 'file_checksum_values' in b_data:
|
||||
b_data['file_checksum_values'] = {}
|
||||
|
||||
changed, added, removed = file_checksums_diff(a_data['file_checksum_values'], b_data['file_checksum_values'])
|
||||
if changed:
|
||||
for f, old, new in changed:
|
||||
@@ -464,10 +469,6 @@ def compare_sigfiles(a, b, recursecb = None):
|
||||
for f in removed:
|
||||
output.append("Dependency on checksum of file %s was removed" % (f))
|
||||
|
||||
if not 'runtaskdeps' in a_data:
|
||||
a_data['runtaskdeps'] = {}
|
||||
if not 'runtaskdeps' in b_data:
|
||||
b_data['runtaskdeps'] = {}
|
||||
|
||||
if len(a_data['runtaskdeps']) != len(b_data['runtaskdeps']):
|
||||
changed = ["Number of task dependencies changed"]
|
||||
@@ -540,7 +541,7 @@ def calc_basehash(sigdata):
|
||||
if val is not None:
|
||||
basedata = basedata + str(val)
|
||||
|
||||
return hashlib.md5(basedata.encode("utf-8")).hexdigest()
|
||||
return hashlib.md5(basedata).hexdigest()
|
||||
|
||||
def calc_taskhash(sigdata):
|
||||
data = sigdata['basehash']
|
||||
@@ -549,7 +550,8 @@ def calc_taskhash(sigdata):
|
||||
data = data + sigdata['runtaskhashes'][dep]
|
||||
|
||||
for c in sigdata['file_checksum_values']:
|
||||
data = data + c[1]
|
||||
if c[1]:
|
||||
data = data + c[1]
|
||||
|
||||
if 'taint' in sigdata:
|
||||
if 'nostamp:' in sigdata['taint']:
|
||||
@@ -557,7 +559,7 @@ def calc_taskhash(sigdata):
|
||||
else:
|
||||
data = data + sigdata['taint']
|
||||
|
||||
return hashlib.md5(data.encode("utf-8")).hexdigest()
|
||||
return hashlib.md5(data).hexdigest()
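A recurring change in the siggen.py hunks above is that hashlib.md5() must be fed bytes under Python 3, hence the data.encode("utf-8") calls around basehash and taskhash computation. The pattern in isolation, applied to a toy signature string:

    import hashlib

    def basehash(sigdata_text):
        """Fingerprint task metadata; Python 3 hash objects accept bytes only."""
        # md5 is used here as a content fingerprint, not for security.
        return hashlib.md5(sigdata_text.encode("utf-8")).hexdigest()

    print(basehash("do_compile" + "CC=gcc" + "CFLAGS=-O2"))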
|
||||
|
||||
|
||||
def dump_sigfile(a):
|
||||
|
||||
@@ -37,24 +37,27 @@ def re_match_strings(target, strings):
|
||||
return any(name == target or re.match(name, target)
|
||||
for name in strings)
|
||||
|
||||
class TaskEntry:
|
||||
def __init__(self):
|
||||
self.tdepends = []
|
||||
self.idepends = []
|
||||
self.irdepends = []
|
||||
|
||||
class TaskData:
|
||||
"""
|
||||
BitBake Task Data implementation
|
||||
"""
|
||||
def __init__(self, abort = True, tryaltconfigs = False, skiplist = None, allowincomplete = False):
|
||||
self.build_names_index = []
|
||||
self.run_names_index = []
|
||||
self.fn_index = []
|
||||
|
||||
self.build_targets = {}
|
||||
self.run_targets = {}
|
||||
|
||||
self.external_targets = []
|
||||
|
||||
self.seenfns = []
|
||||
self.taskentries = {}
|
||||
self.tasks_fnid = []
|
||||
self.tasks_name = []
|
||||
self.tasks_tdepends = []
|
||||
self.tasks_idepends = []
|
||||
self.tasks_irdepends = []
|
||||
# Cache to speed up task ID lookups
|
||||
self.tasks_lookup = {}
|
||||
|
||||
self.depids = {}
|
||||
self.rdepids = {}
|
||||
@@ -63,7 +66,7 @@ class TaskData:
|
||||
|
||||
self.failed_deps = []
|
||||
self.failed_rdeps = []
|
||||
self.failed_fns = []
|
||||
self.failed_fnids = []
|
||||
|
||||
self.abort = abort
|
||||
self.tryaltconfigs = tryaltconfigs
|
||||
@@ -71,6 +74,88 @@ class TaskData:
|
||||
|
||||
self.skiplist = skiplist
|
||||
|
||||
def getbuild_id(self, name):
|
||||
"""
|
||||
Return an ID number for the build target name.
|
||||
If it doesn't exist, create one.
|
||||
"""
|
||||
if not name in self.build_names_index:
|
||||
self.build_names_index.append(name)
|
||||
return len(self.build_names_index) - 1
|
||||
|
||||
return self.build_names_index.index(name)
|
||||
|
||||
def getrun_id(self, name):
|
||||
"""
|
||||
Return an ID number for the run target name.
|
||||
If it doesn't exist, create one.
|
||||
"""
|
||||
if not name in self.run_names_index:
|
||||
self.run_names_index.append(name)
|
||||
return len(self.run_names_index) - 1
|
||||
|
||||
return self.run_names_index.index(name)
|
||||
|
||||
def getfn_id(self, name):
|
||||
"""
|
||||
Return an ID number for the filename.
|
||||
If it doesn't exist, create one.
|
||||
"""
|
||||
if not name in self.fn_index:
|
||||
self.fn_index.append(name)
|
||||
return len(self.fn_index) - 1
|
||||
|
||||
return self.fn_index.index(name)
|
||||
|
||||
def gettask_ids(self, fnid):
|
||||
"""
|
||||
Return an array of the ID numbers matching a given fnid.
|
||||
"""
|
||||
ids = []
|
||||
if fnid in self.tasks_lookup:
|
||||
for task in self.tasks_lookup[fnid]:
|
||||
ids.append(self.tasks_lookup[fnid][task])
|
||||
return ids
|
||||
|
||||
def gettask_id_fromfnid(self, fnid, task):
|
||||
"""
|
||||
Return an ID number for the task matching fnid and task.
|
||||
"""
|
||||
if fnid in self.tasks_lookup:
|
||||
if task in self.tasks_lookup[fnid]:
|
||||
return self.tasks_lookup[fnid][task]
|
||||
|
||||
return None
|
||||
|
||||
def gettask_id(self, fn, task, create = True):
|
||||
"""
|
||||
Return an ID number for the task matching fn and task.
|
||||
If it doesn't exist, create one by default.
|
||||
Optionally return None instead.
|
||||
"""
|
||||
fnid = self.getfn_id(fn)
|
||||
|
||||
if fnid in self.tasks_lookup:
|
||||
if task in self.tasks_lookup[fnid]:
|
||||
return self.tasks_lookup[fnid][task]
|
||||
|
||||
if not create:
|
||||
return None
|
||||
|
||||
self.tasks_name.append(task)
|
||||
self.tasks_fnid.append(fnid)
|
||||
self.tasks_tdepends.append([])
|
||||
self.tasks_idepends.append([])
|
||||
self.tasks_irdepends.append([])
|
||||
|
||||
listid = len(self.tasks_name) - 1
|
||||
|
||||
if fnid not in self.tasks_lookup:
|
||||
self.tasks_lookup[fnid] = {}
|
||||
self.tasks_lookup[fnid][task] = listid
|
||||
|
||||
return listid
|
||||
|
||||
def add_tasks(self, fn, dataCache):
|
||||
"""
|
||||
Add tasks for a given fn to the database
|
||||
@@ -78,31 +163,29 @@ class TaskData:
|
||||
|
||||
task_deps = dataCache.task_deps[fn]
|
||||
|
||||
if fn in self.failed_fns:
|
||||
fnid = self.getfn_id(fn)
|
||||
|
||||
if fnid in self.failed_fnids:
|
||||
bb.msg.fatal("TaskData", "Trying to re-add a failed file? Something is broken...")
|
||||
|
||||
# Check if we've already seen this fn
|
||||
if fn in self.seenfns:
|
||||
if fnid in self.tasks_fnid:
|
||||
return
|
||||
|
||||
self.seenfns.append(fn)
|
||||
|
||||
self.add_extra_deps(fn, dataCache)
|
||||
|
||||
for task in task_deps['tasks']:
|
||||
|
||||
tid = "%s:%s" % (fn, task)
|
||||
self.taskentries[tid] = TaskEntry()
|
||||
|
||||
# Work out task dependencies
|
||||
parentids = []
|
||||
for dep in task_deps['parents'][task]:
|
||||
if dep not in task_deps['tasks']:
|
||||
bb.debug(2, "Not adding dependency of %s on %s since %s does not exist" % (task, dep, dep))
continue
parentid = "%s:%s" % (fn, dep)
parentid = self.gettask_id(fn, dep)
parentids.append(parentid)
self.taskentries[tid].tdepends.extend(parentids)
taskid = self.gettask_id(fn, task)
self.tasks_tdepends[taskid].extend(parentids)

# Touch all intertask dependencies
if 'depends' in task_deps and task in task_deps['depends']:
@@ -111,30 +194,29 @@ class TaskData:
if dep:
if ":" not in dep:
bb.msg.fatal("TaskData", "Error for %s, dependency %s does not contain ':' character\n. Task 'depends' should be specified in the form 'packagename:task'" % (fn, dep))
ids.append(((dep.split(":")[0]), dep.split(":")[1]))
self.seen_build_target(dep.split(":")[0])
self.taskentries[tid].idepends.extend(ids)
ids.append(((self.getbuild_id(dep.split(":")[0])), dep.split(":")[1]))
self.tasks_idepends[taskid].extend(ids)
if 'rdepends' in task_deps and task in task_deps['rdepends']:
ids = []
for dep in task_deps['rdepends'][task].split():
if dep:
if ":" not in dep:
bb.msg.fatal("TaskData", "Error for %s, dependency %s does not contain ':' character\n. Task 'rdepends' should be specified in the form 'packagename:task'" % (fn, dep))
ids.append(((dep.split(":")[0]), dep.split(":")[1]))
self.seen_run_target(dep.split(":")[0])
self.taskentries[tid].irdepends.extend(ids)
ids.append(((self.getrun_id(dep.split(":")[0])), dep.split(":")[1]))
self.tasks_irdepends[taskid].extend(ids)

# Work out build dependencies
if not fn in self.depids:
dependids = set()
if not fnid in self.depids:
dependids = {}
for depend in dataCache.deps[fn]:
dependids.add(depend)
self.depids[fn] = list(dependids)
dependids[self.getbuild_id(depend)] = None
self.depids[fnid] = dependids.keys()
logger.debug(2, "Added dependencies %s for %s", str(dataCache.deps[fn]), fn)

# Work out runtime dependencies
if not fn in self.rdepids:
rdependids = set()
if not fnid in self.rdepids:
rdependids = {}
rdepends = dataCache.rundeps[fn]
rrecs = dataCache.runrecs[fn]
rdependlist = []
@@ -142,26 +224,24 @@ class TaskData:
for package in rdepends:
for rdepend in rdepends[package]:
rdependlist.append(rdepend)
rdependids.add(rdepend)
rdependids[self.getrun_id(rdepend)] = None
for package in rrecs:
for rdepend in rrecs[package]:
rreclist.append(rdepend)
rdependids.add(rdepend)
rdependids[self.getrun_id(rdepend)] = None
if rdependlist:
logger.debug(2, "Added runtime dependencies %s for %s", str(rdependlist), fn)
if rreclist:
logger.debug(2, "Added runtime recommendations %s for %s", str(rreclist), fn)
self.rdepids[fn] = list(rdependids)
self.rdepids[fnid] = rdependids.keys()

for dep in self.depids[fn]:
self.seen_build_target(dep)
for dep in self.depids[fnid]:
if dep in self.failed_deps:
self.fail_fn(fn)
self.fail_fnid(fnid)
return
for dep in self.rdepids[fn]:
self.seen_run_target(dep)
for dep in self.rdepids[fnid]:
if dep in self.failed_rdeps:
self.fail_fn(fn)
self.fail_fnid(fnid)
return

def add_extra_deps(self, fn, dataCache):
@@ -183,7 +263,9 @@ class TaskData:
"""
Have we a build target matching this name?
"""
if target in self.build_targets and self.build_targets[target]:
targetid = self.getbuild_id(target)

if targetid in self.build_targets:
return True
return False

@@ -191,54 +273,50 @@ class TaskData:
"""
Have we a runtime target matching this name?
"""
if target in self.run_targets and self.run_targets[target]:
targetid = self.getrun_id(target)

if targetid in self.run_targets:
return True
return False

def seen_build_target(self, name):
"""
Maintain a list of build targets
"""
if name not in self.build_targets:
self.build_targets[name] = []

def add_build_target(self, fn, item):
"""
Add a build target.
If already present, append the provider fn to the list
"""
if item in self.build_targets:
if fn in self.build_targets[item]:
return
self.build_targets[item].append(fn)
return
self.build_targets[item] = [fn]
targetid = self.getbuild_id(item)
fnid = self.getfn_id(fn)

def seen_run_target(self, name):
"""
Maintain a list of runtime build targets
"""
if name not in self.run_targets:
self.run_targets[name] = []
if targetid in self.build_targets:
if fnid in self.build_targets[targetid]:
return
self.build_targets[targetid].append(fnid)
return
self.build_targets[targetid] = [fnid]

def add_runtime_target(self, fn, item):
"""
Add a runtime target.
If already present, append the provider fn to the list
"""
if item in self.run_targets:
if fn in self.run_targets[item]:
return
self.run_targets[item].append(fn)
return
self.run_targets[item] = [fn]
targetid = self.getrun_id(item)
fnid = self.getfn_id(fn)

def mark_external_target(self, target):
if targetid in self.run_targets:
if fnid in self.run_targets[targetid]:
return
self.run_targets[targetid].append(fnid)
return
self.run_targets[targetid] = [fnid]

def mark_external_target(self, item):
"""
Mark a build target as being externally requested
"""
if target not in self.external_targets:
self.external_targets.append(target)
targetid = self.getbuild_id(item)

if targetid not in self.external_targets:
self.external_targets.append(targetid)

def get_unresolved_build_targets(self, dataCache):
"""
@@ -246,12 +324,12 @@ class TaskData:
are unknown.
"""
unresolved = []
for target in self.build_targets:
for target in self.build_names_index:
if re_match_strings(target, dataCache.ignored_dependencies):
continue
if target in self.failed_deps:
if self.build_names_index.index(target) in self.failed_deps:
continue
if not self.build_targets[target]:
if not self.have_build_target(target):
unresolved.append(target)
return unresolved

@@ -261,12 +339,12 @@ class TaskData:
are unknown.
"""
unresolved = []
for target in self.run_targets:
for target in self.run_names_index:
if re_match_strings(target, dataCache.ignored_dependencies):
continue
if target in self.failed_rdeps:
if self.run_names_index.index(target) in self.failed_rdeps:
continue
if not self.run_targets[target]:
if not self.have_runtime_target(target):
unresolved.append(target)
return unresolved
@@ -274,26 +352,50 @@ class TaskData:
"""
Return a list of providers of item
"""
return self.build_targets[item]
targetid = self.getbuild_id(item)

def get_dependees(self, item):
return self.build_targets[targetid]

def get_dependees(self, itemid):
"""
Return a list of targets which depend on item
"""
dependees = []
for fn in self.depids:
if item in self.depids[fn]:
dependees.append(fn)
for fnid in self.depids:
if itemid in self.depids[fnid]:
dependees.append(fnid)
return dependees

def get_rdependees(self, item):
def get_dependees_str(self, item):
"""
Return a list of targets which depend on item as a user readable string
"""
itemid = self.getbuild_id(item)
dependees = []
for fnid in self.depids:
if itemid in self.depids[fnid]:
dependees.append(self.fn_index[fnid])
return dependees

def get_rdependees(self, itemid):
"""
Return a list of targets which depend on runtime item
"""
dependees = []
for fn in self.rdepids:
if item in self.rdepids[fn]:
dependees.append(fn)
for fnid in self.rdepids:
if itemid in self.rdepids[fnid]:
dependees.append(fnid)
return dependees

def get_rdependees_str(self, item):
"""
Return a list of targets which depend on runtime item as a user readable string
"""
itemid = self.getrun_id(item)
dependees = []
for fnid in self.rdepids:
if itemid in self.rdepids[fnid]:
dependees.append(self.fn_index[fnid])
return dependees

def get_reasons(self, item, runtime=False):
@@ -329,7 +431,7 @@ class TaskData:
except bb.providers.NoProvider:
if self.abort:
raise
self.remove_buildtarget(item)
self.remove_buildtarget(self.getbuild_id(item))

self.mark_external_target(item)

@@ -344,14 +446,14 @@ class TaskData:
return

if not item in dataCache.providers:
close_matches = self.get_close_matches(item, list(dataCache.providers.keys()))
close_matches = self.get_close_matches(item, dataCache.providers.keys())
# Is it in RuntimeProviders ?
all_p = bb.providers.getRuntimeProviders(dataCache, item)
for fn in all_p:
new = dataCache.pkg_fn[fn] + " RPROVIDES " + item
if new not in close_matches:
close_matches.append(new)
bb.event.fire(bb.event.NoProvider(item, dependees=self.get_dependees(item), reasons=self.get_reasons(item), close_matches=close_matches), cfgData)
bb.event.fire(bb.event.NoProvider(item, dependees=self.get_dependees_str(item), reasons=self.get_reasons(item), close_matches=close_matches), cfgData)
raise bb.providers.NoProvider(item)

if self.have_build_target(item):
@@ -360,10 +462,10 @@ class TaskData:
all_p = dataCache.providers[item]

eligible, foundUnique = bb.providers.filterProviders(all_p, item, cfgData, dataCache)
eligible = [p for p in eligible if not p in self.failed_fns]
eligible = [p for p in eligible if not self.getfn_id(p) in self.failed_fnids]

if not eligible:
bb.event.fire(bb.event.NoProvider(item, dependees=self.get_dependees(item), reasons=["No eligible PROVIDERs exist for '%s'" % item]), cfgData)
bb.event.fire(bb.event.NoProvider(item, dependees=self.get_dependees_str(item), reasons=["No eligible PROVIDERs exist for '%s'" % item]), cfgData)
raise bb.providers.NoProvider(item)

if len(eligible) > 1 and foundUnique == False:
@@ -375,7 +477,8 @@ class TaskData:
self.consider_msgs_cache.append(item)

for fn in eligible:
if fn in self.failed_fns:
fnid = self.getfn_id(fn)
if fnid in self.failed_fnids:
continue
logger.debug(2, "adding %s to satisfy %s", fn, item)
self.add_build_target(fn, item)
@@ -399,14 +502,14 @@ class TaskData:
all_p = bb.providers.getRuntimeProviders(dataCache, item)

if not all_p:
bb.event.fire(bb.event.NoProvider(item, runtime=True, dependees=self.get_rdependees(item), reasons=self.get_reasons(item, True)), cfgData)
bb.event.fire(bb.event.NoProvider(item, runtime=True, dependees=self.get_rdependees_str(item), reasons=self.get_reasons(item, True)), cfgData)
raise bb.providers.NoRProvider(item)

eligible, numberPreferred = bb.providers.filterProvidersRunTime(all_p, item, cfgData, dataCache)
eligible = [p for p in eligible if not p in self.failed_fns]
eligible = [p for p in eligible if not self.getfn_id(p) in self.failed_fnids]

if not eligible:
bb.event.fire(bb.event.NoProvider(item, runtime=True, dependees=self.get_rdependees(item), reasons=["No eligible RPROVIDERs exist for '%s'" % item]), cfgData)
bb.event.fire(bb.event.NoProvider(item, runtime=True, dependees=self.get_rdependees_str(item), reasons=["No eligible RPROVIDERs exist for '%s'" % item]), cfgData)
raise bb.providers.NoRProvider(item)

if len(eligible) > 1 and numberPreferred == 0:
@@ -428,80 +531,82 @@ class TaskData:

# run through the list until we find one that we can build
for fn in eligible:
if fn in self.failed_fns:
fnid = self.getfn_id(fn)
if fnid in self.failed_fnids:
continue
logger.debug(2, "adding '%s' to satisfy runtime '%s'", fn, item)
self.add_runtime_target(fn, item)
self.add_tasks(fn, dataCache)

def fail_fn(self, fn, missing_list=None):
def fail_fnid(self, fnid, missing_list=None):
"""
Mark a file as failed (unbuildable)
Remove any references from build and runtime provider lists

missing_list, A list of missing requirements for this target
"""
if fn in self.failed_fns:
if fnid in self.failed_fnids:
return
if not missing_list:
missing_list = []
logger.debug(1, "File '%s' is unbuildable, removing...", fn)
self.failed_fns.append(fn)
logger.debug(1, "File '%s' is unbuildable, removing...", self.fn_index[fnid])
self.failed_fnids.append(fnid)
for target in self.build_targets:
if fn in self.build_targets[target]:
self.build_targets[target].remove(fn)
if fnid in self.build_targets[target]:
self.build_targets[target].remove(fnid)
if len(self.build_targets[target]) == 0:
self.remove_buildtarget(target, missing_list)
for target in self.run_targets:
if fn in self.run_targets[target]:
self.run_targets[target].remove(fn)
if fnid in self.run_targets[target]:
self.run_targets[target].remove(fnid)
if len(self.run_targets[target]) == 0:
self.remove_runtarget(target, missing_list)

def remove_buildtarget(self, target, missing_list=None):
def remove_buildtarget(self, targetid, missing_list=None):
"""
Mark a build target as failed (unbuildable)
Trigger removal of any files that have this as a dependency
"""
if not missing_list:
missing_list = [target]
missing_list = [self.build_names_index[targetid]]
else:
missing_list = [target] + missing_list
logger.verbose("Target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s", target, missing_list)
self.failed_deps.append(target)
dependees = self.get_dependees(target)
for fn in dependees:
self.fail_fn(fn, missing_list)
for tid in self.taskentries:
for (idepend, idependtask) in self.taskentries[tid].idepends:
if idepend == target:
fn = tid.rsplit(":",1)[0]
self.fail_fn(fn, missing_list)
missing_list = [self.build_names_index[targetid]] + missing_list
logger.verbose("Target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s", self.build_names_index[targetid], missing_list)
self.failed_deps.append(targetid)
dependees = self.get_dependees(targetid)
for fnid in dependees:
self.fail_fnid(fnid, missing_list)
for taskid in xrange(len(self.tasks_idepends)):
idepends = self.tasks_idepends[taskid]
for (idependid, idependtask) in idepends:
if idependid == targetid:
self.fail_fnid(self.tasks_fnid[taskid], missing_list)

if self.abort and target in self.external_targets:
if self.abort and targetid in self.external_targets:
target = self.build_names_index[targetid]
logger.error("Required build target '%s' has no buildable providers.\nMissing or unbuildable dependency chain was: %s", target, missing_list)
raise bb.providers.NoProvider(target)

def remove_runtarget(self, target, missing_list=None):
def remove_runtarget(self, targetid, missing_list=None):
"""
Mark a run target as failed (unbuildable)
Trigger removal of any files that have this as a dependency
"""
if not missing_list:
missing_list = [target]
missing_list = [self.run_names_index[targetid]]
else:
missing_list = [target] + missing_list
missing_list = [self.run_names_index[targetid]] + missing_list

logger.info("Runtime target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s", target, missing_list)
self.failed_rdeps.append(target)
dependees = self.get_rdependees(target)
for fn in dependees:
self.fail_fn(fn, missing_list)
for tid in self.taskentries:
for (idepend, idependtask) in self.taskentries[tid].irdepends:
if idepend == target:
fn = tid.rsplit(":",1)[0]
self.fail_fn(fn, missing_list)
logger.info("Runtime target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s", self.run_names_index[targetid], missing_list)
self.failed_rdeps.append(targetid)
dependees = self.get_rdependees(targetid)
for fnid in dependees:
self.fail_fnid(fnid, missing_list)
for taskid in xrange(len(self.tasks_irdepends)):
irdepends = self.tasks_irdepends[taskid]
for (idependid, idependtask) in irdepends:
if idependid == targetid:
self.fail_fnid(self.tasks_fnid[taskid], missing_list)

def add_unresolved(self, cfgData, dataCache):
"""
@@ -515,16 +620,17 @@ class TaskData:
self.add_provider_internal(cfgData, dataCache, target)
added = added + 1
except bb.providers.NoProvider:
if self.abort and target in self.external_targets and not self.allowincomplete:
targetid = self.getbuild_id(target)
if self.abort and targetid in self.external_targets and not self.allowincomplete:
raise
if not self.allowincomplete:
self.remove_buildtarget(target)
self.remove_buildtarget(targetid)
for target in self.get_unresolved_run_targets(dataCache):
try:
self.add_rprovider(cfgData, dataCache, target)
added = added + 1
except (bb.providers.NoRProvider, bb.providers.MultipleRProvider):
self.remove_runtarget(target)
self.remove_runtarget(self.getrun_id(target))
logger.debug(1, "Resolved " + str(added) + " extra dependencies")
if added == 0:
break
@@ -532,13 +638,13 @@ class TaskData:

def get_providermap(self, prefix=None):
provmap = {}
for name in self.build_targets:
for name in self.build_names_index:
if prefix and not name.startswith(prefix):
continue
if self.have_build_target(name):
provider = self.get_provider(name)
if provider:
provmap[name] = provider[0]
provmap[name] = self.fn_index[provider[0]]
return provmap

def dump_data(self):
@@ -546,37 +652,39 @@ class TaskData:
Dump some debug information on the internal data structures
"""
logger.debug(3, "build_names:")
logger.debug(3, ", ".join(self.build_targets))
logger.debug(3, ", ".join(self.build_names_index))

logger.debug(3, "run_names:")
logger.debug(3, ", ".join(self.run_targets))
logger.debug(3, ", ".join(self.run_names_index))

logger.debug(3, "build_targets:")
for target in self.build_targets:
for buildid in xrange(len(self.build_names_index)):
target = self.build_names_index[buildid]
targets = "None"
if target in self.build_targets:
targets = self.build_targets[target]
logger.debug(3, " %s: %s", target, targets)
if buildid in self.build_targets:
targets = self.build_targets[buildid]
logger.debug(3, " (%s)%s: %s", buildid, target, targets)

logger.debug(3, "run_targets:")
for target in self.run_targets:
for runid in xrange(len(self.run_names_index)):
target = self.run_names_index[runid]
targets = "None"
if target in self.run_targets:
targets = self.run_targets[target]
logger.debug(3, " %s: %s", target, targets)
if runid in self.run_targets:
targets = self.run_targets[runid]
logger.debug(3, " (%s)%s: %s", runid, target, targets)

logger.debug(3, "tasks:")
for tid in self.taskentries:
logger.debug(3, " %s: %s %s %s",
tid,
self.taskentries[tid].idepends,
self.taskentries[tid].irdepends,
self.taskentries[tid].tdepends)
for task in xrange(len(self.tasks_name)):
logger.debug(3, " (%s)%s - %s: %s",
task,
self.fn_index[self.tasks_fnid[task]],
self.tasks_name[task],
self.tasks_tdepends[task])

logger.debug(3, "dependency ids (per fn):")
for fn in self.depids:
logger.debug(3, " %s: %s", fn, self.depids[fn])
for fnid in self.depids:
logger.debug(3, " %s %s: %s", fnid, self.fn_index[fnid], self.depids[fnid])

logger.debug(3, "runtime dependency ids (per fn):")
for fn in self.rdepids:
logger.debug(3, " %s: %s", fn, self.rdepids[fn])
for fnid in self.rdepids:
logger.debug(3, " %s %s: %s", fnid, self.fn_index[fnid], self.rdepids[fnid])
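The TaskData hunks above replace string-keyed structures (build_targets, run_targets, depids, rdepids, failed_fns) with integer ids held in parallel index lists (build_names_index, run_names_index, fn_index). A minimal sketch of that name-to-id indexing pattern, using hypothetical names rather than the exact TaskData API:

# Illustrative sketch only; NameIndex/get_id are not BitBake names.
class NameIndex:
    def __init__(self):
        self.names = []    # id -> name, like build_names_index / fn_index above
        self._ids = {}     # name -> id, avoids repeated list.index() scans

    def get_id(self, name):
        # Return the id for name, creating one if it does not exist yet
        if name not in self._ids:
            self._ids[name] = len(self.names)
            self.names.append(name)
        return self._ids[name]

    def get_name(self, nameid):
        return self.names[nameid]

# Targets are then keyed by id instead of by string:
index = NameIndex()
build_targets = {}                                   # targetid -> [fnid, ...]
targetid = index.get_id("virtual/kernel")            # example provider name
fnid = index.get_id("/path/to/example-recipe.bb")    # example recipe filename
build_targets.setdefault(targetid, []).append(fnid)

The getbuild_id/getrun_id/getfn_id helpers above do the same lookup with list.index() instead of a second dictionary, trading lookup speed for less state.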
@@ -191,8 +191,8 @@ class PythonReferenceTest(ReferenceTest):
if hasattr(bb.utils, "_context"):
self.context = bb.utils._context
else:
import builtins
self.context = builtins.__dict__
import __builtin__
self.context = __builtin__.__dict__

def parseExpression(self, exp):
parsedvar = self.d.expandWithRefs(exp, None)
@@ -302,7 +302,7 @@ bb.data.getVar(a(), d, False)

deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)

self.assertEqual(deps, set(["somevar", "bar", "something", "inexpand", "test", "test2", "a"]))
self.assertEquals(deps, set(["somevar", "bar", "something", "inexpand", "test", "test2", "a"]))

shelldata = """
@@ -349,7 +349,7 @@ esac

deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)

self.assertEqual(deps, set(["somevar", "inverted"] + execs))
self.assertEquals(deps, set(["somevar", "inverted"] + execs))

def test_vardeps(self):
@@ -359,7 +359,7 @@ esac

deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)

self.assertEqual(deps, set(["oe_libinstall"]))
self.assertEquals(deps, set(["oe_libinstall"]))

def test_vardeps_expand(self):
self.d.setVar("oe_libinstall", "echo test")
@@ -368,7 +368,7 @@ esac

deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)

self.assertEqual(deps, set(["oe_libinstall"]))
self.assertEquals(deps, set(["oe_libinstall"]))

#Currently no wildcard support
#def test_vardeps_wildcards(self):

@@ -34,14 +34,14 @@ class COWTestCase(unittest.TestCase):
from bb.COW import COWDictBase
a = COWDictBase.copy()

self.assertEqual(False, 'a' in a)
self.assertEquals(False, a.has_key('a'))

a['a'] = 'a'
a['b'] = 'b'
self.assertEqual(True, 'a' in a)
self.assertEqual(True, 'b' in a)
self.assertEqual('a', a['a'] )
self.assertEqual('b', a['b'] )
self.assertEquals(True, a.has_key('a'))
self.assertEquals(True, a.has_key('b'))
self.assertEquals('a', a['a'] )
self.assertEquals('b', a['b'] )

def testCopyCopy(self):
"""
@@ -60,31 +60,31 @@ class COWTestCase(unittest.TestCase):
c['a'] = 30

# test separation of the two instances
self.assertEqual(False, 'c' in c)
self.assertEqual(30, c['a'])
self.assertEqual(10, b['a'])
self.assertEquals(False, c.has_key('c'))
self.assertEquals(30, c['a'])
self.assertEquals(10, b['a'])

# test copy
b_2 = b.copy()
c_2 = c.copy()

self.assertEqual(False, 'c' in c_2)
self.assertEqual(10, b_2['a'])
self.assertEquals(False, c_2.has_key('c'))
self.assertEquals(10, b_2['a'])

b_2['d'] = 40
self.assertEqual(False, 'd' in c_2)
self.assertEqual(True, 'd' in b_2)
self.assertEqual(40, b_2['d'])
self.assertEqual(False, 'd' in b)
self.assertEqual(False, 'd' in c)
self.assertEquals(False, c_2.has_key('d'))
self.assertEquals(True, b_2.has_key('d'))
self.assertEquals(40, b_2['d'])
self.assertEquals(False, b.has_key('d'))
self.assertEquals(False, c.has_key('d'))

c_2['d'] = 30
self.assertEqual(True, 'd' in c_2)
self.assertEqual(True, 'd' in b_2)
self.assertEqual(30, c_2['d'])
self.assertEqual(40, b_2['d'])
self.assertEqual(False, 'd' in b)
self.assertEqual(False, 'd' in c)
self.assertEquals(True, c_2.has_key('d'))
self.assertEquals(True, b_2.has_key('d'))
self.assertEquals(30, c_2['d'])
self.assertEquals(40, b_2['d'])
self.assertEquals(False, b.has_key('d'))
self.assertEquals(False, c.has_key('d'))

# test copy of the copy
c_3 = c_2.copy()
@@ -92,19 +92,19 @@ class COWTestCase(unittest.TestCase):
b_3_2 = b_2.copy()

c_3['e'] = 4711
self.assertEqual(4711, c_3['e'])
self.assertEqual(False, 'e' in c_2)
self.assertEqual(False, 'e' in b_3)
self.assertEqual(False, 'e' in b_3_2)
self.assertEqual(False, 'e' in b_2)
self.assertEquals(4711, c_3['e'])
self.assertEquals(False, c_2.has_key('e'))
self.assertEquals(False, b_3.has_key('e'))
self.assertEquals(False, b_3_2.has_key('e'))
self.assertEquals(False, b_2.has_key('e'))

b_3['e'] = 'viel'
self.assertEqual('viel', b_3['e'])
self.assertEqual(4711, c_3['e'])
self.assertEqual(False, 'e' in c_2)
self.assertEqual(True, 'e' in b_3)
self.assertEqual(False, 'e' in b_3_2)
self.assertEqual(False, 'e' in b_2)
self.assertEquals('viel', b_3['e'])
self.assertEquals(4711, c_3['e'])
self.assertEquals(False, c_2.has_key('e'))
self.assertEquals(True, b_3.has_key('e'))
self.assertEquals(False, b_3_2.has_key('e'))
self.assertEquals(False, b_2.has_key('e'))

def testCow(self):
from bb.COW import COWDictBase
@@ -115,12 +115,12 @@ class COWTestCase(unittest.TestCase):

copy = c.copy()

self.assertEqual(1027, c['123'])
self.assertEqual(4711, c['other'])
self.assertEqual({'abc':10, 'bcd':20}, c['d'])
self.assertEqual(1027, copy['123'])
self.assertEqual(4711, copy['other'])
self.assertEqual({'abc':10, 'bcd':20}, copy['d'])
self.assertEquals(1027, c['123'])
self.assertEquals(4711, c['other'])
self.assertEquals({'abc':10, 'bcd':20}, c['d'])
self.assertEquals(1027, copy['123'])
self.assertEquals(4711, copy['other'])
self.assertEquals({'abc':10, 'bcd':20}, copy['d'])

# cow it now
copy['123'] = 1028
@@ -128,9 +128,9 @@ class COWTestCase(unittest.TestCase):
copy['d']['abc'] = 20

self.assertEqual(1027, c['123'])
self.assertEqual(4711, c['other'])
self.assertEqual({'abc':10, 'bcd':20}, c['d'])
self.assertEqual(1028, copy['123'])
self.assertEqual(4712, copy['other'])
self.assertEqual({'abc':20, 'bcd':20}, copy['d'])
self.assertEquals(1027, c['123'])
self.assertEquals(4711, c['other'])
self.assertEquals({'abc':10, 'bcd':20}, c['d'])
self.assertEquals(1028, copy['123'])
self.assertEquals(4712, copy['other'])
self.assertEquals({'abc':20, 'bcd':20}, copy['d'])

@@ -147,14 +147,14 @@ class DataExpansions(unittest.TestCase):
self.assertEqual(self.d.getVar("foo", False), None)

def test_keys(self):
keys = list(self.d.keys())
self.assertCountEqual(keys, ['value_of_foo', 'foo', 'bar'])
keys = self.d.keys()
self.assertEqual(keys, ['value_of_foo', 'foo', 'bar'])

def test_keys_deletion(self):
newd = bb.data.createCopy(self.d)
newd.delVar("bar")
keys = list(newd.keys())
self.assertCountEqual(keys, ['value_of_foo', 'foo'])
keys = newd.keys()
self.assertEqual(keys, ['value_of_foo', 'foo'])

class TestNestedExpansions(unittest.TestCase):
def setUp(self):
@@ -334,7 +334,7 @@ class TestOverrides(unittest.TestCase):
self.d.setVar("TEST2_bar", "testvalue2")
bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST2", True), "testvalue2")
self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST2', 'OVERRIDES', 'TEST2_bar'])
self.assertItemsEqual(self.d.keys(), ['TEST', 'TEST2', 'OVERRIDES', 'TEST2_bar'])

def test_multiple_override(self):
self.d.setVar("TEST_bar", "testvalue2")
@@ -342,7 +342,7 @@ class TestOverrides(unittest.TestCase):
self.d.setVar("TEST_foo", "testvalue4")
bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "testvalue3")
self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST_foo', 'OVERRIDES', 'TEST_bar', 'TEST_local'])
self.assertItemsEqual(self.d.keys(), ['TEST', 'TEST_foo', 'OVERRIDES', 'TEST_bar', 'TEST_local'])

def test_multiple_combined_overrides(self):
self.d.setVar("TEST_local_foo_bar", "testvalue3")
@@ -22,7 +22,6 @@
import unittest
import tempfile
import subprocess
import collections
import os
from bb.fetch2 import URI
from bb.fetch2 import FetchMethod
@@ -134,10 +133,10 @@ class URITest(unittest.TestCase):
'userinfo': 'anoncvs:anonymous',
'username': 'anoncvs',
'password': 'anonymous',
'params': collections.OrderedDict([
('tag', 'V0-99-81'),
('module', 'familiar/dist/ipkg')
]),
'params': {
'tag': 'V0-99-81',
'module': 'familiar/dist/ipkg'
},
'query': {},
'relative': False
},
@@ -451,7 +450,7 @@ class MirrorUriTest(FetcherTest):
class FetcherLocalTest(FetcherTest):
def setUp(self):
def touch(fn):
with open(fn, 'a'):
with file(fn, 'a'):
os.utime(fn, None)

super(FetcherLocalTest, self).setUp()
@@ -661,7 +660,7 @@ class URLHandle(unittest.TestCase):
datatable = {
"http://www.google.com/index.html" : ('http', 'www.google.com', '/index.html', '', '', {}),
"cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'}),
"cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', collections.OrderedDict([('tag', 'V0-99-81'), ('module', 'familiar/dist/ipkg')])),
"cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', {'tag': 'V0-99-81', 'module': 'familiar/dist/ipkg'}),
"git://git.openembedded.org/bitbake;branch=@foo" : ('git', 'git.openembedded.org', '/bitbake', '', '', {'branch': '@foo'}),
"file://somelocation;someparam=1": ('file', '', 'somelocation', '', '', {'someparam': '1'}),
}
@@ -768,7 +767,6 @@ class FetchLatestVersionTest(FetcherTest):

class FetchCheckStatusTest(FetcherTest):
test_wget_uris = ["http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2",
"http://www.cups.org/software/ipptool/ipptool-20130731-linux-ubuntu-i686.tar.gz",
"http://www.cups.org/",
"http://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz",
"http://downloads.yoctoproject.org/releases/sato/sato-engine-0.2.tar.gz",

@@ -50,7 +50,7 @@ C = "3"
def parsehelper(self, content, suffix = ".bb"):

f = tempfile.NamedTemporaryFile(suffix = suffix)
f.write(bytes(content, "utf-8"))
f.write(content)
f.flush()
os.chdir(os.path.dirname(f.name))
return f
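The test hunks above move between the Python 3 spellings (assertEqual, 'x' in d, list(d.keys()), assertCountEqual, open) and their Python 2 counterparts (assertEquals, d.has_key(), d.keys() as a list, assertItemsEqual, file). A short illustrative snippet, not taken from the test suite, of the dictionary differences the assertions rely on:

# Illustrative only: the dict behaviour behind the conversions above.
d = {'a': 1, 'b': 2}

# d.has_key('a') exists only on Python 2; membership works on both:
print('a' in d)                  # True

# Python 2 d.keys() returns a list; Python 3 returns a view, hence list():
print(sorted(list(d.keys())))    # ['a', 'b']

assertEquals and assertItemsEqual are the older unittest spellings of assertEqual and assertCountEqual.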
@@ -21,19 +21,19 @@ import bb
import re
import os

os.environ["DJANGO_SETTINGS_MODULE"] = "toaster.toastermain.settings"

import django
from django.utils import timezone

import toaster
# Add toaster module to the search path to help django.setup() find the right
# modules
sys.path.insert(0, os.path.dirname(toaster.__file__))

#Set the DJANGO_SETTINGS_MODULE if it's not already set
os.environ["DJANGO_SETTINGS_MODULE"] =\
os.environ.get("DJANGO_SETTINGS_MODULE",
"toaster.toastermain.settings")
# Setup django framework (needs to be done before importing modules)
def _configure_toaster():
""" Add toaster to sys path for importing modules
"""
sys.path.append(os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'toaster'))
_configure_toaster()

django.setup()

from orm.models import Build, Task, Recipe, Layer_Version, Layer, Target, LogMessage, HelpText
@@ -54,11 +54,11 @@ from datetime import datetime, timedelta

from django.db import transaction, connection

# pylint: disable=invalid-name
# the logger name is standard throughout BitBake
logger = logging.getLogger("ToasterLogger")

class NotExisting(Exception):
pass

@@ -494,7 +494,7 @@ class ORMWrapper(object):
parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
tf_obj = Target_File.objects.create(
target = target_obj,
path = path,
path = unicode(path, 'utf-8'),
size = size,
inodetype = Target_File.ITYPE_DIRECTORY,
permission = permission,
@@ -519,7 +519,7 @@ class ORMWrapper(object):

tf_obj = Target_File.objects.create(
target = target_obj,
path = path,
path = unicode(path, 'utf-8'),
size = size,
inodetype = inodetype,
permission = permission,
@@ -550,7 +550,9 @@ class ORMWrapper(object):
filetarget_path = "/".join(fcpl)

try:
filetarget_obj = Target_File.objects.get(target = target_obj, path = filetarget_path)
filetarget_obj = Target_File.objects.get(
target = target_obj,
path = unicode(filetarget_path, 'utf-8'))
except Target_File.DoesNotExist:
# we might have an invalid link; no way to detect this. just set it to None
filetarget_obj = None
@@ -559,7 +561,7 @@ class ORMWrapper(object):

tf_obj = Target_File.objects.create(
target = target_obj,
path = path,
path = unicode(path, 'utf-8'),
size = size,
inodetype = Target_File.ITYPE_SYMLINK,
permission = permission,
@@ -662,8 +664,8 @@ class ORMWrapper(object):
dep_type = tdeptype,
target = target_obj))
except KeyError as e:
logger.warning("Could not add dependency to the package %s "
"because %s is an unknown package", p, px)
logger.warn("Could not add dependency to the package %s "
"because %s is an unknown package", p, px)

if len(packagedeps_objs) > 0:
Package_Dependency.objects.bulk_create(packagedeps_objs)
@@ -671,7 +673,7 @@ class ORMWrapper(object):
logger.info("No package dependencies created")

if len(errormsg) > 0:
logger.warning("buildinfohelper: target_package_info could not identify recipes: \n%s", errormsg)
logger.warn("buildinfohelper: target_package_info could not identify recipes: \n%s", errormsg)

def save_target_image_file_information(self, target_obj, file_name, file_size):
Target_Image_File.objects.create( target = target_obj,
@@ -930,7 +932,7 @@ class BuildInfoHelper(object):
return lvo

#if we get here, we didn't read layers correctly; dump whatever information we have on the error log
logger.warning("Could not match layer version for recipe path %s : %s", path, self.orm_wrapper.layer_version_objects)
logger.warn("Could not match layer version for recipe path %s : %s", path, self.orm_wrapper.layer_version_objects)

#mockup the new layer
unknown_layer, _ = Layer.objects.get_or_create(name="Unidentified layer", layer_index_url="")
@@ -1001,7 +1003,7 @@ class BuildInfoHelper(object):
self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)] = layerinfos[layer]['version']
self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)]['local_path'] = layerinfos[layer]['local_path']
except NotExisting as nee:
logger.warning("buildinfohelper: cannot identify layer exception:%s ", nee)
logger.warn("buildinfohelper: cannot identify layer exception:%s ", nee)

def store_started_build(self, event, build_log_path):
@@ -1067,7 +1069,7 @@ class BuildInfoHelper(object):

for t in self.internal_state['targets']:
if t.is_image == True:
output_files = list(evdata.keys())
output_files = list(evdata.viewkeys())
for output in output_files:
if t.target in output and 'rootfs' in output and not output.endswith(".manifest"):
self.orm_wrapper.save_target_image_file_information(t, output, evdata[output])
@@ -1238,27 +1240,17 @@ class BuildInfoHelper(object):
self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata, pkgdata, self.internal_state['recipes'], built_package=True)
self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata.copy(), pkgdata, self.internal_state['recipes'], built_package=False)
except KeyError as e:
logger.warning("KeyError in save_target_package_information"
"%s ", e)
logger.warn("KeyError in save_target_package_information"
"%s ", e)

try:
self.orm_wrapper.save_target_file_information(self.internal_state['build'], target, filedata)
except KeyError as e:
logger.warning("KeyError in save_target_file_information"
"%s ", e)
logger.warn("KeyError in save_target_file_information"
"%s ", e)

def cancel_cli_build(self):
"""
If a build is currently underway, set its state to CANCELLED;
note that this only gets called for command line builds which are
interrupted, so it doesn't touch any BuildRequest objects
"""
build = self.internal_state['build']
if build:
build.outcome = Build.CANCELLED
build.save()

def store_dependency_information(self, event):
assert '_depgraph' in vars(event)
@@ -1400,7 +1392,7 @@ class BuildInfoHelper(object):
Task_Dependency.objects.bulk_create(taskdeps_objects)

if len(errormsg) > 0:
logger.warning("buildinfohelper: dependency info not identify recipes: \n%s", errormsg)
logger.warn("buildinfohelper: dependency info not identify recipes: \n%s", errormsg)

def store_build_package_information(self, event):
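The buildinfohelper hunks above switch between logger.warning and its older alias logger.warn, and wrap file paths with unicode(path, 'utf-8') before handing them to the Django ORM. A small illustrative helper (hypothetical name, not part of buildinfohelper) for that path coercion:

def to_text(path, encoding='utf-8'):
    # Decode byte-string paths before storing them in Target_File.path;
    # equivalent to the inline unicode(path, 'utf-8') calls above.
    if isinstance(path, bytes):
        return path.decode(encoding)
    return path

# e.g. Target_File.objects.create(target=target_obj, path=to_text(path), ...)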
17 bitbake/lib/bb/ui/crumbs/__init__.py Normal file
@@ -0,0 +1,17 @@
#
# Gtk+ UI pieces for BitBake
#
# Copyright (C) 2006-2007 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
44 bitbake/lib/bb/ui/crumbs/hig/crumbsdialog.py Normal file
@@ -0,0 +1,44 @@
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2011-2012 Intel Corporation
#
# Authored by Joshua Lock <josh@linux.intel.com>
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
# Authored by Shane Wang <shane.wang@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import gtk

"""
The following are convenience classes for implementing GNOME HIG compliant
BitBake GUI's
In summary: spacing = 12px, border-width = 6px
"""

class CrumbsDialog(gtk.Dialog):
"""
A GNOME HIG compliant dialog widget.
Add buttons with gtk.Dialog.add_button or gtk.Dialog.add_buttons
"""
def __init__(self, title="", parent=None, flags=0, buttons=None):
super(CrumbsDialog, self).__init__(title, parent, flags, buttons)

self.set_property("has-separator", False) # note: deprecated in 2.22

self.set_border_width(6)
self.vbox.set_property("spacing", 12)
self.action_area.set_property("spacing", 12)
self.action_area.set_property("border-width", 6)
70 bitbake/lib/bb/ui/crumbs/hig/crumbsmessagedialog.py Normal file
@@ -0,0 +1,70 @@
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2011-2012 Intel Corporation
#
# Authored by Joshua Lock <josh@linux.intel.com>
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
# Authored by Shane Wang <shane.wang@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import glib
import gtk
from bb.ui.crumbs.hobwidget import HobIconChecker
from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog

"""
The following are convenience classes for implementing GNOME HIG compliant
BitBake GUI's
In summary: spacing = 12px, border-width = 6px
"""

class CrumbsMessageDialog(gtk.MessageDialog):
"""
A GNOME HIG compliant dialog widget.
Add buttons with gtk.Dialog.add_button or gtk.Dialog.add_buttons
"""
def __init__(self, parent = None, label="", dialog_type = gtk.MESSAGE_QUESTION, msg=""):
super(CrumbsMessageDialog, self).__init__(None,
gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
dialog_type,
gtk.BUTTONS_NONE,
None)

self.set_skip_taskbar_hint(False)

self.set_markup(label)

if 0 <= len(msg) < 300:
self.format_secondary_markup(msg)
else:
vbox = self.get_message_area()
vbox.set_border_width(1)
vbox.set_property("spacing", 12)
self.textWindow = gtk.ScrolledWindow()
self.textWindow.set_shadow_type(gtk.SHADOW_IN)
self.textWindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
self.msgView = gtk.TextView()
self.msgView.set_editable(False)
self.msgView.set_wrap_mode(gtk.WRAP_WORD)
self.msgView.set_cursor_visible(False)
self.msgView.set_size_request(300, 300)
self.buf = gtk.TextBuffer()
self.buf.set_text(msg)
self.msgView.set_buffer(self.buf)
self.textWindow.add(self.msgView)
self.msgView.show()
vbox.add(self.textWindow)
self.textWindow.show()
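CrumbsMessageDialog wraps gtk.MessageDialog with HIG spacing and falls back to a scrolled text view when the secondary message is 300 characters or longer. A minimal usage sketch, assuming a working PyGTK 2 environment and an illustrative message:

import gtk
from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog

# Illustrative label/msg strings; any Pango markup accepted by set_markup works.
dialog = CrumbsMessageDialog(parent=None,
                             label="<b>Deploy image successfully.</b>",
                             dialog_type=gtk.MESSAGE_INFO,
                             msg="Image written to the selected device.")
dialog.add_button("Close", gtk.RESPONSE_OK)
dialog.run()
dialog.destroy()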
219 bitbake/lib/bb/ui/crumbs/hig/deployimagedialog.py Normal file
@@ -0,0 +1,219 @@
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2011-2012 Intel Corporation
#
# Authored by Joshua Lock <josh@linux.intel.com>
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
# Authored by Shane Wang <shane.wang@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import glob
import gtk
import gobject
import os
import re
import shlex
import subprocess
import tempfile
from bb.ui.crumbs.hobwidget import hic, HobButton
from bb.ui.crumbs.progressbar import HobProgressBar
import bb.ui.crumbs.utils
import bb.process
from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog

"""
The following are convenience classes for implementing GNOME HIG compliant
BitBake GUI's
In summary: spacing = 12px, border-width = 6px
"""

class DeployImageDialog (CrumbsDialog):

__dummy_usb__ = "--select a usb drive--"

def __init__(self, title, image_path, parent, flags, buttons=None, standalone=False):
super(DeployImageDialog, self).__init__(title, parent, flags, buttons)

self.image_path = image_path
self.standalone = standalone

self.create_visual_elements()
self.connect("response", self.response_cb)

def create_visual_elements(self):
self.set_size_request(600, 400)
label = gtk.Label()
label.set_alignment(0.0, 0.5)
markup = "<span font_desc='12'>The image to be written into usb drive:</span>"
label.set_markup(markup)
self.vbox.pack_start(label, expand=False, fill=False, padding=2)

table = gtk.Table(2, 10, False)
table.set_col_spacings(5)
table.set_row_spacings(5)
self.vbox.pack_start(table, expand=True, fill=True)

scroll = gtk.ScrolledWindow()
scroll.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC)
scroll.set_shadow_type(gtk.SHADOW_IN)
tv = gtk.TextView()
tv.set_editable(False)
tv.set_wrap_mode(gtk.WRAP_WORD)
tv.set_cursor_visible(False)
self.buf = gtk.TextBuffer()
self.buf.set_text(self.image_path)
tv.set_buffer(self.buf)
scroll.add(tv)
table.attach(scroll, 0, 10, 0, 1)

# There are 2 ways to use DeployImageDialog
# One way is that called by HOB when the 'Deploy Image' button is clicked
# The other way is that called by a standalone script.
# Following block of codes handles the latter way. It adds a 'Select Image' button and
# emit a signal when the button is clicked.
if self.standalone:
gobject.signal_new("select_image_clicked", self, gobject.SIGNAL_RUN_FIRST,
gobject.TYPE_NONE, ())
icon = gtk.Image()
pix_buffer = gtk.gdk.pixbuf_new_from_file(hic.ICON_IMAGES_DISPLAY_FILE)
icon.set_from_pixbuf(pix_buffer)
button = gtk.Button("Select Image")
button.set_image(icon)
#button.set_size_request(140, 50)
table.attach(button, 9, 10, 1, 2, gtk.FILL, 0, 0, 0)
button.connect("clicked", self.select_image_button_clicked_cb)

separator = gtk.HSeparator()
self.vbox.pack_start(separator, expand=False, fill=False, padding=10)
self.usb_desc = gtk.Label()
self.usb_desc.set_alignment(0.0, 0.5)
markup = "<span font_desc='12'>You haven't chosen any USB drive.</span>"
self.usb_desc.set_markup(markup)

self.usb_combo = gtk.combo_box_new_text()
self.usb_combo.connect("changed", self.usb_combo_changed_cb)
model = self.usb_combo.get_model()
model.clear()
self.usb_combo.append_text(self.__dummy_usb__)
for usb in self.find_all_usb_devices():
self.usb_combo.append_text("/dev/" + usb)
self.usb_combo.set_active(0)
self.vbox.pack_start(self.usb_combo, expand=False, fill=False)
self.vbox.pack_start(self.usb_desc, expand=False, fill=False, padding=2)

self.progress_bar = HobProgressBar()
self.vbox.pack_start(self.progress_bar, expand=False, fill=False)
separator = gtk.HSeparator()
self.vbox.pack_start(separator, expand=False, fill=True, padding=10)

self.vbox.show_all()
self.progress_bar.hide()

def set_image_text_buffer(self, image_path):
self.buf.set_text(image_path)

def set_image_path(self, image_path):
self.image_path = image_path

def popen_read(self, cmd):
tmpout, errors = bb.process.run("%s" % cmd)
return tmpout.strip()

def find_all_usb_devices(self):
usb_devs = [ os.readlink(u)
for u in glob.glob('/dev/disk/by-id/usb*')
if not re.search(r'part\d+', u) ]
return [ '%s' % u[u.rfind('/')+1:] for u in usb_devs ]

def get_usb_info(self, dev):
return "%s %s" % \
(self.popen_read('cat /sys/class/block/%s/device/vendor' % dev),
self.popen_read('cat /sys/class/block/%s/device/model' % dev))

def select_image_button_clicked_cb(self, button):
self.emit('select_image_clicked')

def usb_combo_changed_cb(self, usb_combo):
combo_item = self.usb_combo.get_active_text()
if not combo_item or combo_item == self.__dummy_usb__:
markup = "<span font_desc='12'>You haven't chosen any USB drive.</span>"
self.usb_desc.set_markup(markup)
else:
markup = "<span font_desc='12'>" + self.get_usb_info(combo_item.lstrip("/dev/")) + "</span>"
self.usb_desc.set_markup(markup)

def response_cb(self, dialog, response_id):
if response_id == gtk.RESPONSE_YES:
lbl = ''
msg = ''
combo_item = self.usb_combo.get_active_text()
if combo_item and combo_item != self.__dummy_usb__ and self.image_path:
cmdline = bb.ui.crumbs.utils.which_terminal()
if cmdline:
tmpfile = tempfile.NamedTemporaryFile()
cmdline += "\"sudo dd if=" + self.image_path + \
" of=" + combo_item + " && sync; echo $? > " + tmpfile.name + "\""
subprocess.call(shlex.split(cmdline))

if int(tmpfile.readline().strip()) == 0:
lbl = "<b>Deploy image successfully.</b>"
else:
lbl = "<b>Failed to deploy image.</b>"
msg = "Please check image <b>%s</b> exists and USB device <b>%s</b> is writable." % (self.image_path, combo_item)
tmpfile.close()
else:
if not self.image_path:
lbl = "<b>No selection made.</b>"
msg = "You have not selected an image to deploy."
else:
lbl = "<b>No selection made.</b>"
msg = "You have not selected a USB device."
if len(lbl):
crumbs_dialog = CrumbsMessageDialog(self, lbl, gtk.MESSAGE_INFO, msg)
button = crumbs_dialog.add_button("Close", gtk.RESPONSE_OK)
HobButton.style_button(button)
crumbs_dialog.run()
crumbs_dialog.destroy()

def update_progress_bar(self, title, fraction, status=None):
self.progress_bar.update(fraction)
self.progress_bar.set_title(title)
self.progress_bar.set_rcstyle(status)

def write_file(self, ifile, ofile):
self.progress_bar.reset()
self.progress_bar.show()

f_from = os.open(ifile, os.O_RDONLY)
f_to = os.open(ofile, os.O_WRONLY)

total_size = os.stat(ifile).st_size
written_size = 0

while True:
buf = os.read(f_from, 1024*1024)
if not buf:
break
os.write(f_to, buf)
written_size += 1024*1024
self.update_progress_bar("Writing to usb:", written_size * 1.0/total_size)

self.update_progress_bar("Writing completed:", 1.0)
os.close(f_from)
os.close(f_to)
self.progress_bar.hide()
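DeployImageDialog.write_file above streams the image to the target device in 1 MiB chunks and updates the progress bar after each write. The same copy loop, sketched without the GTK widgets (illustrative, standard library only):

import os

def copy_with_progress(ifile, ofile, report, chunk=1024 * 1024):
    # Same pattern as write_file: copy fixed-size chunks and report the
    # fraction written after each one via the supplied callback.
    total = os.stat(ifile).st_size or 1   # avoid division by zero on empty files
    f_from = os.open(ifile, os.O_RDONLY)
    f_to = os.open(ofile, os.O_WRONLY)
    written = 0
    try:
        while True:
            buf = os.read(f_from, chunk)
            if not buf:
                break
            os.write(f_to, buf)
            written += len(buf)
            report(min(float(written) / total, 1.0))
    finally:
        os.close(f_from)
        os.close(f_to)

Counting len(buf) rather than the nominal chunk size keeps the reported fraction at or below 1.0 for the final, short read.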
172 bitbake/lib/bb/ui/crumbs/hig/imageselectiondialog.py Normal file
@@ -0,0 +1,172 @@
|
||||
#
|
||||
# BitBake Graphical GTK User Interface
|
||||
#
|
||||
# Copyright (C) 2011-2012 Intel Corporation
|
||||
#
|
||||
# Authored by Joshua Lock <josh@linux.intel.com>
|
||||
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
|
||||
# Authored by Shane Wang <shane.wang@intel.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import gtk
|
||||
import gobject
|
||||
import os
|
||||
from bb.ui.crumbs.hobwidget import HobViewTable, HobInfoButton, HobButton, HobAltButton
|
||||
from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
|
||||
from bb.ui.crumbs.hig.layerselectiondialog import LayerSelectionDialog
|
||||
|
||||
"""
|
||||
The following are convenience classes for implementing GNOME HIG compliant
|
||||
BitBake GUIs
|
||||
In summary: spacing = 12px, border-width = 6px
|
||||
"""
|
||||
|
||||
class ImageSelectionDialog (CrumbsDialog):
|
||||
|
||||
__columns__ = [{
|
||||
'col_name' : 'Image name',
|
||||
'col_id' : 0,
|
||||
'col_style': 'text',
|
||||
'col_min' : 400,
|
||||
'col_max' : 400
|
||||
}, {
|
||||
'col_name' : 'Select',
|
||||
'col_id' : 1,
|
||||
'col_style': 'radio toggle',
|
||||
'col_min' : 160,
|
||||
'col_max' : 160
|
||||
}]
|
||||
|
||||
|
||||
def __init__(self, image_folder, image_types, title, parent, flags, buttons=None, image_extension = {}):
|
||||
super(ImageSelectionDialog, self).__init__(title, parent, flags, buttons)
|
||||
self.connect("response", self.response_cb)
|
||||
|
||||
self.image_folder = image_folder
|
||||
self.image_types = image_types
|
||||
self.image_list = []
|
||||
self.image_names = []
|
||||
self.image_extension = image_extension
|
||||
|
||||
# create visual elements on the dialog
|
||||
self.create_visual_elements()
|
||||
|
||||
self.image_store = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_BOOLEAN)
|
||||
self.fill_image_store()
|
||||
|
||||
def create_visual_elements(self):
|
||||
hbox = gtk.HBox(False, 6)
|
||||
|
||||
self.vbox.pack_start(hbox, expand=False, fill=False)
|
||||
|
||||
entry = gtk.Entry()
|
||||
entry.set_text(self.image_folder)
|
||||
table = gtk.Table(1, 10, True)
|
||||
table.set_size_request(560, -1)
|
||||
hbox.pack_start(table, expand=False, fill=False)
|
||||
table.attach(entry, 0, 9, 0, 1)
|
||||
image = gtk.Image()
|
||||
image.set_from_stock(gtk.STOCK_OPEN, gtk.ICON_SIZE_BUTTON)
|
||||
open_button = gtk.Button()
|
||||
open_button.set_image(image)
|
||||
open_button.connect("clicked", self.select_path_cb, self, entry)
|
||||
table.attach(open_button, 9, 10, 0, 1)
|
||||
|
||||
self.image_table = HobViewTable(self.__columns__, "Images")
|
||||
self.image_table.set_size_request(-1, 300)
|
||||
self.image_table.connect("toggled", self.toggled_cb)
|
||||
self.image_table.connect_group_selection(self.table_selected_cb)
|
||||
self.image_table.connect("row-activated", self.row_actived_cb)
|
||||
self.vbox.pack_start(self.image_table, expand=True, fill=True)
|
||||
|
||||
self.show_all()
|
||||
|
||||
def change_image_cb(self, model, path, columnid):
|
||||
if not model:
|
||||
return
|
||||
iter = model.get_iter_first()
|
||||
while iter:
|
||||
rowpath = model.get_path(iter)
|
||||
model[rowpath][columnid] = False
|
||||
iter = model.iter_next(iter)
|
||||
|
||||
model[path][columnid] = True
|
||||
|
||||
def toggled_cb(self, table, cell, path, columnid, tree):
|
||||
model = tree.get_model()
|
||||
self.change_image_cb(model, path, columnid)
|
||||
|
||||
def table_selected_cb(self, selection):
|
||||
model, paths = selection.get_selected_rows()
|
||||
if paths:
|
||||
self.change_image_cb(model, paths[0], 1)
|
||||
|
||||
def row_activated_cb(self, tab, model, path):
|
||||
self.change_image_cb(model, path, 1)
|
||||
self.emit('response', gtk.RESPONSE_YES)
|
||||
|
||||
def select_path_cb(self, action, parent, entry):
|
||||
dialog = gtk.FileChooserDialog("", parent,
|
||||
gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER)
|
||||
text = entry.get_text()
|
||||
dialog.set_current_folder(text if len(text) > 0 else os.getcwd())
|
||||
button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
|
||||
HobAltButton.style_button(button)
|
||||
button = dialog.add_button("Open", gtk.RESPONSE_YES)
|
||||
HobButton.style_button(button)
|
||||
response = dialog.run()
|
||||
if response == gtk.RESPONSE_YES:
|
||||
path = dialog.get_filename()
|
||||
entry.set_text(path)
|
||||
self.image_folder = path
|
||||
self.fill_image_store()
|
||||
|
||||
dialog.destroy()
|
||||
|
||||
def fill_image_store(self):
|
||||
self.image_list = []
|
||||
self.image_store.clear()
|
||||
imageset = set()
|
||||
for root, dirs, files in os.walk(self.image_folder):
|
||||
# ignore the sub directories
|
||||
dirs[:] = []
|
||||
for f in files:
|
||||
for image_type in self.image_types:
|
||||
if image_type in self.image_extension:
|
||||
real_types = self.image_extension[image_type]
|
||||
else:
|
||||
real_types = [image_type]
|
||||
for real_image_type in real_types:
|
||||
if f.endswith('.' + real_image_type):
|
||||
imageset.add(f.rsplit('.' + real_image_type)[0].rsplit('.rootfs')[0])
|
||||
self.image_list.append(f)
|
||||
|
||||
for image in imageset:
|
||||
self.image_store.set(self.image_store.append(), 0, image, 1, False)
|
||||
|
||||
self.image_table.set_model(self.image_store)
|
||||
|
||||
def response_cb(self, dialog, response_id):
|
||||
self.image_names = []
|
||||
if response_id == gtk.RESPONSE_YES:
|
||||
iter = self.image_store.get_iter_first()
|
||||
while iter:
|
||||
path = self.image_store.get_path(iter)
|
||||
if self.image_store[path][1]:
|
||||
for f in self.image_list:
|
||||
if f.startswith(self.image_store[path][0] + '.'):
|
||||
self.image_names.append(f)
|
||||
break
|
||||
iter = self.image_store.iter_next(iter)
|
||||
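fill_image_store() above derives the name shown in the table by stripping the image-type extension (or its mapped real extensions) and any trailing '.rootfs' marker, and response_cb() later maps the chosen name back to concrete files by prefix. A sketch of that derivation as a standalone helper; the function name and the sample filename in the comment are illustrative only:

def image_base_name(filename, image_types, image_extension=None):
    # For "core-image-minimal-qemux86.rootfs.ext3" with image type "ext3"
    # this returns "core-image-minimal-qemux86", mirroring fill_image_store().
    extension_map = image_extension or {}
    for image_type in image_types:
        for ext in extension_map.get(image_type, [image_type]):
            if filename.endswith('.' + ext):
                return filename.rsplit('.' + ext)[0].rsplit('.rootfs')[0]
    return None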
298
bitbake/lib/bb/ui/crumbs/hig/layerselectiondialog.py
Normal file
@@ -0,0 +1,298 @@
|
||||
#
|
||||
# BitBake Graphical GTK User Interface
|
||||
#
|
||||
# Copyright (C) 2011-2012 Intel Corporation
|
||||
#
|
||||
# Authored by Joshua Lock <josh@linux.intel.com>
|
||||
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
|
||||
# Authored by Shane Wang <shane.wang@intel.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import gtk
|
||||
import gobject
|
||||
import os
|
||||
import tempfile
|
||||
from bb.ui.crumbs.hobwidget import hic, HobButton, HobAltButton
|
||||
from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
|
||||
from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog
|
||||
|
||||
"""
|
||||
The following are convenience classes for implementing GNOME HIG compliant
|
||||
BitBake GUIs
|
||||
In summary: spacing = 12px, border-width = 6px
|
||||
"""
|
||||
|
||||
class CellRendererPixbufActivatable(gtk.CellRendererPixbuf):
|
||||
"""
|
||||
A custom CellRenderer implementation which is activatable
|
||||
so that we can handle user clicks
|
||||
"""
|
||||
__gsignals__ = { 'clicked' : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
(gobject.TYPE_STRING,)), }
|
||||
|
||||
def __init__(self):
|
||||
gtk.CellRendererPixbuf.__init__(self)
|
||||
self.set_property('mode', gtk.CELL_RENDERER_MODE_ACTIVATABLE)
|
||||
self.set_property('follow-state', True)
|
||||
|
||||
"""
|
||||
Respond to a user click on a cell
|
||||
"""
|
||||
def do_activate(self, event, widget, path, background_area, cell_area, flags):
|
||||
self.emit('clicked', path)
|
||||
|
||||
#
|
||||
# LayerSelectionDialog
|
||||
#
|
||||
class LayerSelectionDialog (CrumbsDialog):
|
||||
|
||||
TARGETS = [
|
||||
("MY_TREE_MODEL_ROW", gtk.TARGET_SAME_WIDGET, 0),
|
||||
("text/plain", 0, 1),
|
||||
("TEXT", 0, 2),
|
||||
("STRING", 0, 3),
|
||||
]
|
||||
|
||||
def gen_label_widget(self, content):
|
||||
label = gtk.Label()
|
||||
label.set_alignment(0, 0)
|
||||
label.set_markup(content)
|
||||
label.show()
|
||||
return label
|
||||
|
||||
def layer_widget_toggled_cb(self, cell, path, layer_store):
|
||||
name = layer_store[path][0]
|
||||
toggle = not layer_store[path][1]
|
||||
layer_store[path][1] = toggle
|
||||
|
||||
def layer_widget_add_clicked_cb(self, action, layer_store, parent):
|
||||
dialog = gtk.FileChooserDialog("Add new layer", parent,
|
||||
gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER)
|
||||
button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
|
||||
HobAltButton.style_button(button)
|
||||
button = dialog.add_button("Open", gtk.RESPONSE_YES)
|
||||
HobButton.style_button(button)
|
||||
label = gtk.Label("Select the layer you wish to add")
|
||||
label.show()
|
||||
dialog.set_extra_widget(label)
|
||||
response = dialog.run()
|
||||
path = dialog.get_filename()
|
||||
dialog.destroy()
|
||||
|
||||
lbl = "<b>Error</b>"
|
||||
msg = "Unable to load layer <i>%s</i> because " % path
|
||||
if response == gtk.RESPONSE_YES:
|
||||
import os
|
||||
import os.path
|
||||
layers = []
|
||||
it = layer_store.get_iter_first()
|
||||
while it:
|
||||
layers.append(layer_store.get_value(it, 0))
|
||||
it = layer_store.iter_next(it)
|
||||
|
||||
if not path:
|
||||
msg += "it is an invalid path."
|
||||
elif not os.path.exists(path+"/conf/layer.conf"):
|
||||
msg += "there is no layer.conf inside the directory."
|
||||
elif path in layers:
|
||||
msg += "it is already in loaded layers."
|
||||
else:
|
||||
layer_store.append([path])
|
||||
return
|
||||
dialog = CrumbsMessageDialog(parent, lbl, gtk.MESSAGE_ERROR, msg)
|
||||
dialog.add_button(gtk.STOCK_CLOSE, gtk.RESPONSE_OK)
|
||||
response = dialog.run()
|
||||
dialog.destroy()
|
||||
|
||||
def layer_widget_del_clicked_cb(self, action, tree_selection, layer_store):
|
||||
model, iter = tree_selection.get_selected()
|
||||
if iter:
|
||||
layer_store.remove(iter)
|
||||
|
||||
|
||||
def gen_layer_widget(self, layers, layers_avail, window, tooltip=""):
|
||||
hbox = gtk.HBox(False, 6)
|
||||
|
||||
layer_tv = gtk.TreeView()
|
||||
layer_tv.set_rules_hint(True)
|
||||
layer_tv.set_headers_visible(False)
|
||||
tree_selection = layer_tv.get_selection()
|
||||
tree_selection.set_mode(gtk.SELECTION_SINGLE)
|
||||
|
||||
# Allow drag and drop of rows, including row moves
|
||||
dnd_internal_target = ''
|
||||
dnd_targets = [(dnd_internal_target, gtk.TARGET_SAME_WIDGET, 0)]
|
||||
layer_tv.enable_model_drag_source( gtk.gdk.BUTTON1_MASK,
|
||||
dnd_targets,
|
||||
gtk.gdk.ACTION_MOVE)
|
||||
layer_tv.enable_model_drag_dest(dnd_targets,
|
||||
gtk.gdk.ACTION_MOVE)
|
||||
layer_tv.connect("drag_data_get", self.drag_data_get_cb)
|
||||
layer_tv.connect("drag_data_received", self.drag_data_received_cb)
|
||||
|
||||
col0= gtk.TreeViewColumn('Path')
|
||||
cell0 = gtk.CellRendererText()
|
||||
cell0.set_padding(5,2)
|
||||
col0.pack_start(cell0, True)
|
||||
col0.set_cell_data_func(cell0, self.draw_layer_path_cb)
|
||||
layer_tv.append_column(col0)
|
||||
|
||||
scroll = gtk.ScrolledWindow()
|
||||
scroll.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC)
|
||||
scroll.set_shadow_type(gtk.SHADOW_IN)
|
||||
scroll.add(layer_tv)
|
||||
|
||||
table_layer = gtk.Table(2, 10, False)
|
||||
hbox.pack_start(table_layer, expand=True, fill=True)
|
||||
|
||||
table_layer.attach(scroll, 0, 10, 0, 1)
|
||||
|
||||
layer_store = gtk.ListStore(gobject.TYPE_STRING)
|
||||
for layer in layers:
|
||||
layer_store.append([layer])
|
||||
|
||||
col1 = gtk.TreeViewColumn('Enabled')
|
||||
layer_tv.append_column(col1)
|
||||
|
||||
cell1 = CellRendererPixbufActivatable()
|
||||
cell1.set_fixed_size(-1,35)
|
||||
cell1.connect("clicked", self.del_cell_clicked_cb, layer_store)
|
||||
col1.pack_start(cell1, True)
|
||||
col1.set_cell_data_func(cell1, self.draw_delete_button_cb, layer_tv)
|
||||
|
||||
add_button = gtk.Button()
|
||||
add_button.set_relief(gtk.RELIEF_NONE)
|
||||
box = gtk.HBox(False, 6)
|
||||
box.show()
|
||||
add_button.add(box)
|
||||
add_button.connect("enter-notify-event", self.add_hover_cb)
|
||||
add_button.connect("leave-notify-event", self.add_leave_cb)
|
||||
self.im = gtk.Image()
|
||||
self.im.set_from_file(hic.ICON_INDI_ADD_FILE)
|
||||
self.im.show()
|
||||
box.pack_start(self.im, expand=False, fill=False, padding=6)
|
||||
lbl = gtk.Label("Add layer")
|
||||
lbl.set_alignment(0.0, 0.5)
|
||||
lbl.show()
|
||||
box.pack_start(lbl, expand=True, fill=True, padding=6)
|
||||
add_button.connect("clicked", self.layer_widget_add_clicked_cb, layer_store, window)
|
||||
table_layer.attach(add_button, 0, 10, 1, 2, gtk.EXPAND | gtk.FILL, 0, 0, 6)
|
||||
layer_tv.set_model(layer_store)
|
||||
|
||||
hbox.show_all()
|
||||
|
||||
return hbox, layer_store
|
||||
|
||||
def drag_data_get_cb(self, treeview, context, selection, target_id, etime):
|
||||
treeselection = treeview.get_selection()
|
||||
model, iter = treeselection.get_selected()
|
||||
data = model.get_value(iter, 0)
|
||||
selection.set(selection.target, 8, data)
|
||||
|
||||
def drag_data_received_cb(self, treeview, context, x, y, selection, info, etime):
|
||||
model = treeview.get_model()
|
||||
data = selection.data
|
||||
drop_info = treeview.get_dest_row_at_pos(x, y)
|
||||
if drop_info:
|
||||
path, position = drop_info
|
||||
iter = model.get_iter(path)
|
||||
if (position == gtk.TREE_VIEW_DROP_BEFORE or position == gtk.TREE_VIEW_DROP_INTO_OR_BEFORE):
|
||||
model.insert_before(iter, [data])
|
||||
else:
|
||||
model.insert_after(iter, [data])
|
||||
else:
|
||||
model.append([data])
|
||||
if context.action == gtk.gdk.ACTION_MOVE:
|
||||
context.finish(True, True, etime)
|
||||
return
|
||||
|
||||
def add_hover_cb(self, button, event):
|
||||
self.im.set_from_file(hic.ICON_INDI_ADD_HOVER_FILE)
|
||||
|
||||
def add_leave_cb(self, button, event):
|
||||
self.im.set_from_file(hic.ICON_INDI_ADD_FILE)
|
||||
|
||||
def __init__(self, title, layers, layers_non_removable, all_layers, parent, flags, buttons=None):
|
||||
super(LayerSelectionDialog, self).__init__(title, parent, flags, buttons)
|
||||
|
||||
# class members from other objects
|
||||
self.layers = layers
|
||||
self.layers_non_removable = layers_non_removable
|
||||
self.all_layers = all_layers
|
||||
self.layers_changed = False
|
||||
|
||||
# icon for remove button in TreeView
|
||||
im = gtk.Image()
|
||||
im.set_from_file(hic.ICON_INDI_REMOVE_FILE)
|
||||
self.rem_icon = im.get_pixbuf()
|
||||
|
||||
# class members for internal use
|
||||
self.layer_store = None
|
||||
|
||||
# create visual elements on the dialog
|
||||
self.create_visual_elements()
|
||||
self.connect("response", self.response_cb)
|
||||
|
||||
def create_visual_elements(self):
|
||||
layer_widget, self.layer_store = self.gen_layer_widget(self.layers, self.all_layers, self, None)
|
||||
layer_widget.set_size_request(450, 250)
|
||||
self.vbox.pack_start(layer_widget, expand=True, fill=True)
|
||||
self.show_all()
|
||||
|
||||
def response_cb(self, dialog, response_id):
|
||||
model = self.layer_store
|
||||
it = model.get_iter_first()
|
||||
layers = []
|
||||
while it:
|
||||
layers.append(model.get_value(it, 0))
|
||||
it = model.iter_next(it)
|
||||
|
||||
self.layers_changed = (self.layers != layers)
|
||||
self.layers = layers
|
||||
|
||||
"""
|
||||
A custom cell_data_func to draw a delete 'button' in the TreeView for layers
other than the meta layer, whose deletion is prevented so that the
user can't shoot themselves in the foot too badly.
|
||||
"""
|
||||
def draw_delete_button_cb(self, col, cell, model, it, tv):
|
||||
path = model.get_value(it, 0)
|
||||
if path in self.layers_non_removable:
|
||||
cell.set_sensitive(False)
|
||||
cell.set_property('pixbuf', None)
|
||||
cell.set_property('mode', gtk.CELL_RENDERER_MODE_INERT)
|
||||
else:
|
||||
cell.set_property('pixbuf', self.rem_icon)
|
||||
cell.set_sensitive(True)
|
||||
cell.set_property('mode', gtk.CELL_RENDERER_MODE_ACTIVATABLE)
|
||||
|
||||
return True
|
||||
|
||||
"""
|
||||
A custom cell_data_func to write an extra message into the layer path cell
for the meta layer, informing the user that it can't be removed for
their own safety.
|
||||
"""
|
||||
def draw_layer_path_cb(self, col, cell, model, it):
|
||||
path = model.get_value(it, 0)
|
||||
if path in self.layers_non_removable:
|
||||
cell.set_property('markup', "<b>It cannot be removed</b>\n%s" % path)
|
||||
else:
|
||||
cell.set_property('text', path)
|
||||
|
||||
def del_cell_clicked_cb(self, cell, path, model):
|
||||
it = model.get_iter_from_string(path)
|
||||
model.remove(it)
|
||||
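layer_widget_add_clicked_cb() above accepts a directory only when it looks like a real, not-yet-loaded layer. Those checks condense to a small predicate; validate_layer_path is an illustrative name, and the returned strings mirror the messages built above:

import os

def validate_layer_path(path, existing_layers):
    # Returns None when the path is acceptable, otherwise the reason text
    # appended to the "Unable to load layer ..." message.
    if not path:
        return "it is an invalid path."
    if not os.path.exists(os.path.join(path, "conf", "layer.conf")):
        return "there is no layer.conf inside the directory."
    if path in existing_layers:
        return "it is already in loaded layers."
    return None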
437
bitbake/lib/bb/ui/crumbs/hig/propertydialog.py
Normal file
@@ -0,0 +1,437 @@
|
||||
#
|
||||
# BitBake Graphical GTK User Interface
|
||||
#
|
||||
# Copyright (C) 2011-2013 Intel Corporation
|
||||
#
|
||||
# Authored by Andrei Dinu <andrei.adrianx.dinu@intel.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import string
|
||||
import gtk
|
||||
import gobject
|
||||
import os
|
||||
import tempfile
|
||||
import glib
|
||||
from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
|
||||
from bb.ui.crumbs.hig.settingsuihelper import SettingsUIHelper
|
||||
from bb.ui.crumbs.hig.crumbsmessagedialog import CrumbsMessageDialog
|
||||
from bb.ui.crumbs.hig.layerselectiondialog import LayerSelectionDialog
|
||||
|
||||
"""
|
||||
The following are convenience classes for implementing GNOME HIG compliant
|
||||
BitBake GUIs
|
||||
In summary: spacing = 12px, border-width = 6px
|
||||
"""
|
||||
|
||||
class PropertyDialog(CrumbsDialog):
|
||||
|
||||
def __init__(self, title, parent, information, flags, buttons=None):
|
||||
|
||||
super(PropertyDialog, self).__init__(title, parent, flags, buttons)
|
||||
|
||||
self.properties = information
|
||||
|
||||
if len(self.properties) == 10:
|
||||
self.create_recipe_visual_elements()
|
||||
elif len(self.properties) == 5:
|
||||
self.create_package_visual_elements()
|
||||
else:
|
||||
self.create_information_visual_elements()
|
||||
|
||||
|
||||
def create_information_visual_elements(self):
|
||||
|
||||
HOB_ICON_BASE_DIR = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), ("icons/"))
|
||||
ICON_PACKAGES_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('info/info_display.png'))
|
||||
|
||||
self.set_resizable(False)
|
||||
|
||||
self.table = gtk.Table(1,1,False)
|
||||
self.table.set_row_spacings(0)
|
||||
self.table.set_col_spacings(0)
|
||||
|
||||
self.image = gtk.Image()
|
||||
self.image.set_from_file(ICON_PACKAGES_DISPLAY_FILE)
|
||||
self.image.set_property("xalign",0)
|
||||
#self.vbox.add(self.image)
|
||||
|
||||
image_info = self.properties.split("*")[0]
|
||||
info = self.properties.split("*")[1]
|
||||
|
||||
vbox = gtk.VBox(True, spacing=30)
|
||||
|
||||
self.label_short = gtk.Label()
|
||||
self.label_short.set_line_wrap(False)
|
||||
self.label_short.set_markup(image_info)
|
||||
self.label_short.set_property("xalign", 0)
|
||||
|
||||
self.info_label = gtk.Label()
|
||||
self.info_label.set_line_wrap(True)
|
||||
self.info_label.set_markup(info)
|
||||
self.info_label.set_property("yalign", 0.5)
|
||||
|
||||
self.table.attach(self.image, 0,1,0,1, xoptions=gtk.FILL|gtk.EXPAND, yoptions=gtk.FILL,xpadding=5,ypadding=5)
|
||||
self.table.attach(self.label_short, 0,1,0,1, xoptions=gtk.FILL|gtk.EXPAND, yoptions=gtk.FILL,xpadding=40,ypadding=5)
|
||||
self.table.attach(self.info_label, 0,1,1,2, xoptions=gtk.FILL|gtk.EXPAND, yoptions=gtk.FILL,xpadding=40,ypadding=10)
|
||||
|
||||
self.vbox.add(self.table)
|
||||
self.connect('delete-event', lambda w, e: self.destroy() or True)
|
||||
|
||||
def treeViewTooltip( self, widget, e, tooltips, cell, emptyText="" ):
|
||||
try:
|
||||
(path,col,x,y) = widget.get_path_at_pos( int(e.x), int(e.y) )
|
||||
it = widget.get_model().get_iter(path)
|
||||
value = widget.get_model().get_value(it,cell)
|
||||
if value in self.tooltip_items:
|
||||
tooltips.set_tip(widget, self.tooltip_items[value])
|
||||
tooltips.enable()
|
||||
else:
|
||||
tooltips.set_tip(widget, emptyText)
|
||||
except:
|
||||
tooltips.set_tip(widget, emptyText)
|
||||
|
||||
|
||||
def create_package_visual_elements(self):
|
||||
|
||||
import json
|
||||
|
||||
name = self.properties['name']
|
||||
binb = self.properties['binb']
|
||||
size = self.properties['size']
|
||||
recipe = self.properties['recipe']
|
||||
file_list = json.loads(self.properties['files_list'])
|
||||
|
||||
files_temp = ''
|
||||
paths_temp = ''
|
||||
files_binb = []
|
||||
paths_binb = []
|
||||
|
||||
self.tooltip_items = {}
|
||||
|
||||
self.set_resizable(False)
|
||||
|
||||
#cleaning out the recipe variable
|
||||
recipe = recipe.split("+")[0]
|
||||
|
||||
vbox = gtk.VBox(True,spacing = 0)
|
||||
|
||||
###################################### NAME ROW + COL #################################
|
||||
|
||||
self.label_short = gtk.Label()
|
||||
self.label_short.set_size_request(300,-1)
|
||||
self.label_short.set_selectable(True)
|
||||
self.label_short.set_line_wrap(True)
|
||||
self.label_short.set_markup("<span weight=\"bold\">Name: </span>" + name)
|
||||
self.label_short.set_property("xalign", 0)
|
||||
|
||||
self.vbox.add(self.label_short)
|
||||
|
||||
###################################### SIZE ROW + COL ######################################
|
||||
|
||||
self.label_short = gtk.Label()
|
||||
self.label_short.set_size_request(300,-1)
|
||||
self.label_short.set_selectable(True)
|
||||
self.label_short.set_line_wrap(True)
|
||||
self.label_short.set_markup("<span weight=\"bold\">Size: </span>" + size)
|
||||
self.label_short.set_property("xalign", 0)
|
||||
|
||||
self.vbox.add(self.label_short)
|
||||
|
||||
##################################### RECIPE ROW + COL #########################################
|
||||
|
||||
self.label_short = gtk.Label()
|
||||
self.label_short.set_size_request(300,-1)
|
||||
self.label_short.set_selectable(True)
|
||||
self.label_short.set_line_wrap(True)
|
||||
self.label_short.set_markup("<span weight=\"bold\">Recipe: </span>" + recipe)
|
||||
self.label_short.set_property("xalign", 0)
|
||||
|
||||
self.vbox.add(self.label_short)
|
||||
|
||||
##################################### BINB ROW + COL #######################################
|
||||
|
||||
if binb != '':
|
||||
self.label_short = gtk.Label()
|
||||
self.label_short.set_selectable(True)
|
||||
self.label_short.set_line_wrap(True)
|
||||
self.label_short.set_markup("<span weight=\"bold\">Brought in by: </span>")
|
||||
self.label_short.set_property("xalign", 0)
|
||||
|
||||
self.label_info = gtk.Label()
|
||||
self.label_info.set_size_request(300,-1)
|
||||
self.label_info.set_selectable(True)
|
||||
self.label_info.set_line_wrap(True)
|
||||
self.label_info.set_markup(binb)
|
||||
self.label_info.set_property("xalign", 0)
|
||||
|
||||
self.vbox.add(self.label_short)
|
||||
self.vbox.add(self.label_info)
|
||||
|
||||
#################################### FILES BROUGHT BY PACKAGES ###################################
|
||||
|
||||
if file_list:
|
||||
|
||||
self.textWindow = gtk.ScrolledWindow()
|
||||
self.textWindow.set_shadow_type(gtk.SHADOW_IN)
|
||||
self.textWindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
|
||||
self.textWindow.set_size_request(100, 170)
|
||||
|
||||
packagefiles_store = gtk.ListStore(str)
|
||||
|
||||
self.packagefiles_tv = gtk.TreeView()
|
||||
self.packagefiles_tv.set_rules_hint(True)
|
||||
self.packagefiles_tv.set_headers_visible(True)
|
||||
self.textWindow.add(self.packagefiles_tv)
|
||||
|
||||
self.cell1 = gtk.CellRendererText()
|
||||
col1 = gtk.TreeViewColumn('Package files', self.cell1)
|
||||
col1.set_cell_data_func(self.cell1, self.regex_field)
|
||||
self.packagefiles_tv.append_column(col1)
|
||||
|
||||
items = file_list.keys()
|
||||
items.sort()
|
||||
for item in items:
|
||||
fullpath = item
|
||||
while len(item) > 35:
|
||||
item = item[:len(item)/2] + "" + item[len(item)/2+1:]
|
||||
if len(item) == 35:
|
||||
item = item[:len(item)/2] + "..." + item[len(item)/2+3:]
|
||||
self.tooltip_items[item] = fullpath
|
||||
|
||||
packagefiles_store.append([str(item)])
|
||||
|
||||
self.packagefiles_tv.set_model(packagefiles_store)
|
||||
|
||||
tips = gtk.Tooltips()
|
||||
tips.set_tip(self.packagefiles_tv, "")
|
||||
self.packagefiles_tv.connect("motion-notify-event", self.treeViewTooltip, tips, 0)
|
||||
self.packagefiles_tv.set_events(gtk.gdk.POINTER_MOTION_MASK)
|
||||
|
||||
self.vbox.add(self.textWindow)
|
||||
|
||||
self.vbox.show_all()
|
||||
|
||||
|
||||
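The truncation loop in create_package_visual_elements() above squeezes long file paths to 35 characters by deleting characters from the middle and then marking the cut with an ellipsis, keeping the full path only in the tooltip map. The same rule as a standalone helper (middle_ellipsize is an illustrative name):

def middle_ellipsize(path, width=35):
    # Drop characters from the middle until the string fits, then replace the
    # three middle characters with "..." so the cut is visible.
    while len(path) > width:
        path = path[:len(path) // 2] + path[len(path) // 2 + 1:]
    if len(path) == width:
        path = path[:width // 2] + "..." + path[width // 2 + 3:]
    return path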
def regex_field(self, column, cell, model, iter):
|
||||
cell.set_property('text', model.get_value(iter, 0))
|
||||
return
|
||||
|
||||
|
||||
def create_recipe_visual_elements(self):
|
||||
|
||||
summary = self.properties['summary']
|
||||
name = self.properties['name']
|
||||
version = self.properties['version']
|
||||
revision = self.properties['revision']
|
||||
binb = self.properties['binb']
|
||||
group = self.properties['group']
|
||||
license = self.properties['license']
|
||||
homepage = self.properties['homepage']
|
||||
bugtracker = self.properties['bugtracker']
|
||||
description = self.properties['description']
|
||||
|
||||
self.set_resizable(False)
|
||||
|
||||
#cleaning out the version variable and also the summary
|
||||
version = version.split(":")[1]
|
||||
if len(version) > 30:
|
||||
version = version.split("+")[0]
|
||||
else:
|
||||
version = version.split("-")[0]
|
||||
license = license.replace("&" , "and")
|
||||
if (homepage == ''):
|
||||
homepage = 'unknown'
|
||||
if (bugtracker == ''):
|
||||
bugtracker = 'unknown'
|
||||
summary = summary.split("+")[0]
|
||||
|
||||
#calculating the rows needed for the table
|
||||
binb_items_count = len(binb.split(','))
|
||||
binb_items = binb.split(',')
|
||||
|
||||
vbox = gtk.VBox(False,spacing = 0)
|
||||
|
||||
######################################## SUMMARY LABEL #########################################
|
||||
|
||||
if summary != '':
|
||||
self.label_short = gtk.Label()
|
||||
self.label_short.set_width_chars(37)
|
||||
self.label_short.set_selectable(True)
|
||||
self.label_short.set_line_wrap(True)
|
||||
self.label_short.set_markup("<b>" + summary + "</b>")
|
||||
self.label_short.set_property("xalign", 0)
|
||||
|
||||
self.vbox.add(self.label_short)
|
||||
|
||||
########################################## NAME ROW + COL #######################################
|
||||
|
||||
self.label_short = gtk.Label()
|
||||
self.label_short.set_selectable(True)
|
||||
self.label_short.set_line_wrap(True)
|
||||
self.label_short.set_markup("<span weight=\"bold\">Name: </span>" + name)
|
||||
self.label_short.set_property("xalign", 0)
|
||||
|
||||
self.vbox.add(self.label_short)
|
||||
|
||||
####################################### VERSION ROW + COL ####################################
|
||||
|
||||
self.label_short = gtk.Label()
|
||||
self.label_short.set_selectable(True)
|
||||
self.label_short.set_line_wrap(True)
|
||||
self.label_short.set_markup("<span weight=\"bold\">Version: </span>" + version)
|
||||
self.label_short.set_property("xalign", 0)
|
||||
|
||||
self.vbox.add(self.label_short)
|
||||
|
||||
##################################### REVISION ROW + COL #####################################
|
||||
|
||||
self.label_short = gtk.Label()
|
||||
self.label_short.set_line_wrap(True)
|
||||
self.label_short.set_selectable(True)
|
||||
self.label_short.set_markup("<span weight=\"bold\">Revision: </span>" + revision)
|
||||
self.label_short.set_property("xalign", 0)
|
||||
|
||||
self.vbox.add(self.label_short)
|
||||
|
||||
################################## GROUP ROW + COL ############################################
|
||||
|
||||
self.label_short = gtk.Label()
|
||||
self.label_short.set_selectable(True)
|
||||
self.label_short.set_line_wrap(True)
|
||||
self.label_short.set_markup("<span weight=\"bold\">Group: </span>" + group)
|
||||
self.label_short.set_property("xalign", 0)
|
||||
|
||||
self.vbox.add(self.label_short)
|
||||
|
||||
################################# HOMEPAGE ROW + COL ############################################
|
||||
|
||||
if homepage != 'unknown':
|
||||
self.label_info = gtk.Label()
|
||||
self.label_info.set_selectable(True)
|
||||
self.label_info.set_line_wrap(True)
|
||||
if len(homepage) > 35:
|
||||
self.label_info.set_markup("<a href=\"" + homepage + "\">" + homepage[0:35] + "..." + "</a>")
|
||||
else:
|
||||
self.label_info.set_markup("<a href=\"" + homepage + "\">" + homepage[0:60] + "</a>")
|
||||
|
||||
self.label_info.set_property("xalign", 0)
|
||||
|
||||
self.label_short = gtk.Label()
|
||||
self.label_short.set_selectable(True)
|
||||
self.label_short.set_line_wrap(True)
|
||||
self.label_short.set_markup("<b>Homepage: </b>")
|
||||
self.label_short.set_property("xalign", 0)
|
||||
|
||||
self.vbox.add(self.label_short)
|
||||
self.vbox.add(self.label_info)
|
||||
|
||||
################################# BUGTRACKER ROW + COL ###########################################
|
||||
|
||||
if bugtracker != 'unknown':
|
||||
self.label_info = gtk.Label()
|
||||
self.label_info.set_selectable(True)
|
||||
self.label_info.set_line_wrap(True)
|
||||
if len(bugtracker) > 35:
|
||||
self.label_info.set_markup("<a href=\"" + bugtracker + "\">" + bugtracker[0:35] + "..." + "</a>")
|
||||
else:
|
||||
self.label_info.set_markup("<a href=\"" + bugtracker + "\">" + bugtracker[0:60] + "</a>")
|
||||
self.label_info.set_property("xalign", 0)
|
||||
|
||||
self.label_short = gtk.Label()
|
||||
self.label_short.set_selectable(True)
|
||||
self.label_short.set_line_wrap(True)
|
||||
self.label_short.set_markup("<b>Bugtracker: </b>")
|
||||
self.label_short.set_property("xalign", 0)
|
||||
|
||||
self.vbox.add(self.label_short)
|
||||
self.vbox.add(self.label_info)
|
||||
|
||||
################################# LICENSE ROW + COL ############################################
|
||||
|
||||
self.label_info = gtk.Label()
|
||||
self.label_info.set_selectable(True)
|
||||
self.label_info.set_line_wrap(True)
|
||||
self.label_info.set_markup(license)
|
||||
self.label_info.set_property("xalign", 0)
|
||||
|
||||
self.label_short = gtk.Label()
|
||||
self.label_short.set_selectable(True)
|
||||
self.label_short.set_line_wrap(True)
|
||||
self.label_short.set_markup("<span weight=\"bold\">License: </span>")
|
||||
self.label_short.set_property("xalign", 0)
|
||||
|
||||
self.vbox.add(self.label_short)
|
||||
self.vbox.add(self.label_info)
|
||||
|
||||
################################### BINB ROW+COL #############################################
|
||||
|
||||
if binb != '':
|
||||
self.label_short = gtk.Label()
|
||||
self.label_short.set_selectable(True)
|
||||
self.label_short.set_line_wrap(True)
|
||||
self.label_short.set_markup("<span weight=\"bold\">Brought in by: </span>")
|
||||
self.label_short.set_property("xalign", 0)
|
||||
self.vbox.add(self.label_short)
|
||||
self.label_info = gtk.Label()
|
||||
self.label_info.set_selectable(True)
|
||||
self.label_info.set_width_chars(36)
|
||||
if len(binb) > 200:
|
||||
scrolled_window = gtk.ScrolledWindow()
|
||||
scrolled_window.set_policy(gtk.POLICY_NEVER,gtk.POLICY_ALWAYS)
|
||||
scrolled_window.set_size_request(100,100)
|
||||
self.label_info.set_markup(binb)
|
||||
self.label_info.set_padding(6,6)
|
||||
self.label_info.set_alignment(0,0)
|
||||
self.label_info.set_line_wrap(True)
|
||||
scrolled_window.add_with_viewport(self.label_info)
|
||||
self.vbox.add(scrolled_window)
|
||||
else:
|
||||
self.label_info.set_markup(binb)
|
||||
self.label_info.set_property("xalign", 0)
|
||||
self.label_info.set_line_wrap(True)
|
||||
self.vbox.add(self.label_info)
|
||||
|
||||
################################ DESCRIPTION TAG ROW #################################################
|
||||
|
||||
self.label_short = gtk.Label()
|
||||
self.label_short.set_line_wrap(True)
|
||||
self.label_short.set_markup("<span weight=\"bold\">Description </span>")
|
||||
self.label_short.set_property("xalign", 0)
|
||||
self.vbox.add(self.label_short)
|
||||
|
||||
################################ DESCRIPTION INFORMATION ROW ##########################################
|
||||
|
||||
hbox = gtk.HBox(True,spacing = 0)
|
||||
|
||||
self.label_short = gtk.Label()
|
||||
self.label_short.set_selectable(True)
|
||||
self.label_short.set_width_chars(36)
|
||||
if len(description) > 200:
|
||||
scrolled_window = gtk.ScrolledWindow()
|
||||
scrolled_window.set_policy(gtk.POLICY_NEVER,gtk.POLICY_ALWAYS)
|
||||
scrolled_window.set_size_request(100,100)
|
||||
self.label_short.set_markup(description)
|
||||
self.label_short.set_padding(6,6)
|
||||
self.label_short.set_alignment(0,0)
|
||||
self.label_short.set_line_wrap(True)
|
||||
scrolled_window.add_with_viewport(self.label_short)
|
||||
self.vbox.add(scrolled_window)
|
||||
else:
|
||||
self.label_short.set_markup(description)
|
||||
self.label_short.set_property("xalign", 0)
|
||||
self.label_short.set_line_wrap(True)
|
||||
self.vbox.add(self.label_short)
|
||||
|
||||
self.vbox.show_all()
|
||||
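PropertyDialog above dispatches purely on the size of the information mapping: ten keys select the recipe layout, five keys the package layout, and anything else the plain information pane. A sketch of the package-shaped input with illustrative values; files_list is expected to be a JSON-encoded dictionary keyed by file path:

import json

package_info = {
    'name'      : 'busybox-1.20.2-r7',
    'size'      : '540 KB',
    'recipe'    : 'busybox+git0+abcdef',      # only the part before '+' is displayed
    'binb'      : 'User Selected, core-image-minimal',
    'files_list': json.dumps({'/bin/busybox': 498072}),
}
# len(package_info) == 5, so create_package_visual_elements() handles it.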
122
bitbake/lib/bb/ui/crumbs/hig/settingsuihelper.py
Normal file
@@ -0,0 +1,122 @@
|
||||
#
|
||||
# BitBake Graphical GTK User Interface
|
||||
#
|
||||
# Copyright (C) 2011-2012 Intel Corporation
|
||||
#
|
||||
# Authored by Joshua Lock <josh@linux.intel.com>
|
||||
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
|
||||
# Authored by Shane Wang <shane.wang@intel.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import gtk
|
||||
import os
|
||||
from bb.ui.crumbs.hobwidget import HobInfoButton, HobButton, HobAltButton
|
||||
|
||||
"""
|
||||
The following are convenience classes for implementing GNOME HIG compliant
|
||||
BitBake GUIs
|
||||
In summary: spacing = 12px, border-width = 6px
|
||||
"""
|
||||
|
||||
class SettingsUIHelper():
|
||||
|
||||
def gen_label_widget(self, content):
|
||||
label = gtk.Label()
|
||||
label.set_alignment(0, 0)
|
||||
label.set_markup(content)
|
||||
label.show()
|
||||
return label
|
||||
|
||||
def gen_label_info_widget(self, content, tooltip):
|
||||
table = gtk.Table(1, 10, False)
|
||||
label = self.gen_label_widget(content)
|
||||
info = HobInfoButton(tooltip, self)
|
||||
table.attach(label, 0, 1, 0, 1, xoptions=gtk.FILL)
|
||||
table.attach(info, 1, 2, 0, 1, xoptions=gtk.FILL, xpadding=10)
|
||||
return table
|
||||
|
||||
def gen_spinner_widget(self, content, lower, upper, tooltip=""):
|
||||
hbox = gtk.HBox(False, 12)
|
||||
adjust = gtk.Adjustment(value=content, lower=lower, upper=upper, step_incr=1)
|
||||
spinner = gtk.SpinButton(adjustment=adjust, climb_rate=1, digits=0)
|
||||
|
||||
spinner.set_value(content)
|
||||
hbox.pack_start(spinner, expand=False, fill=False)
|
||||
|
||||
info = HobInfoButton(tooltip, self)
|
||||
hbox.pack_start(info, expand=False, fill=False)
|
||||
|
||||
hbox.show_all()
|
||||
return hbox, spinner
|
||||
|
||||
def gen_combo_widget(self, curr_item, all_item, tooltip=""):
|
||||
hbox = gtk.HBox(False, 12)
|
||||
combo = gtk.combo_box_new_text()
|
||||
hbox.pack_start(combo, expand=False, fill=False)
|
||||
|
||||
index = 0
|
||||
for item in all_item or []:
|
||||
combo.append_text(item)
|
||||
if item == curr_item:
|
||||
combo.set_active(index)
|
||||
index += 1
|
||||
|
||||
info = HobInfoButton(tooltip, self)
|
||||
hbox.pack_start(info, expand=False, fill=False)
|
||||
|
||||
hbox.show_all()
|
||||
return hbox, combo
|
||||
|
||||
def entry_widget_select_path_cb(self, action, parent, entry):
|
||||
dialog = gtk.FileChooserDialog("", parent,
|
||||
gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER)
|
||||
text = entry.get_text()
|
||||
dialog.set_current_folder(text if len(text) > 0 else os.getcwd())
|
||||
button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
|
||||
HobAltButton.style_button(button)
|
||||
button = dialog.add_button("Open", gtk.RESPONSE_YES)
|
||||
HobButton.style_button(button)
|
||||
response = dialog.run()
|
||||
if response == gtk.RESPONSE_YES:
|
||||
path = dialog.get_filename()
|
||||
entry.set_text(path)
|
||||
|
||||
dialog.destroy()
|
||||
|
||||
def gen_entry_widget(self, content, parent, tooltip="", need_button=True):
|
||||
hbox = gtk.HBox(False, 12)
|
||||
entry = gtk.Entry()
|
||||
entry.set_text(content)
|
||||
entry.set_size_request(350,30)
|
||||
|
||||
if need_button:
|
||||
table = gtk.Table(1, 10, False)
|
||||
hbox.pack_start(table, expand=True, fill=True)
|
||||
table.attach(entry, 0, 9, 0, 1, xoptions=gtk.SHRINK)
|
||||
image = gtk.Image()
|
||||
image.set_from_stock(gtk.STOCK_OPEN,gtk.ICON_SIZE_BUTTON)
|
||||
open_button = gtk.Button()
|
||||
open_button.set_image(image)
|
||||
open_button.connect("clicked", self.entry_widget_select_path_cb, parent, entry)
|
||||
table.attach(open_button, 9, 10, 0, 1, xoptions=gtk.SHRINK)
|
||||
else:
|
||||
hbox.pack_start(entry, expand=True, fill=True)
|
||||
|
||||
if tooltip != "":
|
||||
info = HobInfoButton(tooltip, self)
|
||||
hbox.pack_start(info, expand=False, fill=False)
|
||||
|
||||
hbox.show_all()
|
||||
return hbox, entry
|
||||
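SettingsUIHelper is written as a mixin: a dialog class inherits it alongside CrumbsDialog, packs the container each gen_* helper returns, and keeps the second return value so it can read the user's input back later. A minimal sketch of that usage, assuming the Hob imports shown above; ExampleSettingsDialog and its two fields are illustrative names, not part of Hob:

from bb.ui.crumbs.hig.crumbsdialog import CrumbsDialog
from bb.ui.crumbs.hig.settingsuihelper import SettingsUIHelper

class ExampleSettingsDialog(CrumbsDialog, SettingsUIHelper):
    def __init__(self, title, parent, flags, buttons=None):
        super(ExampleSettingsDialog, self).__init__(title, parent, flags, buttons)
        # A spinner for a numeric setting and an entry with a file-chooser button.
        container, self.threads_spinner = self.gen_spinner_widget(4, 1, 16, "BitBake parser threads")
        self.vbox.pack_start(container, expand=False, fill=False)
        container, self.dldir_entry = self.gen_entry_widget("downloads", self, "Download directory")
        self.vbox.pack_start(container, expand=False, fill=False)
        self.show_all()

    def selected_values(self):
        return self.threads_spinner.get_value_as_int(), self.dldir_entry.get_text()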
@@ -1,10 +1,9 @@
#
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
# BitBake Graphical GTK User Interface
#
# BitBake Toaster Implementation
# Copyright (C) 2012 Intel Corporation
#
# Copyright (C) 2016 Intel Corporation
# Authored by Shane Wang <shane.wang@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -19,23 +18,21 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

# Django settings for Toaster project.
class HobColors:
WHITE = "#ffffff"
PALE_GREEN = "#aaffaa"
ORANGE = "#eb8e68"
PALE_RED = "#ffaaaa"
GRAY = "#aaaaaa"
LIGHT_GRAY = "#dddddd"
SLIGHT_DARK = "#5f5f5f"
DARK = "#3c3b37"
BLACK = "#000000"
PALE_BLUE = "#53b8ff"
DEEP_RED = "#aa3e3e"
KHAKI = "#fff68f"

# Settings overlay to use for running tests
# DJANGO_SETTINGS_MODULE=toastermain.settings-test

from toastermain.settings import *

DEBUG = True
TEMPLATE_DEBUG = DEBUG

DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': '/tmp/toaster-test-db.sqlite',
'TEST': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': '/tmp/toaster-test-db.sqlite',
}
}
}
OK = WHITE
RUNNING = PALE_GREEN
WARNING = ORANGE
ERROR = PALE_RED
904
bitbake/lib/bb/ui/crumbs/hobwidget.py
Normal file
@@ -0,0 +1,904 @@
|
||||
# BitBake Graphical GTK User Interface
|
||||
#
|
||||
# Copyright (C) 2011-2012 Intel Corporation
|
||||
#
|
||||
# Authored by Dongxiao Xu <dongxiao.xu@intel.com>
|
||||
# Authored by Shane Wang <shane.wang@intel.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
import gtk
|
||||
import gobject
|
||||
import os
|
||||
import os.path
|
||||
import sys
|
||||
import pango, pangocairo
|
||||
import cairo
|
||||
import math
|
||||
|
||||
from bb.ui.crumbs.hobcolor import HobColors
|
||||
from bb.ui.crumbs.persistenttooltip import PersistentTooltip
|
||||
|
||||
class hwc:
|
||||
|
||||
MAIN_WIN_WIDTH = 1024
|
||||
MAIN_WIN_HEIGHT = 700
|
||||
|
||||
class hic:
|
||||
|
||||
HOB_ICON_BASE_DIR = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), ("ui/icons/"))
|
||||
|
||||
ICON_RCIPE_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('recipe/recipe_display.png'))
|
||||
ICON_RCIPE_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('recipe/recipe_hover.png'))
|
||||
ICON_PACKAGES_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('packages/packages_display.png'))
|
||||
ICON_PACKAGES_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('packages/packages_hover.png'))
|
||||
ICON_LAYERS_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('layers/layers_display.png'))
|
||||
ICON_LAYERS_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('layers/layers_hover.png'))
|
||||
ICON_IMAGES_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('images/images_display.png'))
|
||||
ICON_IMAGES_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('images/images_hover.png'))
|
||||
ICON_SETTINGS_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('settings/settings_display.png'))
|
||||
ICON_SETTINGS_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('settings/settings_hover.png'))
|
||||
ICON_INFO_DISPLAY_FILE = os.path.join(HOB_ICON_BASE_DIR, ('info/info_display.png'))
|
||||
ICON_INFO_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('info/info_hover.png'))
|
||||
ICON_INDI_CONFIRM_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/confirmation.png'))
|
||||
ICON_INDI_ERROR_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/denied.png'))
|
||||
ICON_INDI_REMOVE_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/remove.png'))
|
||||
ICON_INDI_REMOVE_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/remove-hover.png'))
|
||||
ICON_INDI_ADD_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/add.png'))
|
||||
ICON_INDI_ADD_HOVER_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/add-hover.png'))
|
||||
ICON_INDI_REFRESH_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/refresh.png'))
|
||||
ICON_INDI_ALERT_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/alert.png'))
|
||||
ICON_INDI_TICK_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/tick.png'))
|
||||
ICON_INDI_INFO_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/info.png'))
|
||||
|
||||
class HobViewTable (gtk.VBox):
|
||||
"""
|
||||
A VBox to contain the table for the different recipe views and the package view
|
||||
"""
|
||||
__gsignals__ = {
|
||||
"toggled" : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
(gobject.TYPE_PYOBJECT,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_INT,
|
||||
gobject.TYPE_PYOBJECT,)),
|
||||
"row-activated" : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
(gobject.TYPE_PYOBJECT,
|
||||
gobject.TYPE_PYOBJECT,)),
|
||||
"cell-fadeinout-stopped" : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
(gobject.TYPE_PYOBJECT,
|
||||
gobject.TYPE_PYOBJECT,
|
||||
gobject.TYPE_PYOBJECT,)),
|
||||
}
|
||||
|
||||
def __init__(self, columns, name):
|
||||
gtk.VBox.__init__(self, False, 6)
|
||||
self.table_tree = gtk.TreeView()
|
||||
self.table_tree.set_headers_visible(True)
|
||||
self.table_tree.set_headers_clickable(True)
|
||||
self.table_tree.set_rules_hint(True)
|
||||
self.table_tree.set_enable_tree_lines(True)
|
||||
self.table_tree.get_selection().set_mode(gtk.SELECTION_SINGLE)
|
||||
self.toggle_columns = []
|
||||
self.table_tree.connect("row-activated", self.row_activated_cb)
|
||||
self.top_bar = None
|
||||
self.tab_name = name
|
||||
|
||||
for i, column in enumerate(columns):
|
||||
col_name = column['col_name']
|
||||
col = gtk.TreeViewColumn(col_name)
|
||||
col.set_clickable(True)
|
||||
col.set_resizable(True)
|
||||
if self.tab_name.startswith('Included'):
|
||||
if col_name!='Included':
|
||||
col.set_sort_column_id(column['col_id'])
|
||||
else:
|
||||
col.set_sort_column_id(column['col_id'])
|
||||
if 'col_min' in column.keys():
|
||||
col.set_min_width(column['col_min'])
|
||||
if 'col_max' in column.keys():
|
||||
col.set_max_width(column['col_max'])
|
||||
if 'expand' in column.keys():
|
||||
col.set_expand(True)
|
||||
self.table_tree.append_column(col)
|
||||
|
||||
if (not 'col_style' in column.keys()) or column['col_style'] == 'text':
|
||||
cell = gtk.CellRendererText()
|
||||
col.pack_start(cell, True)
|
||||
col.set_attributes(cell, text=column['col_id'])
|
||||
if 'col_t_id' in column.keys():
|
||||
col.add_attribute(cell, 'font', column['col_t_id'])
|
||||
elif column['col_style'] == 'check toggle':
|
||||
cell = HobCellRendererToggle()
|
||||
cell.set_property('activatable', True)
|
||||
cell.connect("toggled", self.toggled_cb, i, self.table_tree)
|
||||
cell.connect_render_state_changed(self.stop_cell_fadeinout_cb, self.table_tree)
|
||||
self.toggle_id = i
|
||||
col.pack_end(cell, True)
|
||||
col.set_attributes(cell, active=column['col_id'])
|
||||
self.toggle_columns.append(col_name)
|
||||
if 'col_group' in column.keys():
|
||||
col.set_cell_data_func(cell, self.set_group_number_cb)
|
||||
elif column['col_style'] == 'radio toggle':
|
||||
cell = gtk.CellRendererToggle()
|
||||
cell.set_property('activatable', True)
|
||||
cell.set_radio(True)
|
||||
cell.connect("toggled", self.toggled_cb, i, self.table_tree)
|
||||
self.toggle_id = i
|
||||
col.pack_end(cell, True)
|
||||
col.set_attributes(cell, active=column['col_id'])
|
||||
self.toggle_columns.append(col_name)
|
||||
elif column['col_style'] == 'binb':
|
||||
cell = gtk.CellRendererText()
|
||||
col.pack_start(cell, True)
|
||||
col.set_cell_data_func(cell, self.display_binb_cb, column['col_id'])
|
||||
if 'col_t_id' in column.keys():
|
||||
col.add_attribute(cell, 'font', column['col_t_id'])
|
||||
|
||||
self.scroll = gtk.ScrolledWindow()
|
||||
self.scroll.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC)
|
||||
self.scroll.add(self.table_tree)
|
||||
|
||||
self.pack_end(self.scroll, True, True, 0)
|
||||
|
||||
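HobViewTable.__init__() above is driven entirely by a list of column-description dictionaries: 'col_id' indexes the tree-model column, 'col_style' picks the renderer ('text', 'check toggle', 'radio toggle' or 'binb'), and 'col_min', 'col_max' and 'expand' are optional sizing hints. A sketch of a two-column table mirroring the __columns__ spec used by ImageSelectionDialog; the column names and model contents below are illustrative:

import gtk
import gobject
from bb.ui.crumbs.hobwidget import HobViewTable

columns = [{
    'col_name' : 'Recipe name',
    'col_id'   : 0,
    'col_style': 'text',
    'col_min'  : 100,
    'col_max'  : 400,
    'expand'   : 'True'
}, {
    'col_name' : 'Select',
    'col_id'   : 1,
    'col_style': 'radio toggle',
    'col_min'  : 160,
    'col_max'  : 160
}]

table = HobViewTable(columns, "All recipes")
model = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_BOOLEAN)
model.append(["busybox", True])
table.set_model(model)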
def add_no_result_bar(self, entry):
|
||||
color = HobColors.KHAKI
|
||||
self.top_bar = gtk.EventBox()
|
||||
self.top_bar.set_size_request(-1, 70)
|
||||
self.top_bar.modify_bg(gtk.STATE_NORMAL, gtk.gdk.color_parse(color))
|
||||
self.top_bar.set_flags(gtk.CAN_DEFAULT)
|
||||
self.top_bar.grab_default()
|
||||
|
||||
no_result_tab = gtk.Table(5, 20, True)
|
||||
self.top_bar.add(no_result_tab)
|
||||
|
||||
label = gtk.Label()
|
||||
label.set_alignment(0.0, 0.5)
|
||||
title = "No results matching your search"
|
||||
label.set_markup("<span size='x-large'><b>%s</b></span>" % title)
|
||||
no_result_tab.attach(label, 1, 14, 1, 4)
|
||||
|
||||
clear_button = HobButton("Clear search")
|
||||
clear_button.set_tooltip_text("Clear search query")
|
||||
clear_button.connect('clicked', self.set_search_entry_clear_cb, entry)
|
||||
no_result_tab.attach(clear_button, 16, 19, 1, 4)
|
||||
|
||||
self.pack_start(self.top_bar, False, True, 12)
|
||||
self.top_bar.show_all()
|
||||
|
||||
def set_search_entry_clear_cb(self, button, search):
|
||||
if search.get_editable() == True:
|
||||
search.set_text("")
|
||||
search.set_icon_sensitive(gtk.ENTRY_ICON_SECONDARY, False)
|
||||
search.grab_focus()
|
||||
|
||||
def display_binb_cb(self, col, cell, model, it, col_id):
|
||||
binb = model.get_value(it, col_id)
|
||||
# Just display the first item
|
||||
if binb:
|
||||
bin = binb.split(', ')
|
||||
total_no = len(bin)
|
||||
if total_no > 1 and bin[0] == "User Selected":
|
||||
if total_no > 2:
|
||||
present_binb = bin[1] + ' (+' + str(total_no - 1) + ')'
|
||||
else:
|
||||
present_binb = bin[1]
|
||||
else:
|
||||
if total_no > 1:
|
||||
present_binb = bin[0] + ' (+' + str(total_no - 1) + ')'
|
||||
else:
|
||||
present_binb = bin[0]
|
||||
cell.set_property('text', present_binb)
|
||||
else:
|
||||
cell.set_property('text', "")
|
||||
return True
|
||||
|
||||
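display_binb_cb() above compresses the comma-separated "brought in by" list to one representative entry plus a count, skipping a leading "User Selected" marker when something more specific follows. The same rule as a pure function, with the mapping spelled out in the comments (summarize_binb is an illustrative name):

def summarize_binb(binb):
    #   "User Selected, quilt, strace" -> "quilt (+2)"
    #   "User Selected, quilt"         -> "quilt"
    #   "busybox, initscripts"         -> "busybox (+1)"
    #   ""                             -> ""
    if not binb:
        return ""
    entries = binb.split(', ')
    if len(entries) > 1 and entries[0] == "User Selected":
        if len(entries) > 2:
            return "%s (+%d)" % (entries[1], len(entries) - 1)
        return entries[1]
    if len(entries) > 1:
        return "%s (+%d)" % (entries[0], len(entries) - 1)
    return entries[0]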
def set_model(self, tree_model):
|
||||
self.table_tree.set_model(tree_model)
|
||||
|
||||
def toggle_default(self):
|
||||
model = self.table_tree.get_model()
|
||||
if not model:
|
||||
return
|
||||
iter = model.get_iter_first()
|
||||
if iter:
|
||||
rowpath = model.get_path(iter)
|
||||
model[rowpath][self.toggle_id] = True
|
||||
|
||||
def toggled_cb(self, cell, path, columnid, tree):
|
||||
self.emit("toggled", cell, path, columnid, tree)
|
||||
|
||||
def row_activated_cb(self, tree, path, view_column):
|
||||
if not view_column.get_title() in self.toggle_columns:
|
||||
self.emit("row-activated", tree.get_model(), path)
|
||||
|
||||
def stop_cell_fadeinout_cb(self, ctrl, cell, tree):
|
||||
self.emit("cell-fadeinout-stopped", ctrl, cell, tree)
|
||||
|
||||
def set_group_number_cb(self, col, cell, model, iter):
|
||||
if model and (model.iter_parent(iter) == None):
|
||||
cell.cell_attr["number_of_children"] = model.iter_n_children(iter)
|
||||
else:
|
||||
cell.cell_attr["number_of_children"] = 0
|
||||
|
||||
def connect_group_selection(self, cb_func):
|
||||
self.table_tree.get_selection().connect("changed", cb_func)
|
||||
|
||||
"""
|
||||
A helper to calculate a softened value for the colour of a widget in the
provided state.
|
||||
|
||||
widget: the widget whose style to use
|
||||
state: the state of the widget to use the style for
|
||||
|
||||
Returns a string value representing the softened colour
|
||||
"""
|
||||
def soften_color(widget, state=gtk.STATE_NORMAL):
|
||||
# this colour munging routine is heavily inspired by gdu_util_get_mix_color()
|
||||
# from gnome-disk-utility:
|
||||
# http://git.gnome.org/browse/gnome-disk-utility/tree/src/gdu-gtk/gdu-gtk.c?h=gnome-3-0
|
||||
blend = 0.7
|
||||
style = widget.get_style()
|
||||
color = style.text[state]
|
||||
color.red = color.red * blend + style.base[state].red * (1.0 - blend)
|
||||
color.green = color.green * blend + style.base[state].green * (1.0 - blend)
|
||||
color.blue = color.blue * blend + style.base[state].blue * (1.0 - blend)
|
||||
return color.to_string()
|
||||
|
||||
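soften_color() above blends the widget's text colour toward its base (background) colour, 70% text to 30% base, to produce a muted shade for secondary text. The per-channel arithmetic, assuming GTK's 16-bit colour channels (mix_channel is an illustrative name):

def mix_channel(text, base, blend=0.7):
    # GTK colour channels are 16-bit (0-65535); black text on a white base
    # softens to roughly 30% grey.
    return int(text * blend + base * (1.0 - blend))

assert mix_channel(0, 65535) == 19660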
class BaseHobButton(gtk.Button):
|
||||
"""
|
||||
A gtk.Button subclass which follows the visual design of Hob for primary
|
||||
action buttons
|
||||
|
||||
label: the text to display as the button's label
|
||||
"""
|
||||
def __init__(self, label):
|
||||
gtk.Button.__init__(self, label)
|
||||
HobButton.style_button(self)
|
||||
|
||||
@staticmethod
|
||||
def style_button(button):
|
||||
style = button.get_style()
|
||||
style = gtk.rc_get_style_by_paths(gtk.settings_get_default(), 'gtk-button', 'gtk-button', gobject.TYPE_NONE)
|
||||
|
||||
button.set_flags(gtk.CAN_DEFAULT)
|
||||
button.grab_default()
|
||||
|
||||
# label = "<span size='x-large'><b>%s</b></span>" % gobject.markup_escape_text(button.get_label())
|
||||
label = button.get_label()
|
||||
button.set_label(label)
|
||||
button.child.set_use_markup(True)
|
||||
|
||||
class HobButton(BaseHobButton):
|
||||
"""
|
||||
A gtk.Button subclass which follows the visual design of Hob for primary
|
||||
action buttons
|
||||
|
||||
label: the text to display as the button's label
|
||||
"""
|
||||
def __init__(self, label):
|
||||
BaseHobButton.__init__(self, label)
|
||||
HobButton.style_button(self)
|
||||
|
||||
class HobAltButton(BaseHobButton):
|
||||
"""
|
||||
A gtk.Button subclass which has no relief, and so is more discreet
|
||||
"""
|
||||
def __init__(self, label):
|
||||
BaseHobButton.__init__(self, label)
|
||||
HobAltButton.style_button(self)
|
||||
|
||||
"""
|
||||
A callback for the state-changed event to ensure the text is displayed
|
||||
differently when the widget is not sensitive
|
||||
"""
|
||||
@staticmethod
|
||||
def desensitise_on_state_change_cb(button, state):
|
||||
if not button.get_property("sensitive"):
|
||||
HobAltButton.set_text(button, False)
|
||||
else:
|
||||
HobAltButton.set_text(button, True)
|
||||
|
||||
"""
|
||||
Set the button label with an appropriate colour for the current widget state
|
||||
"""
|
||||
@staticmethod
|
||||
def set_text(button, sensitive=True):
|
||||
if sensitive:
|
||||
colour = HobColors.PALE_BLUE
|
||||
else:
|
||||
colour = HobColors.LIGHT_GRAY
|
||||
button.set_label("<span size='large' color='%s'><b>%s</b></span>" % (colour, gobject.markup_escape_text(button.text)))
|
||||
button.child.set_use_markup(True)
|
||||
|
||||
class HobImageButton(gtk.Button):
|
||||
"""
|
||||
A gtk.Button with an icon and two rows of text, the second of which is
|
||||
displayed in a blended colour.
|
||||
|
||||
primary_text: the main button label
|
||||
secondary_text: optional second line of text
|
||||
icon_path: path to the icon file to display on the button
|
||||
"""
|
||||
def __init__(self, primary_text, secondary_text="", icon_path="", hover_icon_path=""):
|
||||
gtk.Button.__init__(self)
|
||||
self.set_relief(gtk.RELIEF_NONE)
|
||||
|
||||
self.icon_path = icon_path
|
||||
self.hover_icon_path = hover_icon_path
|
||||
|
||||
hbox = gtk.HBox(False, 10)
|
||||
hbox.show()
|
||||
self.add(hbox)
|
||||
self.icon = gtk.Image()
|
||||
self.icon.set_from_file(self.icon_path)
|
||||
self.icon.set_alignment(0.5, 0.0)
|
||||
self.icon.show()
|
||||
if self.hover_icon_path and len(self.hover_icon_path):
|
||||
self.connect("enter-notify-event", self.set_hover_icon_cb)
|
||||
self.connect("leave-notify-event", self.set_icon_cb)
|
||||
hbox.pack_start(self.icon, False, False, 0)
|
||||
label = gtk.Label()
|
||||
label.set_alignment(0.0, 0.5)
|
||||
colour = soften_color(label)
|
||||
mark = "<span size='x-large'>%s</span>\n<span size='medium' fgcolor='%s' weight='ultralight'>%s</span>" % (primary_text, colour, secondary_text)
|
||||
label.set_markup(mark)
|
||||
label.show()
|
||||
hbox.pack_start(label, True, True, 0)
|
||||
|
||||
def set_hover_icon_cb(self, widget, event):
|
||||
self.icon.set_from_file(self.hover_icon_path)
|
||||
|
||||
def set_icon_cb(self, widget, event):
|
||||
self.icon.set_from_file(self.icon_path)
|
||||
|
||||
class HobInfoButton(gtk.EventBox):
|
||||
"""
|
||||
This class implements a button-like widget per the Hob visual and UX designs
|
||||
which will display a persistent tooltip, with the contents of tip_markup, when
|
||||
clicked.
|
||||
|
||||
tip_markup: the Pango Markup to be displayed in the persistent tooltip
|
||||
"""
|
||||
def __init__(self, tip_markup, parent=None):
|
||||
gtk.EventBox.__init__(self)
|
||||
self.image = gtk.Image()
|
||||
self.image.set_from_file(
|
||||
hic.ICON_INFO_DISPLAY_FILE)
|
||||
self.image.show()
|
||||
self.add(self.image)
|
||||
self.tip_markup = tip_markup
|
||||
self.my_parent = parent
|
||||
|
||||
self.set_events(gtk.gdk.BUTTON_RELEASE |
|
||||
gtk.gdk.ENTER_NOTIFY_MASK |
|
||||
gtk.gdk.LEAVE_NOTIFY_MASK)
|
||||
|
||||
self.connect("button-release-event", self.button_release_cb)
|
||||
self.connect("enter-notify-event", self.mouse_in_cb)
|
||||
self.connect("leave-notify-event", self.mouse_out_cb)
|
||||
|
||||
"""
|
||||
When the mouse button is released, emulate a button-click and show the
associated PropertyDialog
|
||||
"""
|
||||
def button_release_cb(self, widget, event):
|
||||
from bb.ui.crumbs.hig.propertydialog import PropertyDialog
|
||||
self.dialog = PropertyDialog(title = '',
|
||||
parent = self.my_parent,
|
||||
information = self.tip_markup,
|
||||
flags = gtk.DIALOG_DESTROY_WITH_PARENT
|
||||
| gtk.DIALOG_NO_SEPARATOR)
|
||||
|
||||
button = self.dialog.add_button("Close", gtk.RESPONSE_CANCEL)
|
||||
HobAltButton.style_button(button)
|
||||
button.connect("clicked", lambda w: self.dialog.destroy())
|
||||
self.dialog.show_all()
|
||||
self.dialog.run()
|
||||
|
||||
"""
|
||||
Change to the prelight image when the mouse enters the widget
|
||||
"""
|
||||
def mouse_in_cb(self, widget, event):
|
||||
self.image.set_from_file(hic.ICON_INFO_HOVER_FILE)
|
||||
|
||||
"""
|
||||
Change back to the stock image when the mouse leaves the widget
|
||||
"""
|
||||
def mouse_out_cb(self, widget, event):
|
||||
self.image.set_from_file(hic.ICON_INFO_DISPLAY_FILE)
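# Illustrative usage sketch (not part of the original source): a HobInfoButton
# shows its tip_markup in a dialog when clicked. Assuming a parent gtk.Window
# named 'main_win' and a container named 'header_box' already exist:
#
#   info = HobInfoButton("<b>Hint:</b> click for details", parent=main_win)
#   header_box.pack_start(info, expand=False, fill=False, padding=0)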
|
||||
|
||||
class HobIndicator(gtk.DrawingArea):
|
||||
def __init__(self, count):
|
||||
gtk.DrawingArea.__init__(self)
|
||||
# Set no window for transparent background
|
||||
self.set_has_window(False)
|
||||
self.set_size_request(38,38)
|
||||
# We need to pass through button clicks
|
||||
self.add_events(gtk.gdk.BUTTON_PRESS_MASK | gtk.gdk.BUTTON_RELEASE_MASK)
|
||||
|
||||
self.connect('expose-event', self.expose)
|
||||
|
||||
self.count = count
|
||||
self.color = HobColors.GRAY
|
||||
|
||||
def expose(self, widget, event):
|
||||
if self.count and self.count > 0:
|
||||
ctx = widget.window.cairo_create()
|
||||
|
||||
x, y, w, h = self.allocation
|
||||
|
||||
ctx.set_operator(cairo.OPERATOR_OVER)
|
||||
ctx.set_source_color(gtk.gdk.color_parse(self.color))
|
||||
ctx.translate(w/2, h/2)
|
||||
ctx.arc(x, y, min(w,h)/2 - 2, 0, 2*math.pi)
|
||||
ctx.fill_preserve()
|
||||
|
||||
layout = self.create_pango_layout(str(self.count))
|
||||
textw, texth = layout.get_pixel_size()
|
||||
x = (w/2)-(textw/2) + x
|
||||
y = (h/2) - (texth/2) + y
|
||||
ctx.move_to(x, y)
|
||||
self.window.draw_layout(self.style.light_gc[gtk.STATE_NORMAL], int(x), int(y), layout)
|
||||
|
||||
def set_count(self, count):
|
||||
self.count = count
|
||||
|
||||
def set_active(self, active):
|
||||
if active:
|
||||
self.color = HobColors.DEEP_RED
|
||||
else:
|
||||
self.color = HobColors.GRAY
|
||||
|
||||
class HobTabLabel(gtk.HBox):
|
||||
def __init__(self, text, count=0):
|
||||
gtk.HBox.__init__(self, False, 0)
|
||||
self.indicator = HobIndicator(count)
|
||||
self.indicator.show()
|
||||
self.pack_end(self.indicator, False, False)
|
||||
self.lbl = gtk.Label(text)
|
||||
self.lbl.set_alignment(0.0, 0.5)
|
||||
self.lbl.show()
|
||||
self.pack_end(self.lbl, True, True, 6)
|
||||
|
||||
def set_count(self, count):
|
||||
self.indicator.set_count(count)
|
||||
|
||||
def set_active(self, active=True):
|
||||
self.indicator.set_active(active)
|
||||
|
||||
class HobNotebook(gtk.Notebook):
|
||||
def __init__(self):
|
||||
gtk.Notebook.__init__(self)
|
||||
self.set_property('homogeneous', True)
|
||||
|
||||
self.pages = []
|
||||
|
||||
self.search = None
|
||||
self.search_focus = False
|
||||
self.page_changed = False
|
||||
|
||||
self.connect("switch-page", self.page_changed_cb)
|
||||
|
||||
self.show_all()
|
||||
|
||||
def page_changed_cb(self, nb, page, page_num):
|
||||
for p, lbl in enumerate(self.pages):
|
||||
if p == page_num:
|
||||
lbl.set_active()
|
||||
else:
|
||||
lbl.set_active(False)
|
||||
|
||||
if self.search:
|
||||
self.page_changed = True
|
||||
self.reset_entry(self.search, page_num)
|
||||
|
||||
def append_page(self, child, tab_label, tab_tooltip=None):
|
||||
label = HobTabLabel(tab_label)
|
||||
if tab_tooltip:
|
||||
label.set_tooltip_text(tab_tooltip)
|
||||
label.set_active(False)
|
||||
self.pages.append(label)
|
||||
gtk.Notebook.append_page(self, child, label)
|
||||
|
||||
def set_entry(self, names, tips):
|
||||
self.search = gtk.Entry()
|
||||
self.search_names = names
|
||||
self.search_tips = tips
|
||||
style = self.search.get_style()
|
||||
style.text[gtk.STATE_NORMAL] = self.get_colormap().alloc_color(HobColors.GRAY, False, False)
|
||||
self.search.set_style(style)
|
||||
self.search.set_text(names[0])
|
||||
self.search.set_tooltip_text(self.search_tips[0])
|
||||
self.search.props.has_tooltip = True
|
||||
|
||||
self.search.set_editable(False)
|
||||
self.search.set_icon_from_stock(gtk.ENTRY_ICON_SECONDARY, gtk.STOCK_CLEAR)
|
||||
self.search.set_icon_sensitive(gtk.ENTRY_ICON_SECONDARY, False)
|
||||
self.search.connect("icon-release", self.set_search_entry_clear_cb)
|
||||
self.search.set_width_chars(30)
|
||||
self.search.show()
|
||||
|
||||
self.search.connect("focus-in-event", self.set_search_entry_editable_cb)
|
||||
self.search.connect("focus-out-event", self.set_search_entry_reset_cb)
|
||||
self.set_action_widget(self.search, gtk.PACK_END)
|
||||
|
||||
def show_indicator_icon(self, title, number):
|
||||
for child in self.pages:
|
||||
if child.lbl.get_label() == title:
|
||||
child.set_count(number)
|
||||
|
||||
def hide_indicator_icon(self, title):
|
||||
for child in self.pages:
|
||||
if child.lbl.get_label() == title:
|
||||
child.set_count(0)
|
||||
|
||||
def set_search_entry_editable_cb(self, search, event):
|
||||
self.search_focus = True
|
||||
search.set_editable(True)
|
||||
text = search.get_text()
|
||||
if text in self.search_names:
|
||||
search.set_text("")
|
||||
style = self.search.get_style()
|
||||
style.text[gtk.STATE_NORMAL] = self.get_colormap().alloc_color(HobColors.BLACK, False, False)
|
||||
search.set_style(style)
|
||||
|
||||
def set_search_entry_reset_cb(self, search, event):
|
||||
page_num = self.get_current_page()
|
||||
text = search.get_text()
|
||||
if not text:
|
||||
self.reset_entry(search, page_num)
|
||||
|
||||
def reset_entry(self, entry, page_num):
|
||||
style = entry.get_style()
|
||||
style.text[gtk.STATE_NORMAL] = self.get_colormap().alloc_color(HobColors.GRAY, False, False)
|
||||
entry.set_style(style)
|
||||
entry.set_text(self.search_names[page_num])
|
||||
entry.set_tooltip_text(self.search_tips[page_num])
|
||||
entry.set_editable(False)
|
||||
entry.set_icon_sensitive(gtk.ENTRY_ICON_SECONDARY, False)
|
||||
|
||||
def set_search_entry_clear_cb(self, search, icon_pos, event):
|
||||
if search.get_editable() == True:
|
||||
search.set_text("")
|
||||
search.set_icon_sensitive(gtk.ENTRY_ICON_SECONDARY, False)
|
||||
search.grab_focus()
|
||||
|
||||
def set_page(self, title):
|
||||
for child in self.pages:
|
||||
if child.lbl.get_label() == title:
|
||||
child.grab_focus()
|
||||
self.set_current_page(self.pages.index(child))
|
||||
return
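# Hypothetical usage of HobNotebook (not part of the original source); the tab
# title "Recipes" and the widget 'recipes_view' are illustrative names only:
#
#   nb = HobNotebook()
#   nb.append_page(recipes_view, "Recipes", "All recipes parsed from the metadata")
#   nb.show_indicator_icon("Recipes", 3)   # draws a count badge of 3 on the tab
#   nb.hide_indicator_icon("Recipes")      # resets the badge count to 0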
|
||||
|
||||
class HobWarpCellRendererText(gtk.CellRendererText):
|
||||
def __init__(self, col_number):
|
||||
gtk.CellRendererText.__init__(self)
|
||||
self.set_property("wrap-mode", pango.WRAP_WORD_CHAR)
|
||||
self.set_property("wrap-width", 300) # default value wrap width is 300
|
||||
self.col_n = col_number
|
||||
|
||||
def do_render(self, window, widget, background_area, cell_area, expose_area, flags):
|
||||
if widget:
|
||||
self.props.wrap_width = self.get_resized_wrap_width(widget, widget.get_column(self.col_n))
|
||||
return gtk.CellRendererText.do_render(self, window, widget, background_area, cell_area, expose_area, flags)
|
||||
|
||||
def get_resized_wrap_width(self, treeview, column):
|
||||
otherCols = []
|
||||
for col in treeview.get_columns():
|
||||
if col != column:
|
||||
otherCols.append(col)
|
||||
adjwidth = treeview.allocation.width - sum(c.get_width() for c in otherCols)
|
||||
adjwidth -= treeview.style_get_property("horizontal-separator") * 4
|
||||
if self.props.wrap_width == adjwidth or adjwidth <= 0:
|
||||
adjwidth = self.props.wrap_width
|
||||
return adjwidth
|
||||
|
||||
gobject.type_register(HobWarpCellRendererText)
|
||||
|
||||
class HobIconChecker(hic):
|
||||
def set_hob_icon_to_stock_icon(self, file_path, stock_id=""):
|
||||
try:
|
||||
pixbuf = gtk.gdk.pixbuf_new_from_file(file_path)
|
||||
except Exception, e:
|
||||
return None
|
||||
|
||||
if stock_id and (gtk.icon_factory_lookup_default(stock_id) == None):
|
||||
icon_factory = gtk.IconFactory()
|
||||
icon_factory.add_default()
|
||||
icon_factory.add(stock_id, gtk.IconSet(pixbuf))
|
||||
gtk.stock_add([(stock_id, '_label', 0, 0, '')])
|
||||
|
||||
return icon_factory.lookup(stock_id)
|
||||
|
||||
return None
|
||||
|
||||
"""
|
||||
To render Hob icons consistently and avoid the icons looking different across systems or GTK versions, some stock 'gtk icons' are replaced with 'hob icons'.
This function checks the given stock_id, substitutes the corresponding hob id for the gtk id where one exists, and returns the resulting stock id (or the original name unchanged).
|
||||
"""
|
||||
def check_stock_icon(self, stock_name=""):
|
||||
HOB_CHECK_STOCK_NAME = {
|
||||
('hic-dialog-info', 'gtk-dialog-info', 'dialog-info') : self.ICON_INDI_INFO_FILE,
|
||||
('hic-ok', 'gtk-ok', 'ok') : self.ICON_INDI_TICK_FILE,
|
||||
('hic-dialog-error', 'gtk-dialog-error', 'dialog-error') : self.ICON_INDI_ERROR_FILE,
|
||||
('hic-dialog-warning', 'gtk-dialog-warning', 'dialog-warning') : self.ICON_INDI_ALERT_FILE,
|
||||
('hic-task-refresh', 'gtk-execute', 'execute') : self.ICON_INDI_REFRESH_FILE,
|
||||
}
|
||||
valid_stock_id = stock_name
|
||||
if stock_name:
|
||||
for names, path in HOB_CHECK_STOCK_NAME.iteritems():
|
||||
if stock_name in names:
|
||||
valid_stock_id = names[0]
|
||||
if not gtk.icon_factory_lookup_default(valid_stock_id):
|
||||
self.set_hob_icon_to_stock_icon(path, valid_stock_id)
|
||||
|
||||
return valid_stock_id
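# Illustrative sketch (not part of the original source): assuming the hic icon
# files referenced above exist on disk, the mapping behaves roughly as follows:
#
#   checker = HobIconChecker()
#   checker.check_stock_icon('gtk-dialog-error')   # -> 'hic-dialog-error'
#   checker.check_stock_icon('unknown-icon')       # -> 'unknown-icon' (unchanged)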
|
||||
|
||||
class HobCellRendererController(gobject.GObject):
|
||||
(MODE_CYCLE_RUNNING, MODE_ONE_SHORT) = range(2)
|
||||
__gsignals__ = {
|
||||
"run-timer-stopped" : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
()),
|
||||
}
|
||||
def __init__(self, runningmode=MODE_CYCLE_RUNNING, is_draw_row=False):
|
||||
gobject.GObject.__init__(self)
|
||||
self.timeout_id = None
|
||||
self.current_angle_pos = 0.0
|
||||
self.step_angle = 0.0
|
||||
self.tree_headers_height = 0
|
||||
self.running_cell_areas = []
|
||||
self.running_mode = runningmode
|
||||
self.is_queue_draw_row_area = is_draw_row
|
||||
self.force_stop_enable = False
|
||||
|
||||
def is_active(self):
|
||||
if self.timeout_id:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def reset_run(self):
|
||||
self.force_stop()
|
||||
self.running_cell_areas = []
|
||||
self.current_angle_pos = 0.0
|
||||
self.step_angle = 0.0
|
||||
|
||||
''' time_iterval: the basic timer interval in ms (1~1000)
init_usrdata: the user-data value the progress should start from
min_usrdata: the minimum of the user-data range
max_usrdata: the maximum of the user-data range
step: the amount of user data to advance on each tick
Note: init_usrdata should lie between min and max, max should be > min,
and step should be < (max - min)
'''
|
||||
def start_run(self, time_iterval, init_usrdata, min_usrdata, max_usrdata, step, tree):
|
||||
if (not time_iterval) or (not max_usrdata):
|
||||
return
|
||||
usr_range = (max_usrdata - min_usrdata) * 1.0
|
||||
self.current_angle_pos = (init_usrdata * 1.0) / usr_range
|
||||
self.step_angle = (step * 1) / usr_range
|
||||
self.timeout_id = gobject.timeout_add(int(time_iterval),
|
||||
self.make_image_on_progressing_cb, tree)
|
||||
self.tree_headers_height = self.get_treeview_headers_height(tree)
|
||||
self.force_stop_enable = False
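# Example (mirrors the call made later in HobCellRendererPixbuf.do_render):
# run a 200ms timer that advances a value from 0 towards 1000 in steps of 150,
# queueing redraws on 'treeview'; 'treeview' is a placeholder name here:
#
#   ctrl = HobCellRendererController()
#   ctrl.start_run(200, 0, 0, 1000, 150, treeview)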
|
||||
|
||||
def force_stop(self):
|
||||
self.emit("run-timer-stopped")
|
||||
self.force_stop_enable = True
|
||||
if self.timeout_id:
|
||||
if gobject.source_remove(self.timeout_id):
|
||||
self.timeout_id = None
|
||||
|
||||
def on_draw_pixbuf_cb(self, pixbuf, cr, x, y, img_width, img_height, do_refresh=True):
|
||||
if pixbuf:
|
||||
r = max(img_width/2, img_height/2)
|
||||
cr.translate(x + r, y + r)
|
||||
if do_refresh:
|
||||
cr.rotate(2 * math.pi * self.current_angle_pos)
|
||||
|
||||
cr.set_source_pixbuf(pixbuf, -img_width/2, -img_height/2)
|
||||
cr.paint()
|
||||
|
||||
def on_draw_fadeinout_cb(self, cr, color, x, y, width, height, do_fadeout=True):
|
||||
if do_fadeout:
|
||||
alpha = self.current_angle_pos * 0.8
|
||||
else:
|
||||
alpha = (1.0 - self.current_angle_pos) * 0.8
|
||||
|
||||
cr.set_source_rgba(color.red, color.green, color.blue, alpha)
|
||||
cr.rectangle(x, y, width, height)
|
||||
cr.fill()
|
||||
|
||||
def get_treeview_headers_height(self, tree):
|
||||
if tree and (tree.get_property("headers-visible") == True):
|
||||
height = tree.get_allocation().height - tree.get_bin_window().get_size()[1]
|
||||
return height
|
||||
|
||||
return 0
|
||||
|
||||
def make_image_on_progressing_cb(self, tree):
|
||||
self.current_angle_pos += self.step_angle
|
||||
if self.running_mode == self.MODE_CYCLE_RUNNING:
|
||||
if (self.current_angle_pos >= 1):
|
||||
self.current_angle_pos = 0
|
||||
else:
|
||||
if self.current_angle_pos > 1:
|
||||
self.force_stop()
|
||||
return False
|
||||
|
||||
if self.is_queue_draw_row_area:
|
||||
for path in self.running_cell_areas:
|
||||
rect = tree.get_cell_area(path, tree.get_column(0))
|
||||
row_x, _, row_width, _ = tree.get_visible_rect()
|
||||
tree.queue_draw_area(row_x, rect.y + self.tree_headers_height, row_width, rect.height)
|
||||
else:
|
||||
for rect in self.running_cell_areas:
|
||||
tree.queue_draw_area(rect.x, rect.y + self.tree_headers_height, rect.width, rect.height)
|
||||
|
||||
return (not self.force_stop_enable)
|
||||
|
||||
def append_running_cell_area(self, cell_area):
|
||||
if cell_area and (cell_area not in self.running_cell_areas):
|
||||
self.running_cell_areas.append(cell_area)
|
||||
|
||||
def remove_running_cell_area(self, cell_area):
|
||||
if cell_area in self.running_cell_areas:
|
||||
self.running_cell_areas.remove(cell_area)
|
||||
if not self.running_cell_areas:
|
||||
self.reset_run()
|
||||
|
||||
gobject.type_register(HobCellRendererController)
|
||||
|
||||
class HobCellRendererPixbuf(gtk.CellRendererPixbuf):
|
||||
def __init__(self):
|
||||
gtk.CellRendererPixbuf.__init__(self)
|
||||
self.control = HobCellRendererController()
|
||||
# add an icon checker to map gtk stock icons to the corresponding hob icons
|
||||
self.checker = HobIconChecker()
|
||||
self.set_property("stock-size", gtk.ICON_SIZE_DND)
|
||||
|
||||
def get_pixbuf_from_stock_icon(self, widget, stock_id="", size=gtk.ICON_SIZE_DIALOG):
|
||||
if widget and stock_id and gtk.icon_factory_lookup_default(stock_id):
|
||||
return widget.render_icon(stock_id, size)
|
||||
|
||||
return None
|
||||
|
||||
def set_icon_name_to_id(self, new_name):
|
||||
if new_name and type(new_name) == str:
|
||||
# check whether the name needs to be mapped to a hob icon
|
||||
name = self.checker.check_stock_icon(new_name)
|
||||
if name.startswith("hic") or name.startswith("gtk"):
|
||||
stock_id = name
|
||||
else:
|
||||
stock_id = 'gtk-' + name
|
||||
|
||||
return stock_id
|
||||
|
||||
''' render the cell; "icon-name" takes priority over "stock-id" and "pixbuf"
the 'hic-task-refresh' icon is drawn as a spinning (animated) pixbuf
any other icon simply has its pixbuf drawn from the stock image or pixbuf property.
|
||||
'''
|
||||
def do_render(self, window, tree, background_area,cell_area, expose_area, flags):
|
||||
if (not self.control) or (not tree):
|
||||
return
|
||||
|
||||
x, y, w, h = self.on_get_size(tree, cell_area)
|
||||
x += cell_area.x
|
||||
y += cell_area.y
|
||||
w -= 2 * self.get_property("xpad")
|
||||
h -= 2 * self.get_property("ypad")
|
||||
|
||||
stock_id = ""
|
||||
if self.props.icon_name:
|
||||
stock_id = self.set_icon_name_to_id(self.props.icon_name)
|
||||
elif self.props.stock_id:
|
||||
stock_id = self.props.stock_id
|
||||
elif self.props.pixbuf:
|
||||
pix = self.props.pixbuf
|
||||
else:
|
||||
return
|
||||
|
||||
if stock_id:
|
||||
pix = self.get_pixbuf_from_stock_icon(tree, stock_id, self.props.stock_size)
|
||||
if stock_id == 'hic-task-refresh':
|
||||
self.control.append_running_cell_area(cell_area)
|
||||
if self.control.is_active():
|
||||
self.control.on_draw_pixbuf_cb(pix, window.cairo_create(), x, y, w, h, True)
|
||||
else:
|
||||
self.control.start_run(200, 0, 0, 1000, 150, tree)
|
||||
else:
|
||||
self.control.remove_running_cell_area(cell_area)
|
||||
self.control.on_draw_pixbuf_cb(pix, window.cairo_create(), x, y, w, h, False)
|
||||
|
||||
def on_get_size(self, widget, cell_area):
|
||||
if self.props.icon_name or self.props.pixbuf or self.props.stock_id:
|
||||
w, h = gtk.icon_size_lookup(self.props.stock_size)
|
||||
calc_width = self.get_property("xpad") * 2 + w
|
||||
calc_height = self.get_property("ypad") * 2 + h
|
||||
x_offset = 0
|
||||
y_offset = 0
|
||||
if cell_area and w > 0 and h > 0:
|
||||
x_offset = self.get_property("xalign") * (cell_area.width - calc_width - self.get_property("xpad"))
|
||||
y_offset = self.get_property("yalign") * (cell_area.height - calc_height - self.get_property("ypad"))
|
||||
|
||||
return x_offset, y_offset, w, h
|
||||
|
||||
return 0, 0, 0, 0
|
||||
|
||||
gobject.type_register(HobCellRendererPixbuf)
|
||||
|
||||
class HobCellRendererToggle(gtk.CellRendererToggle):
|
||||
def __init__(self):
|
||||
gtk.CellRendererToggle.__init__(self)
|
||||
self.ctrl = HobCellRendererController(is_draw_row=True)
|
||||
self.ctrl.running_mode = self.ctrl.MODE_ONE_SHORT
|
||||
self.cell_attr = {"fadeout": False, "number_of_children": 0}
|
||||
|
||||
def do_render(self, window, widget, background_area, cell_area, expose_area, flags):
|
||||
if (not self.ctrl) or (not widget):
|
||||
return
|
||||
|
||||
if flags & gtk.CELL_RENDERER_SELECTED:
|
||||
state = gtk.STATE_SELECTED
|
||||
else:
|
||||
state = gtk.STATE_NORMAL
|
||||
|
||||
if self.ctrl.is_active():
|
||||
path = widget.get_path_at_pos(cell_area.x + cell_area.width/2, cell_area.y + cell_area.height/2)
|
||||
# cell_area coordinates can be negative (for example while dragging the scroll bar)
# and fall outside the tree container, in which case no valid path is returned
|
||||
if not path: return
|
||||
path = path[0]
|
||||
if path in self.ctrl.running_cell_areas:
|
||||
cr = window.cairo_create()
|
||||
color = widget.get_style().base[state]
|
||||
|
||||
row_x, _, row_width, _ = widget.get_visible_rect()
|
||||
border_y = self.get_property("ypad")
|
||||
self.ctrl.on_draw_fadeinout_cb(cr, color, row_x, cell_area.y - border_y, row_width, \
|
||||
cell_area.height + border_y * 2, self.cell_attr["fadeout"])
|
||||
# draw number of a group
|
||||
if self.cell_attr["number_of_children"]:
|
||||
text = "%d pkg" % self.cell_attr["number_of_children"]
|
||||
pangolayout = widget.create_pango_layout(text)
|
||||
textw, texth = pangolayout.get_pixel_size()
|
||||
x = cell_area.x + (cell_area.width/2) - (textw/2)
|
||||
y = cell_area.y + (cell_area.height/2) - (texth/2)
|
||||
|
||||
widget.style.paint_layout(window, state, True, cell_area, widget, "checkbox", x, y, pangolayout)
|
||||
else:
|
||||
return gtk.CellRendererToggle.do_render(self, window, widget, background_area, cell_area, expose_area, flags)
|
||||
|
||||
'''delay: the fade-out duration in ms (normally 1000ms)
cell_list: the cells that need to be re-rendered
|
||||
'''
|
||||
def fadeout(self, tree, delay, cell_list=None):
|
||||
if (delay < 200) or (not tree):
|
||||
return
|
||||
self.cell_attr["fadeout"] = True
|
||||
self.ctrl.running_cell_areas = cell_list
|
||||
self.ctrl.start_run(200, 0, 0, delay, (delay * 200 / 1000), tree)
|
||||
|
||||
def connect_render_state_changed(self, func, usrdata=None):
|
||||
if not func:
|
||||
return
|
||||
if usrdata:
|
||||
self.ctrl.connect("run-timer-stopped", func, self, usrdata)
|
||||
else:
|
||||
self.ctrl.connect("run-timer-stopped", func, self)
|
||||
|
||||
gobject.type_register(HobCellRendererToggle)
|
||||
bitbake/lib/bb/ui/crumbs/persistenttooltip.py (new file, 186 lines)
@@ -0,0 +1,186 @@
|
||||
#
|
||||
# BitBake Graphical GTK User Interface
|
||||
#
|
||||
# Copyright (C) 2012 Intel Corporation
|
||||
#
|
||||
# Authored by Joshua Lock <josh@linux.intel.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import gobject
|
||||
import gtk
|
||||
try:
|
||||
import gconf
|
||||
except:
|
||||
pass
|
||||
|
||||
class PersistentTooltip(gtk.Window):
|
||||
"""
|
||||
A tooltip which persists once shown until the user dismisses it with the Esc
|
||||
key or by clicking the close button.
|
||||
|
||||
# FIXME: the PersistentTooltip should be disabled when the user clicks anywhere off
|
||||
# it. We can't do this with focus-out-event because being modal ensures we have focus?
|
||||
|
||||
markup: some Pango text markup to display in the tooltip
|
||||
"""
|
||||
def __init__(self, markup, parent_win=None):
|
||||
gtk.Window.__init__(self, gtk.WINDOW_POPUP)
|
||||
|
||||
# Inherit the system theme for a tooltip
|
||||
style = gtk.rc_get_style_by_paths(gtk.settings_get_default(),
|
||||
'gtk-tooltip', 'gtk-tooltip', gobject.TYPE_NONE)
|
||||
self.set_style(style)
|
||||
|
||||
# The placement of the close button on the tip should reflect how the
|
||||
# window manager of the user's system places close buttons. Try to read
|
||||
# the metacity gconf key to determine whether the close button is on the
|
||||
# left or the right.
|
||||
# In the case that we can't determine the user's configuration we default
|
||||
# to close buttons being on the right.
|
||||
__button_right = True
|
||||
try:
|
||||
client = gconf.client_get_default()
|
||||
order = client.get_string("/apps/metacity/general/button_layout")
|
||||
if order and order.endswith(":"):
|
||||
__button_right = False
|
||||
except NameError:
|
||||
pass
|
||||
|
||||
# We need to ensure we're only shown once
|
||||
self.shown = False
|
||||
|
||||
# We don't want any WM decorations
|
||||
self.set_decorated(False)
|
||||
# We don't want to show in the taskbar or window switcher
|
||||
self.set_skip_pager_hint(True)
|
||||
self.set_skip_taskbar_hint(True)
|
||||
# We must be modal to ensure we grab focus when presented from a gtk.Dialog
|
||||
self.set_modal(True)
|
||||
|
||||
self.set_border_width(0)
|
||||
self.set_position(gtk.WIN_POS_MOUSE)
|
||||
self.set_opacity(0.95)
|
||||
|
||||
# Ensure a reasonable minimum size
|
||||
self.set_geometry_hints(self, 100, 50)
|
||||
|
||||
# Set this window as a transient window for parent(main window)
|
||||
if parent_win:
|
||||
self.set_transient_for(parent_win)
|
||||
self.set_destroy_with_parent(True)
|
||||
# Draw our label and close buttons
|
||||
hbox = gtk.HBox(False, 0)
|
||||
hbox.show()
|
||||
self.add(hbox)
|
||||
|
||||
img = gtk.Image()
|
||||
img.set_from_stock(gtk.STOCK_CLOSE, gtk.ICON_SIZE_BUTTON)
|
||||
|
||||
self.button = gtk.Button()
|
||||
self.button.set_image(img)
|
||||
self.button.connect("clicked", self._dismiss_cb)
|
||||
self.button.set_flags(gtk.CAN_DEFAULT)
|
||||
self.button.grab_focus()
|
||||
self.button.show()
|
||||
vbox = gtk.VBox(False, 0)
|
||||
vbox.show()
|
||||
vbox.pack_start(self.button, False, False, 0)
|
||||
if __button_right:
|
||||
hbox.pack_end(vbox, True, True, 0)
|
||||
else:
|
||||
hbox.pack_start(vbox, True, True, 0)
|
||||
|
||||
self.set_default(self.button)
|
||||
|
||||
bin = gtk.HBox(True, 6)
|
||||
bin.set_border_width(6)
|
||||
bin.show()
|
||||
self.label = gtk.Label()
|
||||
self.label.set_line_wrap(True)
|
||||
# We want to match the colours of the normal tooltips, as dictated by
|
||||
# the user's gtk+-2.0 theme, wherever possible - on some systems this
|
||||
# requires explicitly setting a fg_color for the label which matches the
|
||||
# tooltip_fg_color
|
||||
settings = gtk.settings_get_default()
|
||||
colours = settings.get_property('gtk-color-scheme').split('\n')
|
||||
# remove any empty lines, there's likely to be a trailing one after
|
||||
# calling split on a dictionary-like string
|
||||
colours = filter(None, colours)
|
||||
for col in colours:
|
||||
item, val = col.split(': ')
|
||||
if item == 'tooltip_fg_color':
|
||||
style = self.label.get_style()
|
||||
style.fg[gtk.STATE_NORMAL] = gtk.gdk.color_parse(val)
|
||||
self.label.set_style(style)
|
||||
break # we only care for the tooltip_fg_color
|
||||
|
||||
self.label.set_markup(markup)
|
||||
self.label.show()
|
||||
bin.add(self.label)
|
||||
hbox.pack_end(bin, True, True, 6)
|
||||
|
||||
# add the original URL display for user reference
|
||||
if 'a href' in markup:
|
||||
hbox.set_tooltip_text(self.get_markup_url(markup))
|
||||
hbox.show()
|
||||
|
||||
self.connect("key-press-event", self._catch_esc_cb)
|
||||
|
||||
"""
|
||||
Callback when the PersistentTooltip's close button is clicked.
|
||||
Hides the PersistentTooltip.
|
||||
"""
|
||||
def _dismiss_cb(self, button):
|
||||
self.hide()
|
||||
return True
|
||||
|
||||
"""
|
||||
Callback when the Esc key is detected. Hides the PersistentTooltip.
|
||||
"""
|
||||
def _catch_esc_cb(self, widget, event):
|
||||
keyname = gtk.gdk.keyval_name(event.keyval)
|
||||
if keyname == "Escape":
|
||||
self.hide()
|
||||
return True
|
||||
|
||||
"""
|
||||
Called to present the PersistentTooltip.
|
||||
Overrides the superclass's show() method to include state tracking.
|
||||
"""
|
||||
def show(self):
|
||||
if not self.shown:
|
||||
self.shown = True
|
||||
gtk.Window.show(self)
|
||||
|
||||
"""
|
||||
Called to hide the PersistentTooltip.
|
||||
Overrides the superclass's hide() method to include state tracking.
|
||||
"""
|
||||
def hide(self):
|
||||
self.shown = False
|
||||
gtk.Window.hide(self)
|
||||
|
||||
"""
|
||||
Called to get the hyperlink URL from markup text.
|
||||
"""
|
||||
def get_markup_url(self, markup):
|
||||
url = "http:"
|
||||
if markup and type(markup) == str:
|
||||
s = markup
|
||||
if 'http:' in s:
|
||||
import re
|
||||
url = re.search('(http:[^,\\ "]+)', s).group(0)
|
||||
|
||||
return url
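# Illustrative example (not part of the original source) of the extraction above:
#
#   get_markup_url('see <a href="http://www.yoctoproject.org/docs">the docs</a>')
#   # -> 'http://www.yoctoproject.org/docs' (the match stops at the closing quote)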
|
||||
bitbake/lib/bb/ui/crumbs/progress.py (new file, 23 lines)
@@ -0,0 +1,23 @@
|
||||
import gtk
|
||||
|
||||
class ProgressBar(gtk.Dialog):
|
||||
def __init__(self, parent):
|
||||
|
||||
gtk.Dialog.__init__(self, flags=(gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT))
|
||||
self.set_title("Parsing metadata, please wait...")
|
||||
self.set_default_size(500, 0)
|
||||
self.set_transient_for(parent)
|
||||
self.progress = gtk.ProgressBar()
|
||||
self.vbox.pack_start(self.progress)
|
||||
self.show_all()
|
||||
|
||||
def set_text(self, msg):
|
||||
self.progress.set_text(msg)
|
||||
|
||||
def update(self, x, y):
|
||||
self.progress.set_fraction(float(x)/float(y))
|
||||
self.progress.set_text("%2d %%" % (x*100/y))
|
||||
|
||||
def pulse(self):
|
||||
self.progress.set_text("Loading...")
|
||||
self.progress.pulse()
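# Hypothetical usage (not part of the original source); 'window' stands in for
# the application's main gtk.Window:
#
#   pbar = ProgressBar(window)
#   pbar.update(30, 120)   # shows "25 %" with the bar at fraction 0.25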
|
||||
bitbake/lib/bb/ui/crumbs/progressbar.py (new file, 59 lines)
@@ -0,0 +1,59 @@
|
||||
# BitBake Graphical GTK User Interface
|
||||
#
|
||||
# Copyright (C) 2011 Intel Corporation
|
||||
#
|
||||
# Authored by Shane Wang <shane.wang@intel.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import gtk
|
||||
from bb.ui.crumbs.hobcolor import HobColors
|
||||
|
||||
class HobProgressBar (gtk.ProgressBar):
|
||||
def __init__(self):
|
||||
gtk.ProgressBar.__init__(self)
|
||||
self.set_rcstyle(True)
|
||||
self.percentage = 0
|
||||
|
||||
def set_rcstyle(self, status):
|
||||
rcstyle = gtk.RcStyle()
|
||||
rcstyle.fg[2] = gtk.gdk.Color(HobColors.BLACK)
|
||||
if status == "stop":
|
||||
rcstyle.bg[3] = gtk.gdk.Color(HobColors.WARNING)
|
||||
elif status == "fail":
|
||||
rcstyle.bg[3] = gtk.gdk.Color(HobColors.ERROR)
|
||||
else:
|
||||
rcstyle.bg[3] = gtk.gdk.Color(HobColors.RUNNING)
|
||||
self.modify_style(rcstyle)
|
||||
|
||||
def set_title(self, text=None):
|
||||
if not text:
|
||||
text = ""
|
||||
text += " %.0f%%" % self.percentage
|
||||
self.set_text(text)
|
||||
|
||||
def set_stop_title(self, text=None):
|
||||
if not text:
|
||||
text = ""
|
||||
self.set_text(text)
|
||||
|
||||
def reset(self):
|
||||
self.set_fraction(0)
|
||||
self.set_text("")
|
||||
self.set_rcstyle(True)
|
||||
self.percentage = 0
|
||||
|
||||
def update(self, fraction):
|
||||
self.percentage = int(fraction * 100)
|
||||
self.set_fraction(fraction)
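# Hypothetical usage (not part of the original source):
#
#   bar = HobProgressBar()
#   bar.update(0.42)
#   bar.set_title("Building")   # displays "Building 42%"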
|
||||
bitbake/lib/bb/ui/crumbs/puccho.glade (new file, 606 lines)
@@ -0,0 +1,606 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<!DOCTYPE glade-interface SYSTEM "glade-2.0.dtd">
|
||||
<!--Generated with glade3 3.4.5 on Mon Nov 10 12:24:12 2008 -->
|
||||
<glade-interface>
|
||||
<widget class="GtkDialog" id="build_dialog">
|
||||
<property name="title" translatable="yes">Start a build</property>
|
||||
<property name="window_position">GTK_WIN_POS_CENTER_ON_PARENT</property>
|
||||
<property name="type_hint">GDK_WINDOW_TYPE_HINT_DIALOG</property>
|
||||
<property name="has_separator">False</property>
|
||||
<child internal-child="vbox">
|
||||
<widget class="GtkVBox" id="dialog-vbox1">
|
||||
<property name="visible">True</property>
|
||||
<property name="spacing">2</property>
|
||||
<child>
|
||||
<widget class="GtkTable" id="build_table">
|
||||
<property name="visible">True</property>
|
||||
<property name="border_width">6</property>
|
||||
<property name="n_rows">7</property>
|
||||
<property name="n_columns">3</property>
|
||||
<property name="column_spacing">5</property>
|
||||
<property name="row_spacing">6</property>
|
||||
<child>
|
||||
<widget class="GtkAlignment" id="status_alignment">
|
||||
<property name="visible">True</property>
|
||||
<property name="left_padding">12</property>
|
||||
<child>
|
||||
<widget class="GtkHBox" id="status_hbox">
|
||||
<property name="spacing">6</property>
|
||||
<child>
|
||||
<widget class="GtkImage" id="status_image">
|
||||
<property name="visible">True</property>
|
||||
<property name="no_show_all">True</property>
|
||||
<property name="xalign">0</property>
|
||||
<property name="stock">gtk-dialog-error</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">False</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkLabel" id="status_label">
|
||||
<property name="visible">True</property>
|
||||
<property name="xalign">0</property>
|
||||
<property name="label" translatable="yes">If you see this text something is wrong...</property>
|
||||
<property name="use_markup">True</property>
|
||||
<property name="use_underline">True</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="position">1</property>
|
||||
</packing>
|
||||
</child>
|
||||
</widget>
|
||||
</child>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="right_attach">3</property>
|
||||
<property name="top_attach">2</property>
|
||||
<property name="bottom_attach">3</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkLabel" id="label2">
|
||||
<property name="visible">True</property>
|
||||
<property name="xalign">0</property>
|
||||
<property name="label" translatable="yes"><b>Build configuration</b></property>
|
||||
<property name="use_markup">True</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="right_attach">3</property>
|
||||
<property name="top_attach">3</property>
|
||||
<property name="bottom_attach">4</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkComboBox" id="image_combo">
|
||||
<property name="visible">True</property>
|
||||
<property name="sensitive">False</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="left_attach">1</property>
|
||||
<property name="right_attach">2</property>
|
||||
<property name="top_attach">6</property>
|
||||
<property name="bottom_attach">7</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkLabel" id="image_label">
|
||||
<property name="visible">True</property>
|
||||
<property name="sensitive">False</property>
|
||||
<property name="xalign">0</property>
|
||||
<property name="xpad">12</property>
|
||||
<property name="label" translatable="yes">Image:</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="top_attach">6</property>
|
||||
<property name="bottom_attach">7</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkComboBox" id="distribution_combo">
|
||||
<property name="visible">True</property>
|
||||
<property name="sensitive">False</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="left_attach">1</property>
|
||||
<property name="right_attach">2</property>
|
||||
<property name="top_attach">5</property>
|
||||
<property name="bottom_attach">6</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkLabel" id="distribution_label">
|
||||
<property name="visible">True</property>
|
||||
<property name="sensitive">False</property>
|
||||
<property name="xalign">0</property>
|
||||
<property name="xpad">12</property>
|
||||
<property name="label" translatable="yes">Distribution:</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="top_attach">5</property>
|
||||
<property name="bottom_attach">6</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkComboBox" id="machine_combo">
|
||||
<property name="visible">True</property>
|
||||
<property name="sensitive">False</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="left_attach">1</property>
|
||||
<property name="right_attach">2</property>
|
||||
<property name="top_attach">4</property>
|
||||
<property name="bottom_attach">5</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkLabel" id="machine_label">
|
||||
<property name="visible">True</property>
|
||||
<property name="sensitive">False</property>
|
||||
<property name="xalign">0</property>
|
||||
<property name="xpad">12</property>
|
||||
<property name="label" translatable="yes">Machine:</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="top_attach">4</property>
|
||||
<property name="bottom_attach">5</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkButton" id="refresh_button">
|
||||
<property name="visible">True</property>
|
||||
<property name="sensitive">False</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="receives_default">True</property>
|
||||
<property name="label" translatable="yes">gtk-refresh</property>
|
||||
<property name="use_stock">True</property>
|
||||
<property name="response_id">0</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="left_attach">2</property>
|
||||
<property name="right_attach">3</property>
|
||||
<property name="top_attach">1</property>
|
||||
<property name="bottom_attach">2</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkEntry" id="location_entry">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="width_chars">32</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="left_attach">1</property>
|
||||
<property name="right_attach">2</property>
|
||||
<property name="top_attach">1</property>
|
||||
<property name="bottom_attach">2</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkLabel" id="label3">
|
||||
<property name="visible">True</property>
|
||||
<property name="xalign">0</property>
|
||||
<property name="xpad">12</property>
|
||||
<property name="label" translatable="yes">Location:</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="top_attach">1</property>
|
||||
<property name="bottom_attach">2</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkLabel" id="label1">
|
||||
<property name="visible">True</property>
|
||||
<property name="xalign">0</property>
|
||||
<property name="label" translatable="yes"><b>Repository</b></property>
|
||||
<property name="use_markup">True</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="right_attach">3</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkAlignment" id="alignment1">
|
||||
<property name="visible">True</property>
|
||||
<child>
|
||||
<placeholder/>
|
||||
</child>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="left_attach">2</property>
|
||||
<property name="right_attach">3</property>
|
||||
<property name="top_attach">4</property>
|
||||
<property name="bottom_attach">5</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkAlignment" id="alignment2">
|
||||
<property name="visible">True</property>
|
||||
<child>
|
||||
<placeholder/>
|
||||
</child>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="left_attach">2</property>
|
||||
<property name="right_attach">3</property>
|
||||
<property name="top_attach">5</property>
|
||||
<property name="bottom_attach">6</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkAlignment" id="alignment3">
|
||||
<property name="visible">True</property>
|
||||
<child>
|
||||
<placeholder/>
|
||||
</child>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="left_attach">2</property>
|
||||
<property name="right_attach">3</property>
|
||||
<property name="top_attach">6</property>
|
||||
<property name="bottom_attach">7</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="position">1</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child internal-child="action_area">
|
||||
<widget class="GtkHButtonBox" id="dialog-action_area1">
|
||||
<property name="visible">True</property>
|
||||
<property name="layout_style">GTK_BUTTONBOX_END</property>
|
||||
<child>
|
||||
<placeholder/>
|
||||
</child>
|
||||
<child>
|
||||
<placeholder/>
|
||||
</child>
|
||||
<child>
|
||||
<placeholder/>
|
||||
</child>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="pack_type">GTK_PACK_END</property>
|
||||
</packing>
|
||||
</child>
|
||||
</widget>
|
||||
</child>
|
||||
</widget>
|
||||
<widget class="GtkDialog" id="dialog2">
|
||||
<property name="window_position">GTK_WIN_POS_CENTER_ON_PARENT</property>
|
||||
<property name="type_hint">GDK_WINDOW_TYPE_HINT_DIALOG</property>
|
||||
<property name="has_separator">False</property>
|
||||
<child internal-child="vbox">
|
||||
<widget class="GtkVBox" id="dialog-vbox2">
|
||||
<property name="visible">True</property>
|
||||
<property name="spacing">2</property>
|
||||
<child>
|
||||
<widget class="GtkTable" id="table2">
|
||||
<property name="visible">True</property>
|
||||
<property name="border_width">6</property>
|
||||
<property name="n_rows">7</property>
|
||||
<property name="n_columns">3</property>
|
||||
<property name="column_spacing">6</property>
|
||||
<property name="row_spacing">6</property>
|
||||
<child>
|
||||
<widget class="GtkLabel" id="label7">
|
||||
<property name="visible">True</property>
|
||||
<property name="xalign">0</property>
|
||||
<property name="label" translatable="yes"><b>Repositories</b></property>
|
||||
<property name="use_markup">True</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="right_attach">3</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkAlignment" id="alignment4">
|
||||
<property name="visible">True</property>
|
||||
<property name="xalign">0</property>
|
||||
<property name="left_padding">12</property>
|
||||
<child>
|
||||
<widget class="GtkScrolledWindow" id="scrolledwindow1">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
|
||||
<property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
|
||||
<child>
|
||||
<widget class="GtkTreeView" id="treeview1">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="headers_clickable">True</property>
|
||||
</widget>
|
||||
</child>
|
||||
</widget>
|
||||
</child>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="right_attach">3</property>
|
||||
<property name="top_attach">2</property>
|
||||
<property name="bottom_attach">3</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkEntry" id="entry1">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="left_attach">1</property>
|
||||
<property name="right_attach">3</property>
|
||||
<property name="top_attach">1</property>
|
||||
<property name="bottom_attach">2</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkLabel" id="label9">
|
||||
<property name="visible">True</property>
|
||||
<property name="xalign">0</property>
|
||||
<property name="label" translatable="yes"><b>Additional packages</b></property>
|
||||
<property name="use_markup">True</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="right_attach">3</property>
|
||||
<property name="top_attach">4</property>
|
||||
<property name="bottom_attach">5</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkAlignment" id="alignment6">
|
||||
<property name="visible">True</property>
|
||||
<property name="xalign">0</property>
|
||||
<property name="xscale">0</property>
|
||||
<child>
|
||||
<widget class="GtkLabel" id="label8">
|
||||
<property name="visible">True</property>
|
||||
<property name="xalign">0</property>
|
||||
<property name="yalign">0</property>
|
||||
<property name="xpad">12</property>
|
||||
<property name="label" translatable="yes">Location: </property>
|
||||
</widget>
|
||||
</child>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="top_attach">1</property>
|
||||
<property name="bottom_attach">2</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkAlignment" id="alignment7">
|
||||
<property name="visible">True</property>
|
||||
<property name="xalign">1</property>
|
||||
<property name="xscale">0</property>
|
||||
<child>
|
||||
<widget class="GtkHButtonBox" id="hbuttonbox1">
|
||||
<property name="visible">True</property>
|
||||
<property name="spacing">5</property>
|
||||
<child>
|
||||
<widget class="GtkButton" id="button7">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="receives_default">True</property>
|
||||
<property name="label" translatable="yes">gtk-remove</property>
|
||||
<property name="use_stock">True</property>
|
||||
<property name="response_id">0</property>
|
||||
</widget>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkButton" id="button6">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="receives_default">True</property>
|
||||
<property name="label" translatable="yes">gtk-edit</property>
|
||||
<property name="use_stock">True</property>
|
||||
<property name="response_id">0</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="position">1</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkButton" id="button5">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="receives_default">True</property>
|
||||
<property name="label" translatable="yes">gtk-add</property>
|
||||
<property name="use_stock">True</property>
|
||||
<property name="response_id">0</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="position">2</property>
|
||||
</packing>
|
||||
</child>
|
||||
</widget>
|
||||
</child>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="left_attach">1</property>
|
||||
<property name="right_attach">3</property>
|
||||
<property name="top_attach">3</property>
|
||||
<property name="bottom_attach">4</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkAlignment" id="alignment5">
|
||||
<property name="visible">True</property>
|
||||
<child>
|
||||
<placeholder/>
|
||||
</child>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="top_attach">3</property>
|
||||
<property name="bottom_attach">4</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkLabel" id="label10">
|
||||
<property name="visible">True</property>
|
||||
<property name="xalign">0</property>
|
||||
<property name="yalign">0</property>
|
||||
<property name="xpad">12</property>
|
||||
<property name="label" translatable="yes">Search:</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="top_attach">5</property>
|
||||
<property name="bottom_attach">6</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkEntry" id="entry2">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="left_attach">1</property>
|
||||
<property name="right_attach">3</property>
|
||||
<property name="top_attach">5</property>
|
||||
<property name="bottom_attach">6</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkAlignment" id="alignment8">
|
||||
<property name="visible">True</property>
|
||||
<property name="xalign">0</property>
|
||||
<property name="left_padding">12</property>
|
||||
<child>
|
||||
<widget class="GtkScrolledWindow" id="scrolledwindow2">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
|
||||
<property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
|
||||
<child>
|
||||
<widget class="GtkTreeView" id="treeview2">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="headers_clickable">True</property>
|
||||
</widget>
|
||||
</child>
|
||||
</widget>
|
||||
</child>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="right_attach">3</property>
|
||||
<property name="top_attach">6</property>
|
||||
<property name="bottom_attach">7</property>
|
||||
<property name="y_options"></property>
|
||||
</packing>
|
||||
</child>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="position">1</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child internal-child="action_area">
|
||||
<widget class="GtkHButtonBox" id="dialog-action_area2">
|
||||
<property name="visible">True</property>
|
||||
<property name="layout_style">GTK_BUTTONBOX_END</property>
|
||||
<child>
|
||||
<widget class="GtkButton" id="button4">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="receives_default">True</property>
|
||||
<property name="label" translatable="yes">gtk-close</property>
|
||||
<property name="use_stock">True</property>
|
||||
<property name="response_id">0</property>
|
||||
</widget>
|
||||
</child>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="pack_type">GTK_PACK_END</property>
|
||||
</packing>
|
||||
</child>
|
||||
</widget>
|
||||
</child>
|
||||
</widget>
|
||||
<widget class="GtkWindow" id="main_window">
|
||||
<child>
|
||||
<widget class="GtkVBox" id="main_window_vbox">
|
||||
<property name="visible">True</property>
|
||||
<child>
|
||||
<widget class="GtkToolbar" id="main_toolbar">
|
||||
<property name="visible">True</property>
|
||||
<child>
|
||||
<widget class="GtkToolButton" id="main_toolbutton_build">
|
||||
<property name="visible">True</property>
|
||||
<property name="label" translatable="yes">Build</property>
|
||||
<property name="stock_id">gtk-execute</property>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
</packing>
|
||||
</child>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkVPaned" id="vpaned1">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<child>
|
||||
<widget class="GtkScrolledWindow" id="results_scrolledwindow">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
|
||||
<property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
|
||||
<child>
|
||||
<placeholder/>
|
||||
</child>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="resize">False</property>
|
||||
<property name="shrink">True</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<widget class="GtkScrolledWindow" id="progress_scrolledwindow">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
|
||||
<property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
|
||||
<child>
|
||||
<placeholder/>
|
||||
</child>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="resize">True</property>
|
||||
<property name="shrink">True</property>
|
||||
</packing>
|
||||
</child>
|
||||
</widget>
|
||||
<packing>
|
||||
<property name="position">1</property>
|
||||
</packing>
|
||||
</child>
|
||||
</widget>
|
||||
</child>
|
||||
</widget>
|
||||
</glade-interface>
|
||||
bitbake/lib/bb/ui/crumbs/runningbuild.py (new file, 551 lines)
@@ -0,0 +1,551 @@
|
||||
|
||||
#
|
||||
# BitBake Graphical GTK User Interface
|
||||
#
|
||||
# Copyright (C) 2008 Intel Corporation
|
||||
#
|
||||
# Authored by Rob Bradford <rob@linux.intel.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import gtk
|
||||
import gobject
|
||||
import logging
|
||||
import time
|
||||
import urllib
|
||||
import urllib2
|
||||
import pango
|
||||
from bb.ui.crumbs.hobcolor import HobColors
|
||||
from bb.ui.crumbs.hobwidget import HobWarpCellRendererText, HobCellRendererPixbuf
|
||||
|
||||
class RunningBuildModel (gtk.TreeStore):
|
||||
(COL_LOG, COL_PACKAGE, COL_TASK, COL_MESSAGE, COL_ICON, COL_COLOR, COL_NUM_ACTIVE) = range(7)
|
||||
|
||||
def __init__ (self):
|
||||
gtk.TreeStore.__init__ (self,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_INT)
|
||||
|
||||
def failure_model_filter(self, model, it):
|
||||
color = model.get(it, self.COL_COLOR)[0]
|
||||
if not color:
|
||||
return False
|
||||
if color == HobColors.ERROR or color == HobColors.WARNING:
|
||||
return True
|
||||
return False
|
||||
|
||||
def failure_model(self):
|
||||
model = self.filter_new()
|
||||
model.set_visible_func(self.failure_model_filter)
|
||||
return model
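# Illustrative note (not part of the original source): the filtered model only
# exposes rows coloured ERROR or WARNING, so it can back a "failures only" view;
# 'running_build' is a placeholder for a RunningBuild instance:
#
#   failures_view = gtk.TreeView(running_build.model.failure_model())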
|
||||
|
||||
def foreach_cell_func(self, model, path, iter, usr_data=None):
|
||||
if model.get_value(iter, self.COL_ICON) == "gtk-execute":
|
||||
model.set(iter, self.COL_ICON, "")
|
||||
|
||||
def close_task_refresh(self):
|
||||
self.foreach(self.foreach_cell_func, None)
|
||||
|
||||
class RunningBuild (gobject.GObject):
|
||||
__gsignals__ = {
|
||||
'build-started' : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
()),
|
||||
'build-succeeded' : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
()),
|
||||
'build-failed' : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
()),
|
||||
'build-complete' : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
()),
|
||||
'build-aborted' : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
()),
|
||||
'task-started' : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
(gobject.TYPE_PYOBJECT,)),
|
||||
'log-error' : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
()),
|
||||
'log-warning' : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
()),
|
||||
'disk-full' : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
()),
|
||||
'no-provider' : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
(gobject.TYPE_PYOBJECT,)),
|
||||
'log' : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
(gobject.TYPE_STRING, gobject.TYPE_PYOBJECT,)),
|
||||
}
|
||||
pids_to_task = {}
|
||||
tasks_to_iter = {}
|
||||
|
||||
def __init__ (self, sequential=False):
|
||||
gobject.GObject.__init__ (self)
|
||||
self.model = RunningBuildModel()
|
||||
self.sequential = sequential
|
||||
self.buildaborted = False
|
||||
|
||||
def reset (self):
|
||||
self.pids_to_task.clear()
|
||||
self.tasks_to_iter.clear()
|
||||
self.model.clear()
|
||||
|
||||
def handle_event (self, event, pbar=None):
|
||||
# Handle an event from the event queue; this may result in updating
# the model and thus the UI, or it may tell us that the build
# has finished (successfully or not, as the case may be).
|
||||
|
||||
parent = None
|
||||
pid = 0
|
||||
package = None
|
||||
task = None
|
||||
|
||||
# If we have a pid attached to this message/event try and get the
|
||||
# (package, task) pair for it. If we get that then get the parent iter
|
||||
# for the message.
|
||||
if hasattr(event, 'pid'):
|
||||
pid = event.pid
|
||||
if hasattr(event, 'process'):
|
||||
pid = event.process
|
||||
|
||||
if pid and pid in self.pids_to_task:
|
||||
(package, task) = self.pids_to_task[pid]
|
||||
parent = self.tasks_to_iter[(package, task)]
|
||||
|
||||
if(isinstance(event, logging.LogRecord)):
|
||||
if event.taskpid == 0 or event.levelno > logging.INFO:
|
||||
self.emit("log", "handle", event)
|
||||
# FIXME: this is a hack! More info in Yocto #1433
|
||||
# http://bugzilla.pokylinux.org/show_bug.cgi?id=1433, temporarily
|
||||
# mask the error message as it's not informative for the user.
|
||||
if event.msg.startswith("Execution of event handler 'run_buildstats' failed"):
|
||||
return
|
||||
|
||||
if (event.levelno < logging.INFO or
|
||||
event.msg.startswith("Running task")):
|
||||
return # don't add these to the list
|
||||
|
||||
if event.levelno >= logging.ERROR:
|
||||
icon = "dialog-error"
|
||||
color = HobColors.ERROR
|
||||
self.emit("log-error")
|
||||
elif event.levelno >= logging.WARNING:
|
||||
icon = "dialog-warning"
|
||||
color = HobColors.WARNING
|
||||
self.emit("log-warning")
|
||||
else:
|
||||
icon = None
|
||||
color = HobColors.OK
|
||||
|
||||
# if we know which package we belong to, we'll append onto its list.
|
||||
# otherwise, we'll jump to the top of the master list
|
||||
if self.sequential or not parent:
|
||||
tree_add = self.model.append
|
||||
else:
|
||||
tree_add = self.model.prepend
|
||||
tree_add(parent,
|
||||
(None,
|
||||
package,
|
||||
task,
|
||||
event.getMessage(),
|
||||
icon,
|
||||
color,
|
||||
0))
|
||||
|
||||
# if there are warnings while processing a package (parent), mark the
# task with the warning color; if there are errors, the updates are
# handled on TaskFailed.
|
||||
if color == HobColors.WARNING and parent:
|
||||
self.model.set(parent, self.model.COL_COLOR, color)
|
||||
if task: # then we have a parent (package), so update its color
|
||||
self.model.set(self.tasks_to_iter[(package, None)], self.model.COL_COLOR, color)
|
||||
|
||||
elif isinstance(event, bb.build.TaskStarted):
|
||||
(package, task) = (event._package, event._task)
|
||||
|
||||
# Save out this PID.
|
||||
self.pids_to_task[pid] = (package, task)
|
||||
|
||||
# Check if we already have this package in our model. If so then
|
||||
# that can be the parent for the task. Otherwise we create a new
|
||||
# top level for the package.
|
||||
if ((package, None) in self.tasks_to_iter):
|
||||
parent = self.tasks_to_iter[(package, None)]
|
||||
else:
|
||||
if self.sequential:
|
||||
add = self.model.append
|
||||
else:
|
||||
add = self.model.prepend
|
||||
parent = add(None, (None,
|
||||
package,
|
||||
None,
|
||||
"Package: %s" % (package),
|
||||
None,
|
||||
HobColors.OK,
|
||||
0))
|
||||
self.tasks_to_iter[(package, None)] = parent
|
||||
|
||||
# Because this parent package now has an active child, mark it as such.
|
||||
self.model.set(parent, self.model.COL_ICON, "gtk-execute")
|
||||
parent_color = self.model.get(parent, self.model.COL_COLOR)[0]
|
||||
if parent_color != HobColors.ERROR and parent_color != HobColors.WARNING:
|
||||
self.model.set(parent, self.model.COL_COLOR, HobColors.RUNNING)
|
||||
|
||||
# Add an entry in the model for this task
|
||||
i = self.model.append (parent, (None,
|
||||
package,
|
||||
task,
|
||||
"Task: %s" % (task),
|
||||
"gtk-execute",
|
||||
HobColors.RUNNING,
|
||||
0))
|
||||
|
||||
# update the parent's active task count
|
||||
num_active = self.model.get(parent, self.model.COL_NUM_ACTIVE)[0] + 1
|
||||
self.model.set(parent, self.model.COL_NUM_ACTIVE, num_active)
|
||||
|
||||
# Save out the iter so that we can find it when we have a message
|
||||
# that we need to attach to a task.
|
||||
self.tasks_to_iter[(package, task)] = i
|
||||
|
||||
elif isinstance(event, bb.build.TaskBase):
|
||||
self.emit("log", "info", event._message)
|
||||
current = self.tasks_to_iter[(package, task)]
|
||||
parent = self.tasks_to_iter[(package, None)]
|
||||
|
||||
# remove this task from the parent's active count
|
||||
num_active = self.model.get(parent, self.model.COL_NUM_ACTIVE)[0] - 1
|
||||
self.model.set(parent, self.model.COL_NUM_ACTIVE, num_active)
|
||||
|
||||
if isinstance(event, bb.build.TaskFailed):
|
||||
# Mark the task and parent as failed
|
||||
icon = "dialog-error"
|
||||
color = HobColors.ERROR
|
||||
|
||||
logfile = event.logfile
|
||||
if logfile and os.path.exists(logfile):
|
||||
with open(logfile) as f:
|
||||
logdata = f.read()
|
||||
self.model.append(current, ('pastebin', None, None, logdata, 'gtk-error', HobColors.OK, 0))
|
||||
|
||||
for i in (current, parent):
|
||||
self.model.set(i, self.model.COL_ICON, icon,
|
||||
self.model.COL_COLOR, color)
|
||||
else:
|
||||
# Mark the parent package and the task as inactive,
|
||||
# but make sure to preserve error, warnings and active
|
||||
# states
|
||||
parent_color = self.model.get(parent, self.model.COL_COLOR)[0]
|
||||
task_color = self.model.get(current, self.model.COL_COLOR)[0]
|
||||
|
||||
# Mark the task as inactive
|
||||
self.model.set(current, self.model.COL_ICON, None)
|
||||
if task_color != HobColors.ERROR:
|
||||
if task_color == HobColors.WARNING:
|
||||
self.model.set(current, self.model.COL_ICON, 'dialog-warning')
|
||||
else:
|
||||
self.model.set(current, self.model.COL_COLOR, HobColors.OK)
|
||||
|
||||
# Mark the parent as inactive
|
||||
if parent_color != HobColors.ERROR:
|
||||
if parent_color == HobColors.WARNING:
|
||||
self.model.set(parent, self.model.COL_ICON, "dialog-warning")
|
||||
else:
|
||||
self.model.set(parent, self.model.COL_ICON, None)
|
||||
if num_active == 0:
|
||||
self.model.set(parent, self.model.COL_COLOR, HobColors.OK)
|
||||
|
||||
# Clear the iters and the pids since when the task goes away the
|
||||
# pid will no longer be used for messages
|
||||
del self.tasks_to_iter[(package, task)]
|
||||
del self.pids_to_task[pid]
|
||||
|
||||
elif isinstance(event, bb.event.BuildStarted):
|
||||
|
||||
self.emit("build-started")
|
||||
self.model.prepend(None, (None,
|
||||
None,
|
||||
None,
|
||||
"Build Started (%s)" % time.strftime('%m/%d/%Y %H:%M:%S'),
|
||||
None,
|
||||
HobColors.OK,
|
||||
0))
|
||||
if pbar:
|
||||
pbar.update(0, self.progress_total)
|
||||
pbar.set_title(bb.event.getName(event))
|
||||
|
||||
elif isinstance(event, bb.event.BuildCompleted):
|
||||
failures = int (event._failures)
|
||||
self.model.prepend(None, (None,
|
||||
None,
|
||||
None,
|
||||
"Build Completed (%s)" % time.strftime('%m/%d/%Y %H:%M:%S'),
|
||||
None,
|
||||
HobColors.OK,
|
||||
0))
|
||||
|
||||
# Emit the appropriate signal depending on the number of failures
|
||||
if self.buildaborted:
|
||||
self.emit ("build-aborted")
|
||||
self.buildaborted = False
|
||||
elif (failures >= 1):
|
||||
self.emit ("build-failed")
|
||||
else:
|
||||
self.emit ("build-succeeded")
|
||||
# Emit a generic "build-complete" signal for things wishing to
|
||||
# handle when the build is finished
|
||||
self.emit("build-complete")
|
||||
# reset all cells' icon indicators
|
||||
self.model.close_task_refresh()
|
||||
if pbar:
|
||||
pbar.set_text(event.msg)
|
||||
|
||||
elif isinstance(event, bb.event.DiskFull):
|
||||
self.buildaborted = True
|
||||
self.emit("disk-full")
|
||||
|
||||
elif isinstance(event, bb.command.CommandFailed):
|
||||
self.emit("log", "error", "Command execution failed: %s" % (event.error))
|
||||
if event.error.startswith("Exited with"):
|
||||
# If the command fails with an exit code we're done, emit the
|
||||
# generic signal for the UI to notify the user
|
||||
self.emit("build-complete")
|
||||
# reset all cells' icon indicators
|
||||
self.model.close_task_refresh()
|
||||
|
||||
elif isinstance(event, bb.event.CacheLoadStarted) and pbar:
|
||||
pbar.set_title("Loading cache")
|
||||
self.progress_total = event.total
|
||||
pbar.update(0, self.progress_total)
|
||||
elif isinstance(event, bb.event.CacheLoadProgress) and pbar:
|
||||
pbar.update(event.current, self.progress_total)
|
||||
elif isinstance(event, bb.event.CacheLoadCompleted) and pbar:
|
||||
pbar.update(self.progress_total, self.progress_total)
|
||||
pbar.hide()
|
||||
elif isinstance(event, bb.event.ParseStarted) and pbar:
|
||||
if event.total == 0:
|
||||
return
|
||||
pbar.set_title("Processing recipes")
|
||||
self.progress_total = event.total
|
||||
pbar.update(0, self.progress_total)
|
||||
elif isinstance(event, bb.event.ParseProgress) and pbar:
|
||||
pbar.update(event.current, self.progress_total)
|
||||
elif isinstance(event, bb.event.ParseCompleted) and pbar:
|
||||
pbar.hide()
|
||||
# use runqueue events as much as possible to update the progress bar
|
||||
elif isinstance(event, bb.runqueue.runQueueTaskFailed):
|
||||
self.emit("log", "error", "Task %s (%s) failed with exit code '%s'" % (event.taskid, event.taskstring, event.exitcode))
|
||||
elif isinstance(event, bb.runqueue.sceneQueueTaskFailed):
|
||||
self.emit("log", "warn", "Setscene task %s (%s) failed with exit code '%s' - real task will be run instead" \
|
||||
% (event.taskid, event.taskstring, event.exitcode))
|
||||
elif isinstance(event, (bb.runqueue.runQueueTaskStarted, bb.runqueue.sceneQueueTaskStarted)):
|
||||
if isinstance(event, bb.runqueue.sceneQueueTaskStarted):
|
||||
self.emit("log", "info", "Running setscene task %d of %d (%s)" % \
|
||||
(event.stats.completed + event.stats.active + event.stats.failed + 1,
|
||||
event.stats.total, event.taskstring))
|
||||
else:
|
||||
if event.noexec:
|
||||
tasktype = 'noexec task'
|
||||
else:
|
||||
tasktype = 'task'
|
||||
self.emit("log", "info", "Running %s %s of %s (ID: %s, %s)" % \
|
||||
(tasktype, event.stats.completed + event.stats.active + event.stats.failed + 1,
|
||||
event.stats.total, event.taskid, event.taskstring))
|
||||
message = {}
|
||||
message["eventname"] = bb.event.getName(event)
|
||||
num_of_completed = event.stats.completed + event.stats.failed
|
||||
message["current"] = num_of_completed
|
||||
message["total"] = event.stats.total
|
||||
message["title"] = ""
|
||||
message["task"] = event.taskstring
|
||||
self.emit("task-started", message)
|
||||
elif isinstance(event, bb.event.MultipleProviders):
|
||||
self.emit("log", "info", "multiple providers are available for %s%s (%s)" \
|
||||
% (event._is_runtime and "runtime " or "", event._item, ", ".join(event._candidates)))
|
||||
self.emit("log", "info", "consider defining a PREFERRED_PROVIDER entry to match %s" % (event._item))
|
||||
elif isinstance(event, bb.event.NoProvider):
|
||||
msg = ""
|
||||
if event._runtime:
|
||||
r = "R"
|
||||
else:
|
||||
r = ""
|
||||
|
||||
extra = ''
|
||||
if not event._reasons:
|
||||
if event._close_matches:
|
||||
extra = ". Close matches:\n %s" % '\n '.join(event._close_matches)
|
||||
|
||||
if event._dependees:
|
||||
msg = "Nothing %sPROVIDES '%s' (but %s %sDEPENDS on or otherwise requires it)%s\n" % (r, event._item, ", ".join(event._dependees), r, extra)
|
||||
else:
|
||||
msg = "Nothing %sPROVIDES '%s'%s\n" % (r, event._item, extra)
|
||||
if event._reasons:
|
||||
for reason in event._reasons:
|
||||
msg += ("%s\n" % reason)
|
||||
self.emit("no-provider", msg)
|
||||
self.emit("log", "error", msg)
|
||||
elif isinstance(event, bb.event.LogExecTTY):
|
||||
icon = "dialog-warning"
|
||||
color = HobColors.WARNING
|
||||
if self.sequential or not parent:
|
||||
tree_add = self.model.append
|
||||
else:
|
||||
tree_add = self.model.prepend
|
||||
tree_add(parent,
|
||||
(None,
|
||||
package,
|
||||
task,
|
||||
event.msg,
|
||||
icon,
|
||||
color,
|
||||
0))
|
||||
else:
|
||||
if not isinstance(event, (bb.event.BuildBase,
|
||||
bb.event.StampUpdate,
|
||||
bb.event.ConfigParsed,
|
||||
bb.event.RecipeParsed,
|
||||
bb.event.RecipePreFinalise,
|
||||
bb.runqueue.runQueueEvent,
|
||||
bb.runqueue.runQueueExitWait,
|
||||
bb.event.OperationStarted,
|
||||
bb.event.OperationCompleted,
|
||||
bb.event.OperationProgress)):
|
||||
self.emit("log", "error", "Unknown event: %s" % (event.error if hasattr(event, 'error') else 'error'))
|
||||
|
||||
return
|
||||
|
||||
|
||||
def do_pastebin(text):
    url = 'http://pastebin.com/api_public.php'
    params = {'paste_code': text, 'paste_format': 'text'}

    req = urllib2.Request(url, urllib.urlencode(params))
    response = urllib2.urlopen(req)
    paste_url = response.read()

    return paste_url

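do_pastebin() above posts the given log text to the old anonymous pastebin.com api_public.php endpoint via urllib2 and returns the paste URL from the response body. A hedged usage sketch (the log path is illustrative, and the endpoint has long since been retired, so expect failures in practice):

import urllib2

# Illustrative only: read a build log and try to paste it.
try:
    with open("/tmp/build.log") as f:       # hypothetical log file
        print do_pastebin(f.read())
except (IOError, urllib2.URLError) as e:
    print "paste failed: %s" % e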
class RunningBuildTreeView (gtk.TreeView):
|
||||
__gsignals__ = {
|
||||
"button_press_event" : "override"
|
||||
}
|
||||
def __init__ (self, readonly=False, hob=False):
|
||||
gtk.TreeView.__init__ (self)
|
||||
self.readonly = readonly
|
||||
|
||||
# The icon that indicates whether we're building or failed.
|
||||
# the 'hob' flag exists because this code is shared by more than just Hob
|
||||
if hob:
|
||||
renderer = HobCellRendererPixbuf ()
|
||||
else:
|
||||
renderer = gtk.CellRendererPixbuf()
|
||||
col = gtk.TreeViewColumn ("Status", renderer)
|
||||
col.add_attribute (renderer, "icon-name", 4)
|
||||
self.append_column (col)
|
||||
|
||||
# The message of the build.
|
||||
# the 'hob' flag exists because this code is shared by more than just Hob
|
||||
if hob:
|
||||
self.message_renderer = HobWarpCellRendererText (col_number=1)
|
||||
else:
|
||||
self.message_renderer = gtk.CellRendererText ()
|
||||
self.message_column = gtk.TreeViewColumn ("Message", self.message_renderer, text=3)
|
||||
self.message_column.add_attribute(self.message_renderer, 'background', 5)
|
||||
self.message_renderer.set_property('editable', (not self.readonly))
|
||||
self.append_column (self.message_column)
|
||||
|
||||
def do_button_press_event(self, event):
|
||||
gtk.TreeView.do_button_press_event(self, event)
|
||||
|
||||
if event.button == 3:
|
||||
selection = super(RunningBuildTreeView, self).get_selection()
|
||||
(model, it) = selection.get_selected()
|
||||
if it is not None:
|
||||
can_paste = model.get(it, model.COL_LOG)[0]
|
||||
if can_paste == 'pastebin':
|
||||
# build a simple menu with a pastebin option
|
||||
menu = gtk.Menu()
|
||||
menuitem = gtk.MenuItem("Copy")
|
||||
menu.append(menuitem)
|
||||
menuitem.connect("activate", self.clipboard_handler, (model, it))
|
||||
menuitem.show()
|
||||
menuitem = gtk.MenuItem("Send log to pastebin")
|
||||
menu.append(menuitem)
|
||||
menuitem.connect("activate", self.pastebin_handler, (model, it))
|
||||
menuitem.show()
|
||||
menu.show()
|
||||
menu.popup(None, None, None, event.button, event.time)
|
||||
|
||||
def _add_to_clipboard(self, clipping):
|
||||
"""
|
||||
Add the contents of clipping to the system clipboard.
|
||||
"""
|
||||
clipboard = gtk.clipboard_get()
|
||||
clipboard.set_text(clipping)
|
||||
clipboard.store()
|
||||
|
||||
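_add_to_clipboard() above targets the default display clipboard; the same three PyGTK calls work outside the class as well (the string being copied here is illustrative):

import gtk

clipboard = gtk.clipboard_get()
clipboard.set_text("http://example.invalid/paste/1234")   # illustrative text
clipboard.store()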
def pastebin_handler(self, widget, data):
|
||||
"""
|
||||
Send the log data to pastebin, then add the new paste url to the
|
||||
clipboard.
|
||||
"""
|
||||
(model, it) = data
|
||||
paste_url = do_pastebin(model.get(it, model.COL_MESSAGE)[0])
|
||||
|
||||
# @todo Provide visual feedback to the user that it is done and that
|
||||
# it worked.
|
||||
print paste_url
|
||||
|
||||
self._add_to_clipboard(paste_url)
|
||||
|
||||
def clipboard_handler(self, widget, data):
"""
Copy the message of the selected row to the system clipboard.
"""
|
||||
(model, it) = data
|
||||
message = model.get(it, model.COL_MESSAGE)[0]
|
||||
|
||||
self._add_to_clipboard(message)
|
||||
|
||||
class BuildFailureTreeView(gtk.TreeView):
|
||||
|
||||
def __init__ (self):
|
||||
gtk.TreeView.__init__(self)
|
||||
self.set_rules_hint(False)
|
||||
self.set_headers_visible(False)
|
||||
self.get_selection().set_mode(gtk.SELECTION_SINGLE)
|
||||
|
||||
# The icon that indicates whether we're building or failed.
|
||||
renderer = HobCellRendererPixbuf ()
|
||||
col = gtk.TreeViewColumn ("Status", renderer)
|
||||
col.add_attribute (renderer, "icon-name", RunningBuildModel.COL_ICON)
|
||||
self.append_column (col)
|
||||
|
||||
# The message of the build.
|
||||
self.message_renderer = HobWarpCellRendererText (col_number=1)
|
||||
self.message_column = gtk.TreeViewColumn ("Message", self.message_renderer, text=RunningBuildModel.COL_MESSAGE, background=RunningBuildModel.COL_COLOR)
|
||||
self.append_column (self.message_column)
|
||||
bitbake/lib/bb/ui/crumbs/utils.py (new file, 34 lines)
@@ -0,0 +1,34 @@
|
||||
#
|
||||
# BitBake UI Utils
|
||||
#
|
||||
# Copyright (C) 2012 Intel Corporation
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
# This utility method looks for xterm or vte and returns the
# first that exists. Currently we are keeping this simple, but
# we will likely move the oe.terminal implementation into
# bitbake, which will allow more flexibility.
|
||||
|
||||
import os
import bb

def which_terminal():
    term = bb.utils.which(os.environ["PATH"], "xterm")
    if term:
        return term + " -e "
    term = bb.utils.which(os.environ["PATH"], "vte")
    if term:
        return term + " -c "
    return None
|
||||
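which_terminal() returns a command prefix ready to have a program appended ("xterm -e " or "vte -c "), or None when neither terminal is installed. A small illustrative caller (the command being launched is made up for the example):

import subprocess

term = which_terminal()
if term:
    # e.g. "xterm -e bash"; shell=True because the prefix is a plain string.
    subprocess.call(term + "bash", shell=True)
else:
    print "no supported terminal (xterm/vte) found in PATH"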
@@ -18,15 +18,14 @@
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import sys
|
||||
import gi
|
||||
gi.require_version('Gtk', '3.0')
|
||||
from gi.repository import Gtk, Gdk, GObject
|
||||
from multiprocessing import Queue
|
||||
import gobject
|
||||
import gtk
|
||||
import Queue
|
||||
import threading
|
||||
from xmlrpc import client
|
||||
import time
|
||||
import xmlrpclib
|
||||
import bb
|
||||
import bb.event
|
||||
from bb.ui.crumbs.progressbar import HobProgressBar
|
||||
|
||||
# Package Model
|
||||
(COL_PKG_NAME) = (0)
|
||||
@@ -36,19 +35,19 @@ import bb.event
|
||||
(COL_DEP_TYPE, COL_DEP_PARENT, COL_DEP_PACKAGE) = (0, 1, 2)
|
||||
|
||||
|
||||
class PackageDepView(Gtk.TreeView):
|
||||
class PackageDepView(gtk.TreeView):
|
||||
def __init__(self, model, dep_type, label):
|
||||
Gtk.TreeView.__init__(self)
|
||||
gtk.TreeView.__init__(self)
|
||||
self.current = None
|
||||
self.dep_type = dep_type
|
||||
self.filter_model = model.filter_new()
|
||||
self.filter_model.set_visible_func(self._filter, data=None)
|
||||
self.filter_model.set_visible_func(self._filter)
|
||||
self.set_model(self.filter_model)
|
||||
self.append_column(Gtk.TreeViewColumn(label, Gtk.CellRendererText(), text=COL_DEP_PACKAGE))
|
||||
#self.connect("row-activated", self.on_package_activated, COL_DEP_PACKAGE)
|
||||
self.append_column(gtk.TreeViewColumn(label, gtk.CellRendererText(), text=COL_DEP_PACKAGE))
|
||||
|
||||
def _filter(self, model, iter, data):
|
||||
this_type = model[iter][COL_DEP_TYPE]
|
||||
package = model[iter][COL_DEP_PARENT]
|
||||
def _filter(self, model, iter):
|
||||
(this_type, package) = model.get(iter, COL_DEP_TYPE, COL_DEP_PARENT)
|
||||
if this_type != self.dep_type: return False
|
||||
return package == self.current
|
||||
|
||||
@@ -57,17 +56,17 @@ class PackageDepView(Gtk.TreeView):
|
||||
self.filter_model.refilter()
|
||||
|
||||
|
||||
class PackageReverseDepView(Gtk.TreeView):
|
||||
class PackageReverseDepView(gtk.TreeView):
|
||||
def __init__(self, model, label):
|
||||
Gtk.TreeView.__init__(self)
|
||||
gtk.TreeView.__init__(self)
|
||||
self.current = None
|
||||
self.filter_model = model.filter_new()
|
||||
self.filter_model.set_visible_func(self._filter)
|
||||
self.set_model(self.filter_model)
|
||||
self.append_column(Gtk.TreeViewColumn(label, Gtk.CellRendererText(), text=COL_DEP_PARENT))
|
||||
self.append_column(gtk.TreeViewColumn(label, gtk.CellRendererText(), text=COL_DEP_PARENT))
|
||||
|
||||
def _filter(self, model, iter, data):
|
||||
package = model[iter][COL_DEP_PACKAGE]
|
||||
def _filter(self, model, iter):
|
||||
package = model.get_value(iter, COL_DEP_PACKAGE)
|
||||
return package == self.current
|
||||
|
||||
def set_current_package(self, package):
|
||||
@@ -75,50 +74,50 @@ class PackageReverseDepView(Gtk.TreeView):
|
||||
self.filter_model.refilter()
|
||||
|
||||
|
||||
class DepExplorer(Gtk.Window):
|
||||
class DepExplorer(gtk.Window):
|
||||
def __init__(self):
|
||||
Gtk.Window.__init__(self)
|
||||
gtk.Window.__init__(self)
|
||||
self.set_title("Dependency Explorer")
|
||||
self.set_default_size(500, 500)
|
||||
self.connect("delete-event", Gtk.main_quit)
|
||||
self.connect("delete-event", gtk.main_quit)
|
||||
|
||||
# Create the data models
|
||||
self.pkg_model = Gtk.ListStore(GObject.TYPE_STRING)
|
||||
self.pkg_model.set_sort_column_id(COL_PKG_NAME, Gtk.SortType.ASCENDING)
|
||||
self.depends_model = Gtk.ListStore(GObject.TYPE_INT, GObject.TYPE_STRING, GObject.TYPE_STRING)
|
||||
self.depends_model.set_sort_column_id(COL_DEP_PACKAGE, Gtk.SortType.ASCENDING)
|
||||
self.pkg_model = gtk.ListStore(gobject.TYPE_STRING)
|
||||
self.pkg_model.set_sort_column_id(COL_PKG_NAME, gtk.SORT_ASCENDING)
|
||||
self.depends_model = gtk.ListStore(gobject.TYPE_INT, gobject.TYPE_STRING, gobject.TYPE_STRING)
|
||||
self.depends_model.set_sort_column_id(COL_DEP_PACKAGE, gtk.SORT_ASCENDING)
|
||||
|
||||
pane = Gtk.HPaned()
|
||||
pane = gtk.HPaned()
|
||||
pane.set_position(250)
|
||||
self.add(pane)
|
||||
|
||||
# The master list of packages
|
||||
scrolled = Gtk.ScrolledWindow()
|
||||
scrolled.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
|
||||
scrolled.set_shadow_type(Gtk.ShadowType.IN)
|
||||
scrolled = gtk.ScrolledWindow()
|
||||
scrolled.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
|
||||
scrolled.set_shadow_type(gtk.SHADOW_IN)
|
||||
|
||||
self.pkg_treeview = Gtk.TreeView(self.pkg_model)
|
||||
self.pkg_treeview = gtk.TreeView(self.pkg_model)
|
||||
self.pkg_treeview.get_selection().connect("changed", self.on_cursor_changed)
|
||||
column = Gtk.TreeViewColumn("Package", Gtk.CellRendererText(), text=COL_PKG_NAME)
|
||||
column = gtk.TreeViewColumn("Package", gtk.CellRendererText(), text=COL_PKG_NAME)
|
||||
self.pkg_treeview.append_column(column)
|
||||
pane.add1(scrolled)
|
||||
scrolled.add(self.pkg_treeview)
|
||||
|
||||
box = Gtk.VBox(homogeneous=True, spacing=4)
|
||||
box = gtk.VBox(homogeneous=True, spacing=4)
|
||||
|
||||
# Runtime Depends
|
||||
scrolled = Gtk.ScrolledWindow()
|
||||
scrolled.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
|
||||
scrolled.set_shadow_type(Gtk.ShadowType.IN)
|
||||
scrolled = gtk.ScrolledWindow()
|
||||
scrolled.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
|
||||
scrolled.set_shadow_type(gtk.SHADOW_IN)
|
||||
self.rdep_treeview = PackageDepView(self.depends_model, TYPE_RDEP, "Runtime Depends")
|
||||
self.rdep_treeview.connect("row-activated", self.on_package_activated, COL_DEP_PACKAGE)
|
||||
scrolled.add(self.rdep_treeview)
|
||||
box.add(scrolled)
|
||||
|
||||
# Build Depends
|
||||
scrolled = Gtk.ScrolledWindow()
|
||||
scrolled.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
|
||||
scrolled.set_shadow_type(Gtk.ShadowType.IN)
|
||||
scrolled = gtk.ScrolledWindow()
|
||||
scrolled.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
|
||||
scrolled.set_shadow_type(gtk.SHADOW_IN)
|
||||
self.dep_treeview = PackageDepView(self.depends_model, TYPE_DEP, "Build Depends")
|
||||
self.dep_treeview.connect("row-activated", self.on_package_activated, COL_DEP_PACKAGE)
|
||||
scrolled.add(self.dep_treeview)
|
||||
@@ -126,9 +125,9 @@ class DepExplorer(Gtk.Window):
|
||||
pane.add2(box)
|
||||
|
||||
# Reverse Depends
|
||||
scrolled = Gtk.ScrolledWindow()
|
||||
scrolled.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
|
||||
scrolled.set_shadow_type(Gtk.ShadowType.IN)
|
||||
scrolled = gtk.ScrolledWindow()
|
||||
scrolled.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
|
||||
scrolled.set_shadow_type(gtk.SHADOW_IN)
|
||||
self.revdep_treeview = PackageReverseDepView(self.depends_model, "Reverse Depends")
|
||||
self.revdep_treeview.connect("row-activated", self.on_package_activated, COL_DEP_PARENT)
|
||||
scrolled.add(self.revdep_treeview)
|
||||
@@ -184,23 +183,15 @@ class gtkthread(threading.Thread):
|
||||
threading.Thread.__init__(self)
|
||||
self.setDaemon(True)
|
||||
self.shutdown = shutdown
|
||||
if not Gtk.init_check()[0]:
|
||||
sys.stderr.write("Gtk+ init failed. Make sure DISPLAY variable is set.\n")
|
||||
gtkthread.quit.set()
|
||||
|
||||
def run(self):
|
||||
GObject.threads_init()
|
||||
Gdk.threads_init()
|
||||
Gtk.main()
|
||||
gobject.threads_init()
|
||||
gtk.gdk.threads_init()
|
||||
gtk.main()
|
||||
gtkthread.quit.set()
|
||||
|
||||
|
||||
def main(server, eventHandler, params):
|
||||
shutdown = 0
|
||||
|
||||
gtkgui = gtkthread(shutdown)
|
||||
gtkgui.start()
|
||||
|
||||
try:
|
||||
params.updateFromServer(server)
|
||||
cmdline = params.parseActions()
|
||||
@@ -221,24 +212,31 @@ def main(server, eventHandler, params):
|
||||
elif ret != True:
|
||||
print("Error running command '%s': returned %s" % (cmdline, ret))
|
||||
return 1
|
||||
except client.Fault as x:
|
||||
except xmlrpclib.Fault as x:
|
||||
print("XMLRPC Fault getting commandline:\n %s" % x)
|
||||
return
|
||||
|
||||
if gtkthread.quit.isSet():
|
||||
try:
|
||||
gtk.init_check()
|
||||
except RuntimeError:
|
||||
sys.stderr.write("Please set DISPLAY variable before running this command \n")
|
||||
return
|
||||
|
||||
Gdk.threads_enter()
|
||||
shutdown = 0
|
||||
|
||||
gtkgui = gtkthread(shutdown)
|
||||
gtkgui.start()
|
||||
|
||||
gtk.gdk.threads_enter()
|
||||
dep = DepExplorer()
|
||||
bardialog = Gtk.Dialog(parent=dep,
|
||||
flags=Gtk.DialogFlags.MODAL|Gtk.DialogFlags.DESTROY_WITH_PARENT)
|
||||
bardialog = gtk.Dialog(parent=dep,
|
||||
flags=gtk.DIALOG_MODAL|gtk.DIALOG_DESTROY_WITH_PARENT)
|
||||
bardialog.set_default_size(400, 50)
|
||||
box = bardialog.get_content_area()
|
||||
pbar = Gtk.ProgressBar()
|
||||
box.pack_start(pbar, True, True, 0)
|
||||
pbar = HobProgressBar()
|
||||
bardialog.vbox.pack_start(pbar)
|
||||
bardialog.show_all()
|
||||
bardialog.connect("delete-event", Gtk.main_quit)
|
||||
Gdk.threads_leave()
|
||||
bardialog.connect("delete-event", gtk.main_quit)
|
||||
gtk.gdk.threads_leave()
|
||||
|
||||
progress_total = 0
|
||||
while True:
|
||||
@@ -255,48 +253,49 @@ def main(server, eventHandler, params):
|
||||
|
||||
if isinstance(event, bb.event.CacheLoadStarted):
|
||||
progress_total = event.total
|
||||
Gdk.threads_enter()
|
||||
gtk.gdk.threads_enter()
|
||||
bardialog.set_title("Loading Cache")
|
||||
pbar.set_fraction(0.0)
|
||||
Gdk.threads_leave()
|
||||
pbar.update(0)
|
||||
gtk.gdk.threads_leave()
|
||||
|
||||
if isinstance(event, bb.event.CacheLoadProgress):
|
||||
x = event.current
|
||||
Gdk.threads_enter()
|
||||
pbar.set_fraction(x * 1.0 / progress_total)
|
||||
Gdk.threads_leave()
|
||||
gtk.gdk.threads_enter()
|
||||
pbar.update(x * 1.0 / progress_total)
|
||||
pbar.set_title('')
|
||||
gtk.gdk.threads_leave()
|
||||
continue
|
||||
|
||||
if isinstance(event, bb.event.CacheLoadCompleted):
|
||||
bardialog.hide()
|
||||
continue
|
||||
|
||||
if isinstance(event, bb.event.ParseStarted):
|
||||
progress_total = event.total
|
||||
if progress_total == 0:
|
||||
continue
|
||||
Gdk.threads_enter()
|
||||
pbar.set_fraction(0.0)
|
||||
gtk.gdk.threads_enter()
|
||||
pbar.update(0)
|
||||
bardialog.set_title("Processing recipes")
|
||||
Gdk.threads_leave()
|
||||
|
||||
gtk.gdk.threads_leave()
|
||||
|
||||
if isinstance(event, bb.event.ParseProgress):
|
||||
x = event.current
|
||||
Gdk.threads_enter()
|
||||
pbar.set_fraction(x * 1.0 / progress_total)
|
||||
Gdk.threads_leave()
|
||||
gtk.gdk.threads_enter()
|
||||
pbar.update(x * 1.0 / progress_total)
|
||||
pbar.set_title('')
|
||||
gtk.gdk.threads_leave()
|
||||
continue
|
||||
|
||||
if isinstance(event, bb.event.ParseCompleted):
|
||||
Gdk.threads_enter()
|
||||
bardialog.set_title("Generating dependency tree")
|
||||
Gdk.threads_leave()
|
||||
bardialog.hide()
|
||||
continue
|
||||
|
||||
if isinstance(event, bb.event.DepTreeGenerated):
|
||||
Gdk.threads_enter()
|
||||
bardialog.hide()
|
||||
gtk.gdk.threads_enter()
|
||||
dep.parse(event._depgraph)
|
||||
Gdk.threads_leave()
|
||||
gtk.gdk.threads_leave()
|
||||
|
||||
if isinstance(event, bb.command.CommandCompleted):
|
||||
continue
|
||||
|
||||
bitbake/lib/bb/ui/goggle.py (new file, 121 lines)
@@ -0,0 +1,121 @@
|
||||
#
|
||||
# BitBake Graphical GTK User Interface
|
||||
#
|
||||
# Copyright (C) 2008 Intel Corporation
|
||||
#
|
||||
# Authored by Rob Bradford <rob@linux.intel.com>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import gobject
import gtk
import xmlrpclib
from bb.ui.crumbs.runningbuild import RunningBuildTreeView, RunningBuild
from bb.ui.crumbs.progress import ProgressBar

import Queue


def event_handle_idle_func (eventHandler, build, pbar):

    # Consume as many messages as we can in the time available to us
    event = eventHandler.getEvent()
    while event:
        build.handle_event (event, pbar)
        event = eventHandler.getEvent()

    return True

def scroll_tv_cb (model, path, iter, view):
    view.scroll_to_cell (path)
|
||||
|
||||
# @todo hook these into the GUI so the user has feedback...
|
||||
def running_build_failed_cb (running_build):
|
||||
pass
|
||||
|
||||
|
||||
def running_build_succeeded_cb (running_build):
|
||||
pass
|
||||
|
||||
|
||||
class MainWindow (gtk.Window):
|
||||
def __init__ (self):
|
||||
gtk.Window.__init__ (self, gtk.WINDOW_TOPLEVEL)
|
||||
|
||||
# Setup tree view and the scrolled window
|
||||
scrolled_window = gtk.ScrolledWindow ()
|
||||
self.add (scrolled_window)
|
||||
self.cur_build_tv = RunningBuildTreeView()
|
||||
self.connect("delete-event", gtk.main_quit)
|
||||
self.set_default_size(640, 480)
|
||||
scrolled_window.add (self.cur_build_tv)
|
||||
|
||||
|
||||
def main (server, eventHandler, params):
|
||||
gobject.threads_init()
|
||||
gtk.gdk.threads_init()
|
||||
|
||||
window = MainWindow ()
|
||||
window.show_all ()
|
||||
pbar = ProgressBar(window)
|
||||
pbar.connect("delete-event", gtk.main_quit)
|
||||
|
||||
# Create the object for the current build
|
||||
running_build = RunningBuild ()
|
||||
window.cur_build_tv.set_model (running_build.model)
|
||||
running_build.model.connect("row-inserted", scroll_tv_cb, window.cur_build_tv)
|
||||
running_build.connect ("build-succeeded", running_build_succeeded_cb)
|
||||
running_build.connect ("build-failed", running_build_failed_cb)
|
||||
|
||||
try:
|
||||
params.updateFromServer(server)
|
||||
cmdline = params.parseActions()
|
||||
if not cmdline:
|
||||
print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
|
||||
return 1
|
||||
if 'msg' in cmdline and cmdline['msg']:
|
||||
logger.error(cmdline['msg'])
|
||||
return 1
|
||||
cmdline = cmdline['action']
|
||||
ret, error = server.runCommand(cmdline)
|
||||
if error:
|
||||
print("Error running command '%s': %s" % (cmdline, error))
|
||||
return 1
|
||||
elif ret != True:
|
||||
print("Error running command '%s': returned %s" % (cmdline, ret))
|
||||
return 1
|
||||
except xmlrpclib.Fault as x:
|
||||
print("XMLRPC Fault getting commandline:\n %s" % x)
|
||||
return 1
|
||||
|
||||
# Use a timeout function for probing the event queue to find out if we
|
||||
# have a message waiting for us.
|
||||
gobject.timeout_add (100,
|
||||
event_handle_idle_func,
|
||||
eventHandler,
|
||||
running_build,
|
||||
pbar)
|
||||
|
||||
try:
|
||||
gtk.main()
|
||||
except EnvironmentError as ioerror:
|
||||
# ignore interrupted io
|
||||
if ioerror.args[0] == 4:
|
||||
pass
|
||||
except KeyboardInterrupt:
|
||||
pass
|
||||
finally:
|
||||
server.runCommand(["stateForceShutdown"])
|
||||
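goggle.py's main() above drives the UI by polling: gobject.timeout_add() schedules event_handle_idle_func() every 100 ms, and the callback keeps itself scheduled by returning True. A stripped-down, self-contained illustration of that timeout pattern (the callback body is invented for the example):

import gobject
import gtk

state = {"polls": 0}

def poll_queue():
    # Stand-in for event_handle_idle_func(): do a little work each tick.
    state["polls"] += 1
    if state["polls"] >= 5:
        gtk.main_quit()
        return False        # returning False unschedules the timeout
    return True             # returning True keeps the timeout running

gobject.timeout_add(100, poll_queue)
gtk.main()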
|
||||
@@ -22,7 +22,7 @@ from __future__ import division
|
||||
|
||||
import os
|
||||
import sys
|
||||
import xmlrpc.client as xmlrpclib
|
||||
import xmlrpclib
|
||||
import logging
|
||||
import progressbar
|
||||
import signal
|
||||
@@ -184,9 +184,8 @@ class TerminalFilter(object):
|
||||
def clearFooter(self):
|
||||
if self.footer_present:
|
||||
lines = self.footer_present
|
||||
sys.stdout.buffer.write(self.curses.tparm(self.cuu, lines))
|
||||
sys.stdout.buffer.write(self.curses.tparm(self.ed))
|
||||
sys.stdout.flush()
|
||||
sys.stdout.write(self.curses.tparm(self.cuu, lines))
|
||||
sys.stdout.write(self.curses.tparm(self.ed))
|
||||
self.footer_present = False
|
||||
|
||||
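clearFooter() above repositions the cursor with the terminfo "cuu" capability (cursor up N lines) and then erases to the end of the display with "ed". A rough standalone sketch of the same idea using the standard curses module (the capability names are real terminfo entries; the helper itself is illustrative):

import curses
import sys

curses.setupterm()
cuu = curses.tigetstr('cuu')    # parametrised "cursor up N lines"
ed = curses.tigetstr('ed')      # "erase to end of display"

def clear_last_lines(lines):
    if cuu and ed:
        sys.stdout.write(curses.tparm(cuu, lines))
        sys.stdout.write(curses.tparm(ed))
        sys.stdout.flush()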
def updateFooter(self):
|
||||
@@ -279,7 +278,6 @@ def main(server, eventHandler, params, tf = TerminalFilter):
|
||||
server.terminateServer()
|
||||
return
|
||||
|
||||
consolelog = None
|
||||
if consolelogfile and not params.options.show_environment and not params.options.show_versions:
|
||||
bb.utils.mkdirhier(os.path.dirname(consolelogfile))
|
||||
conlogformat = bb.msg.BBLogFormatter(format_str)
|
||||
@@ -352,7 +350,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
|
||||
tries -= 1
|
||||
if tries:
|
||||
continue
|
||||
logger.warning(event.msg)
|
||||
logger.warn(event.msg)
|
||||
continue
|
||||
|
||||
if isinstance(event, logging.LogRecord):
|
||||
@@ -379,7 +377,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
|
||||
continue
|
||||
|
||||
if isinstance(event, bb.build.TaskFailedSilent):
|
||||
logger.warning("Logfile for failed setscene task is %s" % event.logfile)
|
||||
logger.warn("Logfile for failed setscene task is %s" % event.logfile)
|
||||
continue
|
||||
if isinstance(event, bb.build.TaskFailed):
|
||||
return_value = 1
|
||||
@@ -511,8 +509,8 @@ def main(server, eventHandler, params, tf = TerminalFilter):
|
||||
continue
|
||||
|
||||
if isinstance(event, bb.runqueue.sceneQueueTaskFailed):
|
||||
logger.warning("Setscene task %s (%s) failed with exit code '%s' - real task will be run instead",
|
||||
event.taskid, event.taskstring, event.exitcode)
|
||||
logger.warn("Setscene task %s (%s) failed with exit code '%s' - real task will be run instead",
|
||||
event.taskid, event.taskstring, event.exitcode)
|
||||
continue
|
||||
|
||||
if isinstance(event, bb.event.DepTreeGenerated):
|
||||
@@ -569,7 +567,6 @@ def main(server, eventHandler, params, tf = TerminalFilter):
|
||||
main.shutdown = 2
|
||||
return_value = 1
|
||||
try:
|
||||
termfilter.clearFooter()
|
||||
summary = ""
|
||||
if taskfailures:
|
||||
summary += pluralise("\nSummary: %s task failed:",
|
||||
@@ -594,8 +591,4 @@ def main(server, eventHandler, params, tf = TerminalFilter):
|
||||
if e.errno == errno.EPIPE:
|
||||
pass
|
||||
|
||||
if consolelog:
|
||||
logger.removeHandler(consolelog)
|
||||
consolelog.close()
|
||||
|
||||
return return_value
|
||||
|
||||
@@ -45,7 +45,7 @@
|
||||
"""
|
||||
|
||||
|
||||
|
||||
from __future__ import division
|
||||
import logging
|
||||
import os, sys, itertools, time, subprocess
|
||||
|
||||
@@ -55,7 +55,7 @@ except ImportError:
|
||||
sys.exit("FATAL: The ncurses ui could not load the required curses python module.")
|
||||
|
||||
import bb
|
||||
import xmlrpc.client
|
||||
import xmlrpclib
|
||||
from bb import ui
|
||||
from bb.ui import uihelper
|
||||
|
||||
@@ -252,7 +252,7 @@ class NCursesUI:
|
||||
elif ret != True:
|
||||
print("Couldn't get default commandline! %s" % ret)
|
||||
return
|
||||
except xmlrpc.client.Fault as x:
|
||||
except xmlrpclib.Fault as x:
|
||||
print("XMLRPC Fault getting commandline:\n %s" % x)
|
||||
return
|
||||
|
||||
@@ -331,7 +331,7 @@ class NCursesUI:
|
||||
taw.setText(0, 0, "")
|
||||
if activetasks:
|
||||
taw.appendText("Active Tasks:\n")
|
||||
for task in activetasks.values():
|
||||
for task in activetasks.itervalues():
|
||||
taw.appendText(task["title"] + '\n')
|
||||
if failedtasks:
|
||||
taw.appendText("Failed Tasks:\n")
|
||||
|
||||
@@ -39,7 +39,7 @@ import os
|
||||
# module properties for UI modules are read by bitbake and the contract should not be broken
|
||||
|
||||
|
||||
featureSet = [bb.cooker.CookerFeatures.HOB_EXTRA_CACHES, bb.cooker.CookerFeatures.BASEDATASTORE_TRACKING, bb.cooker.CookerFeatures.SEND_SANITYEVENTS]
|
||||
featureSet = [bb.cooker.CookerFeatures.HOB_EXTRA_CACHES, bb.cooker.CookerFeatures.SEND_DEPENDS_TREE, bb.cooker.CookerFeatures.BASEDATASTORE_TRACKING, bb.cooker.CookerFeatures.SEND_SANITYEVENTS]
|
||||
|
||||
logger = logging.getLogger("ToasterLogger")
|
||||
interactive = sys.stdout.isatty()
|
||||
@@ -163,7 +163,7 @@ def main(server, eventHandler, params):
|
||||
inheritlist, _ = server.runCommand(["getVariable", "INHERIT"])
|
||||
|
||||
if not "buildhistory" in inheritlist.split(" "):
|
||||
logger.warning("buildhistory is not enabled. Please enable INHERIT += \"buildhistory\" to see image details.")
|
||||
logger.warn("buildhistory is not enabled. Please enable INHERIT += \"buildhistory\" to see image details.")
|
||||
build_history_enabled = False
|
||||
|
||||
if not params.observe_only:
|
||||
@@ -433,7 +433,7 @@ def main(server, eventHandler, params):
|
||||
buildinfohelper.store_dependency_information(event)
|
||||
continue
|
||||
|
||||
logger.warning("Unknown event: %s", event)
|
||||
logger.warn("Unknown event: %s", event)
|
||||
return_value += 1
|
||||
|
||||
except EnvironmentError as ioerror:
|
||||
@@ -441,22 +441,7 @@ def main(server, eventHandler, params):
|
||||
if ioerror.args[0] == 4:
|
||||
pass
|
||||
except KeyboardInterrupt:
|
||||
if params.observe_only:
|
||||
print("\nKeyboard Interrupt, exiting observer...")
|
||||
main.shutdown = 2
|
||||
if not params.observe_only and main.shutdown == 1:
|
||||
print("\nSecond Keyboard Interrupt, stopping...\n")
|
||||
_, error = server.runCommand(["stateForceShutdown"])
|
||||
if error:
|
||||
logger.error("Unable to cleanly stop: %s" % error)
|
||||
if not params.observe_only and main.shutdown == 0:
|
||||
print("\nKeyboard Interrupt, closing down...\n")
|
||||
interrupted = True
|
||||
_, error = server.runCommand(["stateShutdown"])
|
||||
if error:
|
||||
logger.error("Unable to cleanly shutdown: %s" % error)
|
||||
buildinfohelper.cancel_cli_build()
|
||||
main.shutdown = main.shutdown + 1
|
||||
main.shutdown = 1
|
||||
except Exception as e:
|
||||
# print errors to log
|
||||
import traceback
|
||||
@@ -476,5 +461,5 @@ def main(server, eventHandler, params):
|
||||
if interrupted and return_value == 0:
|
||||
return_value += 1
|
||||
|
||||
logger.warning("Return value is %d", return_value)
|
||||
logger.warn("Return value is %d", return_value)
|
||||
return return_value
|
||||
|
||||
@@ -25,7 +25,7 @@ client/server deadlocks.
|
||||
"""
|
||||
|
||||
import socket, threading, pickle, collections
|
||||
from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
|
||||
from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
|
||||
|
||||
class BBUIEventQueue:
|
||||
def __init__(self, BBServer, clientinfo=("localhost, 0")):
|
||||
@@ -116,7 +116,7 @@ class BBUIEventQueue:
|
||||
self.server.handle_request()
|
||||
except Exception as e:
|
||||
import traceback
|
||||
logger.error("BBUIEventQueue.startCallbackHandler: Exception while trying to handle request: %s\n%s" % (e, traceback.format_exc()))
|
||||
logger.error("BBUIEventQueue.startCallbackHandler: Exception while trying to handle request: %s\n%s" % (e, traceback.format_exc(e)))
|
||||
|
||||
self.server.server_close()
|
||||
|
||||
@@ -137,7 +137,7 @@ class UIXMLRPCServer (SimpleXMLRPCServer):
|
||||
SimpleXMLRPCServer.__init__( self,
|
||||
interface,
|
||||
requestHandler=SimpleXMLRPCRequestHandler,
|
||||
logRequests=False, allow_none=True, use_builtin_types=True)
|
||||
logRequests=False, allow_none=True)
|
||||
|
||||
def get_request(self):
|
||||
while not self.quit:
|
||||
|
||||
@@ -27,8 +27,6 @@ import bb
|
||||
import bb.msg
|
||||
import multiprocessing
|
||||
import fcntl
|
||||
import imp
|
||||
import itertools
|
||||
import subprocess
|
||||
import glob
|
||||
import fnmatch
|
||||
@@ -36,15 +34,12 @@ import traceback
|
||||
import errno
|
||||
import signal
|
||||
import ast
|
||||
import collections
|
||||
import copy
|
||||
from subprocess import getstatusoutput
|
||||
from commands import getstatusoutput
|
||||
from contextlib import contextmanager
|
||||
from ctypes import cdll
|
||||
|
||||
logger = logging.getLogger("BitBake.Util")
|
||||
python_extensions = [e for e, _, _ in imp.get_suffixes()]
|
||||
|
||||
logger = logging.getLogger("BitBake.Util")
|
||||
|
||||
def clean_context():
|
||||
return {
|
||||
@@ -76,7 +71,7 @@ def explode_version(s):
|
||||
r.append((0, int(m.group(1))))
|
||||
s = m.group(2)
|
||||
continue
|
||||
if s[0] in string.ascii_letters:
|
||||
if s[0] in string.letters:
|
||||
m = alpha_regexp.match(s)
|
||||
r.append((1, m.group(1)))
|
||||
s = m.group(2)
|
||||
@@ -193,7 +188,7 @@ def explode_dep_versions2(s):
|
||||
"DEPEND1 (optional version) DEPEND2 (optional version) ..."
|
||||
and return a dictionary of dependencies and versions.
|
||||
"""
|
||||
r = collections.OrderedDict()
|
||||
r = {}
|
||||
l = s.replace(",", "").split()
|
||||
lastdep = None
|
||||
lastcmp = ""
|
||||
@@ -408,13 +403,8 @@ def better_exec(code, context, text = None, realfile = "<code>", pythonexception
|
||||
def simple_exec(code, context):
|
||||
exec(code, get_context(), context)
|
||||
|
||||
def better_eval(source, locals, extraglobals = None):
|
||||
ctx = get_context()
|
||||
if extraglobals:
|
||||
ctx = copy.copy(ctx)
|
||||
for g in extraglobals:
|
||||
ctx[g] = extraglobals[g]
|
||||
return eval(source, ctx, locals)
|
||||
def better_eval(source, locals):
|
||||
return eval(source, get_context(), locals)
|
||||
|
||||
@contextmanager
|
||||
def fileslocked(files):
|
||||
@@ -573,7 +563,6 @@ def preserved_envvars_exported():
|
||||
'SHELL',
|
||||
'TERM',
|
||||
'USER',
|
||||
'LC_ALL',
|
||||
]
|
||||
|
||||
def preserved_envvars():
|
||||
@@ -593,7 +582,7 @@ def filter_environment(good_vars):
|
||||
"""
|
||||
|
||||
removed_vars = {}
|
||||
for key in list(os.environ):
|
||||
for key in os.environ.keys():
|
||||
if key in good_vars:
|
||||
continue
|
||||
|
||||
@@ -601,12 +590,6 @@ def filter_environment(good_vars):
|
||||
os.unsetenv(key)
|
||||
del os.environ[key]
|
||||
|
||||
# If we spawn a python process, we need to have a UTF-8 locale, else python's file
|
||||
# access methods will use ascii. You can't change that mode once the interpreter is
|
||||
# started so we have to ensure a locale is set. Ideally we'd use C.UTF-8 but not all
|
||||
# distros support that and we need to set something.
|
||||
os.environ["LC_ALL"] = "en_US.UTF-8"
|
||||
|
||||
if removed_vars:
|
||||
logger.debug(1, "Removed the following variables from the environment: %s", ", ".join(removed_vars.keys()))
|
||||
|
||||
@@ -646,7 +629,7 @@ def empty_environment():
|
||||
"""
|
||||
Remove all variables from the environment.
|
||||
"""
|
||||
for s in list(os.environ.keys()):
|
||||
for s in os.environ.keys():
|
||||
os.unsetenv(s)
|
||||
del os.environ[s]
|
||||
|
||||
@@ -835,7 +818,7 @@ def copyfile(src, dest, newmtime = None, sstat = None):
|
||||
if not sstat:
|
||||
sstat = os.lstat(src)
|
||||
except Exception as e:
|
||||
logger.warning("copyfile: stat of %s failed (%s)" % (src, e))
|
||||
logger.warn("copyfile: stat of %s failed (%s)" % (src, e))
|
||||
return False
|
||||
|
||||
destexists = 1
|
||||
@@ -862,7 +845,7 @@ def copyfile(src, dest, newmtime = None, sstat = None):
|
||||
#os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
|
||||
return os.lstat(dest)
|
||||
except Exception as e:
|
||||
logger.warning("copyfile: failed to create symlink %s to %s (%s)" % (dest, target, e))
|
||||
logger.warn("copyfile: failed to create symlink %s to %s (%s)" % (dest, target, e))
|
||||
return False
|
||||
|
||||
if stat.S_ISREG(sstat[stat.ST_MODE]):
|
||||
@@ -877,7 +860,7 @@ def copyfile(src, dest, newmtime = None, sstat = None):
|
||||
shutil.copyfile(src, dest + "#new")
|
||||
os.rename(dest + "#new", dest)
|
||||
except Exception as e:
|
||||
logger.warning("copyfile: copy %s to %s failed (%s)" % (src, dest, e))
|
||||
logger.warn("copyfile: copy %s to %s failed (%s)" % (src, dest, e))
|
||||
return False
|
||||
finally:
|
||||
if srcchown:
|
||||
@@ -888,13 +871,13 @@ def copyfile(src, dest, newmtime = None, sstat = None):
|
||||
#we don't yet handle special, so we need to fall back to /bin/mv
|
||||
a = getstatusoutput("/bin/cp -f " + "'" + src + "' '" + dest + "'")
|
||||
if a[0] != 0:
|
||||
logger.warning("copyfile: failed to copy special file %s to %s (%s)" % (src, dest, a))
|
||||
logger.warn("copyfile: failed to copy special file %s to %s (%s)" % (src, dest, a))
|
||||
return False # failure
|
||||
try:
|
||||
os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
|
||||
os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
|
||||
except Exception as e:
|
||||
logger.warning("copyfile: failed to chown/chmod %s (%s)" % (dest, e))
|
||||
logger.warn("copyfile: failed to chown/chmod %s (%s)" % (dest, e))
|
||||
return False
|
||||
|
||||
if newmtime:
|
||||
@@ -963,7 +946,7 @@ def contains(variable, checkvalues, truevalue, falsevalue, d):
|
||||
if not val:
|
||||
return falsevalue
|
||||
val = set(val.split())
|
||||
if isinstance(checkvalues, str):
|
||||
if isinstance(checkvalues, basestring):
|
||||
checkvalues = set(checkvalues.split())
|
||||
else:
|
||||
checkvalues = set(checkvalues)
|
||||
@@ -976,7 +959,7 @@ def contains_any(variable, checkvalues, truevalue, falsevalue, d):
|
||||
if not val:
|
||||
return falsevalue
|
||||
val = set(val.split())
|
||||
if isinstance(checkvalues, str):
|
||||
if isinstance(checkvalues, basestring):
|
||||
checkvalues = set(checkvalues.split())
|
||||
else:
|
||||
checkvalues = set(checkvalues)
|
||||
@@ -1045,7 +1028,7 @@ def exec_flat_python_func(func, *args, **kwargs):
|
||||
aidx += 1
|
||||
# Handle keyword arguments
|
||||
context.update(kwargs)
|
||||
funcargs.extend(['%s=%s' % (arg, arg) for arg in kwargs.keys()])
|
||||
funcargs.extend(['%s=%s' % (arg, arg) for arg in kwargs.iterkeys()])
|
||||
code = 'retval = %s(%s)' % (func, ', '.join(funcargs))
|
||||
comp = bb.utils.better_compile(code, '<string>', '<string>')
|
||||
bb.utils.better_exec(comp, context, code, '<string>')
|
||||
@@ -1132,7 +1115,7 @@ def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
|
||||
else:
|
||||
varset_new = varset_start
|
||||
|
||||
if isinstance(indent, int):
|
||||
if isinstance(indent, (int, long)):
|
||||
if indent == -1:
|
||||
indentspc = ' ' * (len(varset_new) + 2)
|
||||
else:
|
||||
@@ -1200,7 +1183,7 @@ def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
|
||||
in_var = None
|
||||
else:
|
||||
skip = False
|
||||
for (varname, var_re) in var_res.items():
|
||||
for (varname, var_re) in var_res.iteritems():
|
||||
res = var_re.match(line)
|
||||
if res:
|
||||
isfunc = varname.endswith('()')
|
||||
@@ -1378,7 +1361,7 @@ def get_file_layer(filename, d):
|
||||
# Use longest path so we handle nested layers
|
||||
matchlen = 0
|
||||
match = None
|
||||
for collection, regex in collection_res.items():
|
||||
for collection, regex in collection_res.iteritems():
|
||||
if len(regex) > matchlen and re.match(regex, path):
|
||||
matchlen = len(regex)
|
||||
match = collection
|
||||
@@ -1468,29 +1451,3 @@ def export_proxies(d):
|
||||
exported = True
|
||||
|
||||
return exported
|
||||
|
||||
|
||||
def load_plugins(logger, plugins, pluginpath):
|
||||
def load_plugin(name):
|
||||
logger.debug('Loading plugin %s' % name)
|
||||
fp, pathname, description = imp.find_module(name, [pluginpath])
|
||||
try:
|
||||
return imp.load_module(name, fp, pathname, description)
|
||||
finally:
|
||||
if fp:
|
||||
fp.close()
|
||||
|
||||
logger.debug('Loading plugins from %s...' % pluginpath)
|
||||
|
||||
expanded = (glob.glob(os.path.join(pluginpath, '*' + ext))
|
||||
for ext in python_extensions)
|
||||
files = itertools.chain.from_iterable(expanded)
|
||||
names = set(os.path.splitext(os.path.basename(fn))[0] for fn in files)
|
||||
for name in names:
|
||||
if name != '__init__':
|
||||
plugin = load_plugin(name)
|
||||
if hasattr(plugin, 'plugin_init'):
|
||||
obj = plugin.plugin_init(plugins)
|
||||
plugins.append(obj or plugin)
|
||||
else:
|
||||
plugins.append(plugin)
|
||||
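load_plugins() above globs the plugin directory for Python modules, imports each one with imp, and either appends the object returned by plugin_init() or the module itself. A hypothetical call site (the logger name matches the file above; the plugin path is a placeholder):

import logging

plugins = []
# Path is a placeholder for wherever the plugin modules actually live.
load_plugins(logging.getLogger('bitbake-layers'), plugins,
             '/path/to/lib/bblayers')
for plugin in plugins:
    print 'loaded plugin: %r' % plugin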
|
||||
@@ -1,233 +0,0 @@
|
||||
import fnmatch
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
|
||||
import bb.utils
|
||||
|
||||
from bblayers.common import LayerPlugin
|
||||
|
||||
logger = logging.getLogger('bitbake-layers')
|
||||
|
||||
|
||||
def plugin_init(plugins):
|
||||
return ActionPlugin()
|
||||
|
||||
|
||||
class ActionPlugin(LayerPlugin):
|
||||
def do_add_layer(self, args):
|
||||
"""Add a layer to bblayers.conf."""
|
||||
layerdir = os.path.abspath(args.layerdir)
|
||||
if not os.path.exists(layerdir):
|
||||
sys.stderr.write("Specified layer directory doesn't exist\n")
|
||||
return 1
|
||||
|
||||
layer_conf = os.path.join(layerdir, 'conf', 'layer.conf')
|
||||
if not os.path.exists(layer_conf):
|
||||
sys.stderr.write("Specified layer directory doesn't contain a conf/layer.conf file\n")
|
||||
return 1
|
||||
|
||||
bblayers_conf = os.path.join('conf', 'bblayers.conf')
|
||||
if not os.path.exists(bblayers_conf):
|
||||
sys.stderr.write("Unable to find bblayers.conf\n")
|
||||
return 1
|
||||
|
||||
notadded, _ = bb.utils.edit_bblayers_conf(bblayers_conf, layerdir, None)
|
||||
if notadded:
|
||||
for item in notadded:
|
||||
sys.stderr.write("Specified layer %s is already in BBLAYERS\n" % item)
|
||||
|
||||
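do_add_layer() above (and do_remove_layer() below) lean on bb.utils.edit_bblayers_conf(), which takes the bblayers.conf path plus a layer to add and a layer to remove and returns (notadded, notremoved). A hedged standalone sketch of calling it directly; the layer path is illustrative and this assumes you run from an initialised build directory:

import bb.utils

notadded, notremoved = bb.utils.edit_bblayers_conf(
    'conf/bblayers.conf', '/path/to/meta-example', None)
if notadded:
    print 'already present: %s' % ', '.join(notadded)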
def do_remove_layer(self, args):
|
||||
"""Remove a layer from bblayers.conf."""
|
||||
bblayers_conf = os.path.join('conf', 'bblayers.conf')
|
||||
if not os.path.exists(bblayers_conf):
|
||||
sys.stderr.write("Unable to find bblayers.conf\n")
|
||||
return 1
|
||||
|
||||
if args.layerdir.startswith('*'):
|
||||
layerdir = args.layerdir
|
||||
elif not '/' in args.layerdir:
|
||||
layerdir = '*/%s' % args.layerdir
|
||||
else:
|
||||
layerdir = os.path.abspath(args.layerdir)
|
||||
(_, notremoved) = bb.utils.edit_bblayers_conf(bblayers_conf, None, layerdir)
|
||||
if notremoved:
|
||||
for item in notremoved:
|
||||
sys.stderr.write("No layers matching %s found in BBLAYERS\n" % item)
|
||||
return 1
|
||||
|
||||
def do_flatten(self, args):
|
||||
"""flatten layer configuration into a separate output directory.
|
||||
|
||||
Takes the specified layers (or all layers in the current layer
|
||||
configuration if none are specified) and builds a "flattened" directory
|
||||
containing the contents of all layers, with any overlayed recipes removed
|
||||
and bbappends appended to the corresponding recipes. Note that some manual
|
||||
cleanup may still be necessary afterwards, in particular:
|
||||
|
||||
* where non-recipe files (such as patches) are overwritten (the flatten
|
||||
command will show a warning for these)
|
||||
* where anything beyond the normal layer setup has been added to
|
||||
layer.conf (only the lowest priority number layer's layer.conf is used)
|
||||
* overridden/appended items from bbappends will need to be tidied up
|
||||
* when the flattened layers do not have the same directory structure (the
|
||||
flatten command should show a warning when this will cause a problem)
|
||||
|
||||
Warning: if you flatten several layers where another layer is intended to
|
||||
be used "inbetween" them (in layer priority order) such that recipes /
|
||||
bbappends in the layers interact, and then attempt to use the new output
|
||||
layer together with that other layer, you may no longer get the same
|
||||
build results (as the layer priority order has effectively changed).
|
||||
"""
|
||||
if len(args.layer) == 1:
|
||||
logger.error('If you specify layers to flatten you must specify at least two')
|
||||
return 1
|
||||
|
||||
outputdir = args.outputdir
|
||||
if os.path.exists(outputdir) and os.listdir(outputdir):
|
||||
logger.error('Directory %s exists and is non-empty, please clear it out first' % outputdir)
|
||||
return 1
|
||||
|
||||
layers = self.bblayers
|
||||
if len(args.layer) > 2:
|
||||
layernames = args.layer
|
||||
found_layernames = []
|
||||
found_layerdirs = []
|
||||
for layerdir in layers:
|
||||
layername = self.get_layer_name(layerdir)
|
||||
if layername in layernames:
|
||||
found_layerdirs.append(layerdir)
|
||||
found_layernames.append(layername)
|
||||
|
||||
for layername in layernames:
|
||||
if not layername in found_layernames:
|
||||
                logger.error('Unable to find layer %s in current configuration, please run "%s show-layers" to list configured layers' % (layername, os.path.basename(sys.argv[0])))
                return
            layers = found_layerdirs
        else:
            layernames = []

        # Ensure a specified path matches our list of layers
        def layer_path_match(path):
            for layerdir in layers:
                if path.startswith(os.path.join(layerdir, '')):
                    return layerdir
            return None

        applied_appends = []
        for layer in layers:
            overlayed = []
            for f in self.tinfoil.cooker.collection.overlayed.keys():
                for of in self.tinfoil.cooker.collection.overlayed[f]:
                    if of.startswith(layer):
                        overlayed.append(of)

            logger.plain('Copying files from %s...' % layer )
            for root, dirs, files in os.walk(layer):
                if '.git' in dirs:
                    dirs.remove('.git')
                if '.hg' in dirs:
                    dirs.remove('.hg')

                for f1 in files:
                    f1full = os.sep.join([root, f1])
                    if f1full in overlayed:
                        logger.plain(' Skipping overlayed file %s' % f1full )
                    else:
                        ext = os.path.splitext(f1)[1]
                        if ext != '.bbappend':
                            fdest = f1full[len(layer):]
                            fdest = os.path.normpath(os.sep.join([outputdir,fdest]))
                            bb.utils.mkdirhier(os.path.dirname(fdest))
                            if os.path.exists(fdest):
                                if f1 == 'layer.conf' and root.endswith('/conf'):
                                    logger.plain(' Skipping layer config file %s' % f1full )
                                    continue
                                else:
                                    logger.warning('Overwriting file %s', fdest)
                            bb.utils.copyfile(f1full, fdest)
                            if ext == '.bb':
                                for append in self.tinfoil.cooker.collection.get_file_appends(f1full):
                                    if layer_path_match(append):
                                        logger.plain(' Applying append %s to %s' % (append, fdest))
                                        self.apply_append(append, fdest)
                                        applied_appends.append(append)

        # Take care of when some layers are excluded and yet we have included bbappends for those recipes
        for b in self.tinfoil.cooker.collection.bbappends:
            (recipename, appendname) = b
            if appendname not in applied_appends:
                first_append = None
                layer = layer_path_match(appendname)
                if layer:
                    if first_append:
                        self.apply_append(appendname, first_append)
                    else:
                        fdest = appendname[len(layer):]
                        fdest = os.path.normpath(os.sep.join([outputdir,fdest]))
                        bb.utils.mkdirhier(os.path.dirname(fdest))
                        bb.utils.copyfile(appendname, fdest)
                        first_append = fdest

        # Get the regex for the first layer in our list (which is where the conf/layer.conf file will
        # have come from)
        first_regex = None
        layerdir = layers[0]
        for layername, pattern, regex, _ in self.tinfoil.cooker.recipecache.bbfile_config_priorities:
            if regex.match(os.path.join(layerdir, 'test')):
                first_regex = regex
                break

        if first_regex:
            # Find the BBFILES entries that match (which will have come from this conf/layer.conf file)
            bbfiles = str(self.tinfoil.config_data.getVar('BBFILES', True)).split()
            bbfiles_layer = []
            for item in bbfiles:
                if first_regex.match(item):
                    newpath = os.path.join(outputdir, item[len(layerdir)+1:])
                    bbfiles_layer.append(newpath)

            if bbfiles_layer:
                # Check that all important layer files match BBFILES
                for root, dirs, files in os.walk(outputdir):
                    for f1 in files:
                        ext = os.path.splitext(f1)[1]
                        if ext in ['.bb', '.bbappend']:
                            f1full = os.sep.join([root, f1])
                            entry_found = False
                            for item in bbfiles_layer:
                                if fnmatch.fnmatch(f1full, item):
                                    entry_found = True
                                    break
                            if not entry_found:
                                logger.warning("File %s does not match the flattened layer's BBFILES setting, you may need to edit conf/layer.conf or move the file elsewhere" % f1full)

    def get_file_layer(self, filename):
        layerdir = self.get_file_layerdir(filename)
        if layerdir:
            return self.get_layer_name(layerdir)
        else:
            return '?'

    def get_file_layerdir(self, filename):
        layer = bb.utils.get_file_layer(filename, self.tinfoil.config_data)
        return self.bbfile_collections.get(layer, None)

    def apply_append(self, appendname, recipename):
        with open(appendname, 'r') as appendfile:
            with open(recipename, 'a') as recipefile:
                recipefile.write('\n')
                recipefile.write('##### bbappended from %s #####\n' % self.get_file_layer(appendname))
                recipefile.writelines(appendfile.readlines())

    def register_commands(self, sp):
        parser_add_layer = self.add_command(sp, 'add-layer', self.do_add_layer, parserecipes=False)
        parser_add_layer.add_argument('layerdir', help='Layer directory to add')

        parser_remove_layer = self.add_command(sp, 'remove-layer', self.do_remove_layer, parserecipes=False)
        parser_remove_layer.add_argument('layerdir', help='Layer directory to remove (wildcards allowed, enclose in quotes to avoid shell expansion)')
        parser_remove_layer.set_defaults(func=self.do_remove_layer)

        parser_flatten = self.add_command(sp, 'flatten', self.do_flatten)
        parser_flatten.add_argument('layer', nargs='*', help='Optional layer(s) to flatten (otherwise all are flattened)')
        parser_flatten.add_argument('outputdir', help='Output directory')
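For context, apply_append() above simply concatenates a .bbappend onto the flattened copy of its recipe, separated by a marker comment naming the layer the append came from. A minimal illustration of the resulting file content, with invented recipe text and layer name (this sketch is not part of the commit):

# Illustrative only: the recipe contents and the "meta-example" layer name are
# made up; this mirrors the write sequence performed by apply_append() above.
recipe_text = 'SUMMARY = "example recipe"\n'
append_text = 'SRC_URI += "file://local-fix.patch"\n'
flattened = recipe_text + '\n' + '##### bbappended from meta-example #####\n' + append_text
print(flattened)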
@@ -1,33 +0,0 @@
import argparse
import logging
import os

logger = logging.getLogger('bitbake-layers')


class LayerPlugin():
    def __init__(self):
        self.tinfoil = None
        self.bblayers = []

    def tinfoil_init(self, tinfoil):
        self.tinfoil = tinfoil
        self.bblayers = (self.tinfoil.config_data.getVar('BBLAYERS', True) or "").split()
        layerconfs = self.tinfoil.config_data.varhistory.get_variable_items_files('BBFILE_COLLECTIONS', self.tinfoil.config_data)
        self.bbfile_collections = {layer: os.path.dirname(os.path.dirname(path)) for layer, path in layerconfs.items()}

    @staticmethod
    def add_command(subparsers, cmdname, function, parserecipes=True, *args, **kwargs):
        """Convert docstring for function to help."""
        docsplit = function.__doc__.splitlines()
        help = docsplit[0]
        if len(docsplit) > 1:
            desc = '\n'.join(docsplit[1:])
        else:
            desc = help
        subparser = subparsers.add_parser(cmdname, *args, help=help, description=desc, formatter_class=argparse.RawTextHelpFormatter, **kwargs)
        subparser.set_defaults(func=function, parserecipes=parserecipes)
        return subparser

    def get_layer_name(self, layerdir):
        return os.path.basename(layerdir.rstrip(os.sep))
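add_command() above turns a handler's docstring into the subcommand's help text and wires the handler into the argparse subparser. A minimal sketch of a hypothetical plugin module built on this base class follows; the plugin and command names are invented, but the plugin_init()/register_commands() shape matches the real plugins shown below:

# Hypothetical bitbake-layers plugin built on LayerPlugin; names are illustrative.
from bblayers.common import LayerPlugin

def plugin_init(plugins):
    return ExamplePlugin()

class ExamplePlugin(LayerPlugin):
    def do_list_layer_dirs(self, args):
        """list the directories of the currently configured layers"""
        for layerdir in self.bblayers:
            print('%s: %s' % (self.get_layer_name(layerdir), layerdir))

    def register_commands(self, sp):
        self.add_command(sp, 'list-layer-dirs', self.do_list_layer_dirs, parserecipes=False)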
@@ -1,270 +0,0 @@
import argparse
import http.client
import json
import logging
import os
import subprocess
import urllib.parse

from bblayers.action import ActionPlugin

logger = logging.getLogger('bitbake-layers')


def plugin_init(plugins):
    return LayerIndexPlugin()


class LayerIndexPlugin(ActionPlugin):
    """Subcommands for interacting with the layer index.

    This class inherits ActionPlugin to get do_add_layer.
    """

    def get_json_data(self, apiurl):
        proxy_settings = os.environ.get("http_proxy", None)
        conn = None
        _parsedurl = urllib.parse.urlparse(apiurl)
        path = _parsedurl.path
        query = _parsedurl.query

        def parse_url(url):
            parsedurl = urllib.parse.urlparse(url)
            if parsedurl.netloc[0] == '[':
                host, port = parsedurl.netloc[1:].split(']', 1)
                if ':' in port:
                    port = port.rsplit(':', 1)[1]
                else:
                    port = None
            else:
                if parsedurl.netloc.count(':') == 1:
                    (host, port) = parsedurl.netloc.split(":")
                else:
                    host = parsedurl.netloc
                    port = None
            return (host, 80 if port is None else int(port))

        if proxy_settings is None:
            host, port = parse_url(apiurl)
            conn = http.client.HTTPConnection(host, port)
            conn.request("GET", path + "?" + query)
        else:
            host, port = parse_url(proxy_settings)
            conn = http.client.HTTPConnection(host, port)
            conn.request("GET", apiurl)

        r = conn.getresponse()
        if r.status != 200:
            raise Exception("Failed to read " + path + ": %d %s" % (r.status, r.reason))
        return json.loads(r.read())

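get_json_data() above resolves the index (or proxy) host and port with its inner parse_url() helper, defaulting to port 80 and handling bracketed IPv6 literals. A standalone sketch of the same splitting logic, with made-up hostnames in the checks, for reference:

# Standalone sketch mirroring the host/port handling of parse_url() above;
# hostnames used in the assertions are invented.
import urllib.parse

def split_host_port(url, default_port=80):
    netloc = urllib.parse.urlparse(url).netloc
    if netloc.startswith('['):
        # Bracketed IPv6 literal, e.g. http://[2001:db8::1]:8000/
        host, _, port = netloc[1:].partition(']')
        port = port.lstrip(':') or None
    elif netloc.count(':') == 1:
        host, port = netloc.split(':')
    else:
        host, port = netloc, None
    return (host, default_port if port is None else int(port))

assert split_host_port('http://layerindex.example.com/layerindex/api/') == ('layerindex.example.com', 80)
assert split_host_port('http://proxy.example.com:8080') == ('proxy.example.com', 8080)
assert split_host_port('http://[2001:db8::1]:8000/api/') == ('2001:db8::1', 8000)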
def get_layer_deps(self, layername, layeritems, layerbranches, layerdependencies, branchnum, selfname=False):
|
||||
def layeritems_info_id(items_name, layeritems):
|
||||
litems_id = None
|
||||
for li in layeritems:
|
||||
if li['name'] == items_name:
|
||||
litems_id = li['id']
|
||||
break
|
||||
return litems_id
|
||||
|
||||
def layerbranches_info(items_id, layerbranches):
|
||||
lbranch = {}
|
||||
for lb in layerbranches:
|
||||
if lb['layer'] == items_id and lb['branch'] == branchnum:
|
||||
lbranch['id'] = lb['id']
|
||||
lbranch['vcs_subdir'] = lb['vcs_subdir']
|
||||
break
|
||||
return lbranch
|
||||
|
||||
def layerdependencies_info(lb_id, layerdependencies):
|
||||
ld_deps = []
|
||||
for ld in layerdependencies:
|
||||
if ld['layerbranch'] == lb_id and not ld['dependency'] in ld_deps:
|
||||
ld_deps.append(ld['dependency'])
|
||||
if not ld_deps:
|
||||
logger.error("The dependency of layerDependencies is not found.")
|
||||
return ld_deps
|
||||
|
||||
def layeritems_info_name_subdir(items_id, layeritems):
|
||||
litems = {}
|
||||
for li in layeritems:
|
||||
if li['id'] == items_id:
|
||||
litems['vcs_url'] = li['vcs_url']
|
||||
litems['name'] = li['name']
|
||||
break
|
||||
return litems
|
||||
|
||||
if selfname:
|
||||
selfid = layeritems_info_id(layername, layeritems)
|
||||
lbinfo = layerbranches_info(selfid, layerbranches)
|
||||
if lbinfo:
|
||||
selfsubdir = lbinfo['vcs_subdir']
|
||||
else:
|
||||
logger.error("%s is not found in the specified branch" % layername)
|
||||
return
|
||||
selfurl = layeritems_info_name_subdir(selfid, layeritems)['vcs_url']
|
||||
if selfurl:
|
||||
return selfurl, selfsubdir
|
||||
else:
|
||||
logger.error("Cannot get layer %s git repo and subdir" % layername)
|
||||
return
|
||||
ldict = {}
|
||||
itemsid = layeritems_info_id(layername, layeritems)
|
||||
if not itemsid:
|
||||
return layername, None
|
||||
lbid = layerbranches_info(itemsid, layerbranches)
|
||||
if lbid:
|
||||
lbid = layerbranches_info(itemsid, layerbranches)['id']
|
||||
else:
|
||||
logger.error("%s is not found in the specified branch" % layername)
|
||||
return None, None
|
||||
for dependency in layerdependencies_info(lbid, layerdependencies):
|
||||
lname = layeritems_info_name_subdir(dependency, layeritems)['name']
|
||||
lurl = layeritems_info_name_subdir(dependency, layeritems)['vcs_url']
|
||||
lsubdir = layerbranches_info(dependency, layerbranches)['vcs_subdir']
|
||||
ldict[lname] = lurl, lsubdir
|
||||
return None, ldict
|
||||
|
||||
def get_fetch_layer(self, fetchdir, url, subdir, fetch_layer):
|
||||
layername = self.get_layer_name(url)
|
||||
if os.path.splitext(layername)[1] == '.git':
|
||||
layername = os.path.splitext(layername)[0]
|
||||
repodir = os.path.join(fetchdir, layername)
|
||||
layerdir = os.path.join(repodir, subdir)
|
||||
if not os.path.exists(repodir):
|
||||
if fetch_layer:
|
||||
result = subprocess.call('git clone %s %s' % (url, repodir), shell = True)
|
||||
if result:
|
||||
logger.error("Failed to download %s" % url)
|
||||
return None, None
|
||||
else:
|
||||
return layername, layerdir
|
||||
else:
|
||||
logger.plain("Repository %s needs to be fetched" % url)
|
||||
return layername, layerdir
|
||||
elif os.path.exists(layerdir):
|
||||
return layername, layerdir
|
||||
else:
|
||||
logger.error("%s is not in %s" % (url, subdir))
|
||||
return None, None
|
||||
|
||||
def do_layerindex_fetch(self, args):
|
||||
"""Fetches a layer from a layer index along with its dependent layers, and adds them to conf/bblayers.conf.
|
||||
"""
|
||||
apiurl = self.tinfoil.config_data.getVar('BBLAYERS_LAYERINDEX_URL', True)
|
||||
if not apiurl:
|
||||
logger.error("Cannot get BBLAYERS_LAYERINDEX_URL")
|
||||
return 1
|
||||
else:
|
||||
if apiurl[-1] != '/':
|
||||
apiurl += '/'
|
||||
apiurl += "api/"
|
||||
apilinks = self.get_json_data(apiurl)
|
||||
branches = self.get_json_data(apilinks['branches'])
|
||||
|
||||
branchnum = 0
|
||||
for branch in branches:
|
||||
if branch['name'] == args.branch:
|
||||
branchnum = branch['id']
|
||||
break
|
||||
if branchnum == 0:
|
||||
validbranches = ', '.join([branch['name'] for branch in branches])
|
||||
logger.error('Invalid layer branch name "%s". Valid branches: %s' % (args.branch, validbranches))
|
||||
return 1
|
||||
|
||||
ignore_layers = []
|
||||
for collection in self.tinfoil.config_data.getVar('BBFILE_COLLECTIONS', True).split():
|
||||
lname = self.tinfoil.config_data.getVar('BBLAYERS_LAYERINDEX_NAME_%s' % collection, True)
|
||||
if lname:
|
||||
ignore_layers.append(lname)
|
||||
|
||||
if args.ignore:
|
||||
ignore_layers.extend(args.ignore.split(','))
|
||||
|
||||
layeritems = self.get_json_data(apilinks['layerItems'])
|
||||
layerbranches = self.get_json_data(apilinks['layerBranches'])
|
||||
layerdependencies = self.get_json_data(apilinks['layerDependencies'])
|
||||
invaluenames = []
|
||||
repourls = {}
|
||||
printlayers = []
|
||||
|
||||
def query_dependencies(layers, layeritems, layerbranches, layerdependencies, branchnum):
|
||||
depslayer = []
|
||||
for layername in layers:
|
||||
invaluename, layerdict = self.get_layer_deps(layername, layeritems, layerbranches, layerdependencies, branchnum)
|
||||
if layerdict:
|
||||
repourls[layername] = self.get_layer_deps(layername, layeritems, layerbranches, layerdependencies, branchnum, selfname=True)
|
||||
for layer in layerdict:
|
||||
if not layer in ignore_layers:
|
||||
depslayer.append(layer)
|
||||
printlayers.append((layername, layer, layerdict[layer][0], layerdict[layer][1]))
|
||||
if not layer in ignore_layers and not layer in repourls:
|
||||
repourls[layer] = (layerdict[layer][0], layerdict[layer][1])
|
||||
if invaluename and not invaluename in invaluenames:
|
||||
invaluenames.append(invaluename)
|
||||
return depslayer
|
||||
|
||||
depslayers = query_dependencies(args.layername, layeritems, layerbranches, layerdependencies, branchnum)
|
||||
while depslayers:
|
||||
depslayer = query_dependencies(depslayers, layeritems, layerbranches, layerdependencies, branchnum)
|
||||
depslayers = depslayer
|
||||
if invaluenames:
|
||||
for invaluename in invaluenames:
|
||||
logger.error('Layer "%s" not found in layer index' % invaluename)
|
||||
return 1
|
||||
logger.plain("%s %s %s %s" % ("Layer".ljust(19), "Required by".ljust(19), "Git repository".ljust(54), "Subdirectory"))
|
||||
logger.plain('=' * 115)
|
||||
for layername in args.layername:
|
||||
layerurl = repourls[layername]
|
||||
logger.plain("%s %s %s %s" % (layername.ljust(20), '-'.ljust(20), layerurl[0].ljust(55), layerurl[1]))
|
||||
printedlayers = []
|
||||
for layer, dependency, gitrepo, subdirectory in printlayers:
|
||||
if dependency in printedlayers:
|
||||
continue
|
||||
logger.plain("%s %s %s %s" % (dependency.ljust(20), layer.ljust(20), gitrepo.ljust(55), subdirectory))
|
||||
printedlayers.append(dependency)
|
||||
|
||||
if repourls:
|
||||
fetchdir = self.tinfoil.config_data.getVar('BBLAYERS_FETCH_DIR', True)
|
||||
if not fetchdir:
|
||||
logger.error("Cannot get BBLAYERS_FETCH_DIR")
|
||||
return 1
|
||||
if not os.path.exists(fetchdir):
|
||||
os.makedirs(fetchdir)
|
||||
addlayers = []
|
||||
for repourl, subdir in repourls.values():
|
||||
name, layerdir = self.get_fetch_layer(fetchdir, repourl, subdir, not args.show_only)
|
||||
if not name:
|
||||
# Error already shown
|
||||
return 1
|
||||
addlayers.append((subdir, name, layerdir))
|
||||
if not args.show_only:
|
||||
for subdir, name, layerdir in set(addlayers):
|
||||
if os.path.exists(layerdir):
|
||||
if subdir:
|
||||
logger.plain("Adding layer \"%s\" to conf/bblayers.conf" % subdir)
|
||||
else:
|
||||
logger.plain("Adding layer \"%s\" to conf/bblayers.conf" % name)
|
||||
localargs = argparse.Namespace()
|
||||
localargs.layerdir = layerdir
|
||||
self.do_add_layer(localargs)
|
||||
else:
|
||||
break
|
||||
|
||||
def do_layerindex_show_depends(self, args):
|
||||
"""Find layer dependencies from layer index.
|
||||
"""
|
||||
args.show_only = True
|
||||
args.ignore = []
|
||||
self.do_layerindex_fetch(args)
|
||||
|
||||
def register_commands(self, sp):
|
||||
parser_layerindex_fetch = self.add_command(sp, 'layerindex-fetch', self.do_layerindex_fetch)
|
||||
parser_layerindex_fetch.add_argument('-n', '--show-only', help='show dependencies and do nothing else', action='store_true')
|
||||
parser_layerindex_fetch.add_argument('-b', '--branch', help='branch name to fetch (default %(default)s)', default='master')
|
||||
parser_layerindex_fetch.add_argument('-i', '--ignore', help='assume the specified layers do not need to be fetched/added (separate multiple layers with commas, no spaces)', metavar='LAYER')
|
||||
parser_layerindex_fetch.add_argument('layername', nargs='+', help='layer to fetch')
|
||||
|
||||
parser_layerindex_show_depends = self.add_command(sp, 'layerindex-show-depends', self.do_layerindex_show_depends)
|
||||
parser_layerindex_show_depends.add_argument('-b', '--branch', help='branch name to fetch (default %(default)s)', default='master')
|
||||
parser_layerindex_show_depends.add_argument('layername', nargs='+', help='layer to query')
|
||||
@@ -1,500 +0,0 @@
import collections
import fnmatch
import logging
import sys
import os
import re

import bb.cache
import bb.providers
import bb.utils

from bblayers.common import LayerPlugin

logger = logging.getLogger('bitbake-layers')


def plugin_init(plugins):
    return QueryPlugin()


class QueryPlugin(LayerPlugin):
    def do_show_layers(self, args):
        """show current configured layers."""
        logger.plain("%s %s %s" % ("layer".ljust(20), "path".ljust(40), "priority"))
        logger.plain('=' * 74)
        for layer, _, regex, pri in self.tinfoil.cooker.recipecache.bbfile_config_priorities:
            layerdir = self.bbfile_collections.get(layer, None)
            layername = self.get_layer_name(layerdir)
            logger.plain("%s %s %d" % (layername.ljust(20), layerdir.ljust(40), pri))

    def version_str(self, pe, pv, pr = None):
        verstr = "%s" % pv
        if pr:
            verstr = "%s-%s" % (verstr, pr)
        if pe:
            verstr = "%s:%s" % (pe, verstr)
        return verstr
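version_str() above builds the [PE:]PV[-PR] form used throughout the listings that follow. A tiny standalone sketch of the same formatting, with made-up version values:

# Standalone sketch of the [PE:]PV[-PR] formatting used by QueryPlugin.version_str();
# the version values below are invented.
def version_str(pe, pv, pr=None):
    verstr = "%s" % pv
    if pr:
        verstr = "%s-%s" % (verstr, pr)
    if pe:
        verstr = "%s:%s" % (pe, verstr)
    return verstr

assert version_str(None, '1.2.3') == '1.2.3'
assert version_str(None, '1.2.3', 'r0') == '1.2.3-r0'
assert version_str('1', '1.2.3', 'r0') == '1:1.2.3-r0'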
|
||||
def do_show_overlayed(self, args):
|
||||
"""list overlayed recipes (where the same recipe exists in another layer)
|
||||
|
||||
Lists the names of overlayed recipes and the available versions in each
|
||||
layer, with the preferred version first. Note that skipped recipes that
|
||||
are overlayed will also be listed, with a " (skipped)" suffix.
|
||||
"""
|
||||
|
||||
items_listed = self.list_recipes('Overlayed recipes', None, True, args.same_version, args.filenames, True, None)
|
||||
|
||||
# Check for overlayed .bbclass files
|
||||
classes = collections.defaultdict(list)
|
||||
for layerdir in self.bblayers:
|
||||
classdir = os.path.join(layerdir, 'classes')
|
||||
if os.path.exists(classdir):
|
||||
for classfile in os.listdir(classdir):
|
||||
if os.path.splitext(classfile)[1] == '.bbclass':
|
||||
classes[classfile].append(classdir)
|
||||
|
||||
# Locating classes and other files is a bit more complicated than recipes -
|
||||
# layer priority is not a factor; instead BitBake uses the first matching
|
||||
# file in BBPATH, which is manipulated directly by each layer's
|
||||
# conf/layer.conf in turn, thus the order of layers in bblayers.conf is a
|
||||
# factor - however, each layer.conf is free to either prepend or append to
|
||||
# BBPATH (or indeed do crazy stuff with it). Thus the order in BBPATH might
|
||||
# not be exactly the order present in bblayers.conf either.
|
||||
bbpath = str(self.tinfoil.config_data.getVar('BBPATH', True))
|
||||
overlayed_class_found = False
|
||||
for (classfile, classdirs) in classes.items():
|
||||
if len(classdirs) > 1:
|
||||
if not overlayed_class_found:
|
||||
logger.plain('=== Overlayed classes ===')
|
||||
overlayed_class_found = True
|
||||
|
||||
mainfile = bb.utils.which(bbpath, os.path.join('classes', classfile))
|
||||
if args.filenames:
|
||||
logger.plain('%s' % mainfile)
|
||||
else:
|
||||
# We effectively have to guess the layer here
|
||||
logger.plain('%s:' % classfile)
|
||||
mainlayername = '?'
|
||||
for layerdir in self.bblayers:
|
||||
classdir = os.path.join(layerdir, 'classes')
|
||||
if mainfile.startswith(classdir):
|
||||
mainlayername = self.get_layer_name(layerdir)
|
||||
logger.plain(' %s' % mainlayername)
|
||||
for classdir in classdirs:
|
||||
fullpath = os.path.join(classdir, classfile)
|
||||
if fullpath != mainfile:
|
||||
if args.filenames:
|
||||
print(' %s' % fullpath)
|
||||
else:
|
||||
print(' %s' % self.get_layer_name(os.path.dirname(classdir)))
|
||||
|
||||
if overlayed_class_found:
|
||||
items_listed = True;
|
||||
|
||||
if not items_listed:
|
||||
logger.plain('No overlayed files found.')
|
||||
|
||||
def do_show_recipes(self, args):
|
||||
"""list available recipes, showing the layer they are provided by
|
||||
|
||||
Lists the names of recipes and the available versions in each
|
||||
layer, with the preferred version first. Optionally you may specify
|
||||
pnspec to match a specified recipe name (supports wildcards). Note that
|
||||
skipped recipes will also be listed, with a " (skipped)" suffix.
|
||||
"""
|
||||
|
||||
inheritlist = args.inherits.split(',') if args.inherits else []
|
||||
if inheritlist or args.pnspec or args.multiple:
|
||||
title = 'Matching recipes:'
|
||||
else:
|
||||
title = 'Available recipes:'
|
||||
self.list_recipes(title, args.pnspec, False, False, args.filenames, args.multiple, inheritlist)
|
||||
|
||||
def list_recipes(self, title, pnspec, show_overlayed_only, show_same_ver_only, show_filenames, show_multi_provider_only, inherits):
|
||||
if inherits:
|
||||
bbpath = str(self.tinfoil.config_data.getVar('BBPATH', True))
|
||||
for classname in inherits:
|
||||
classfile = 'classes/%s.bbclass' % classname
|
||||
if not bb.utils.which(bbpath, classfile, history=False):
|
||||
logger.error('No class named %s found in BBPATH', classfile)
|
||||
sys.exit(1)
|
||||
|
||||
pkg_pn = self.tinfoil.cooker.recipecache.pkg_pn
|
||||
(latest_versions, preferred_versions) = bb.providers.findProviders(self.tinfoil.config_data, self.tinfoil.cooker.recipecache, pkg_pn)
|
||||
allproviders = bb.providers.allProviders(self.tinfoil.cooker.recipecache)
|
||||
|
||||
# Ensure we list skipped recipes
|
||||
# We are largely guessing about PN, PV and the preferred version here,
|
||||
# but we have no choice since skipped recipes are not fully parsed
|
||||
skiplist = list(self.tinfoil.cooker.skiplist.keys())
|
||||
skiplist.sort( key=lambda fileitem: self.tinfoil.cooker.collection.calc_bbfile_priority(fileitem) )
|
||||
skiplist.reverse()
|
||||
for fn in skiplist:
|
||||
recipe_parts = os.path.splitext(os.path.basename(fn))[0].split('_')
|
||||
p = recipe_parts[0]
|
||||
if len(recipe_parts) > 1:
|
||||
ver = (None, recipe_parts[1], None)
|
||||
else:
|
||||
ver = (None, 'unknown', None)
|
||||
allproviders[p].append((ver, fn))
|
||||
if not p in pkg_pn:
|
||||
pkg_pn[p] = 'dummy'
|
||||
preferred_versions[p] = (ver, fn)
|
||||
|
||||
def print_item(f, pn, ver, layer, ispref):
|
||||
if f in skiplist:
|
||||
skipped = ' (skipped)'
|
||||
else:
|
||||
skipped = ''
|
||||
if show_filenames:
|
||||
if ispref:
|
||||
logger.plain("%s%s", f, skipped)
|
||||
else:
|
||||
logger.plain(" %s%s", f, skipped)
|
||||
else:
|
||||
if ispref:
|
||||
logger.plain("%s:", pn)
|
||||
logger.plain(" %s %s%s", layer.ljust(20), ver, skipped)
|
||||
|
||||
global_inherit = (self.tinfoil.config_data.getVar('INHERIT', True) or "").split()
|
||||
cls_re = re.compile('classes/')
|
||||
|
||||
preffiles = []
|
||||
items_listed = False
|
||||
for p in sorted(pkg_pn):
|
||||
if pnspec:
|
||||
if not fnmatch.fnmatch(p, pnspec):
|
||||
continue
|
||||
|
||||
if len(allproviders[p]) > 1 or not show_multi_provider_only:
|
||||
pref = preferred_versions[p]
|
||||
realfn = bb.cache.Cache.virtualfn2realfn(pref[1])
|
||||
preffile = realfn[0]
|
||||
|
||||
# We only display once per recipe, we should prefer non extended versions of the
|
||||
# recipe if present (so e.g. in OpenEmbedded, openssl rather than nativesdk-openssl
|
||||
# which would otherwise sort first).
|
||||
if realfn[1] and realfn[0] in self.tinfoil.cooker.recipecache.pkg_fn:
|
||||
continue
|
||||
|
||||
if inherits:
|
||||
matchcount = 0
|
||||
recipe_inherits = self.tinfoil.cooker_data.inherits.get(preffile, [])
|
||||
for cls in recipe_inherits:
|
||||
if cls_re.match(cls):
|
||||
continue
|
||||
classname = os.path.splitext(os.path.basename(cls))[0]
|
||||
if classname in global_inherit:
|
||||
continue
|
||||
elif classname in inherits:
|
||||
matchcount += 1
|
||||
if matchcount != len(inherits):
|
||||
# No match - skip this recipe
|
||||
continue
|
||||
|
||||
if preffile not in preffiles:
|
||||
preflayer = self.get_file_layer(preffile)
|
||||
multilayer = False
|
||||
same_ver = True
|
||||
provs = []
|
||||
for prov in allproviders[p]:
|
||||
provfile = bb.cache.Cache.virtualfn2realfn(prov[1])[0]
|
||||
provlayer = self.get_file_layer(provfile)
|
||||
provs.append((provfile, provlayer, prov[0]))
|
||||
if provlayer != preflayer:
|
||||
multilayer = True
|
||||
if prov[0] != pref[0]:
|
||||
same_ver = False
|
||||
|
||||
if (multilayer or not show_overlayed_only) and (same_ver or not show_same_ver_only):
|
||||
if not items_listed:
|
||||
logger.plain('=== %s ===' % title)
|
||||
items_listed = True
|
||||
print_item(preffile, p, self.version_str(pref[0][0], pref[0][1]), preflayer, True)
|
||||
for (provfile, provlayer, provver) in provs:
|
||||
if provfile != preffile:
|
||||
print_item(provfile, p, self.version_str(provver[0], provver[1]), provlayer, False)
|
||||
# Ensure we don't show two entries for BBCLASSEXTENDed recipes
|
||||
preffiles.append(preffile)
|
||||
|
||||
return items_listed
|
||||
|
||||
def get_file_layer(self, filename):
|
||||
layerdir = self.get_file_layerdir(filename)
|
||||
if layerdir:
|
||||
return self.get_layer_name(layerdir)
|
||||
else:
|
||||
return '?'
|
||||
|
||||
def get_file_layerdir(self, filename):
|
||||
layer = bb.utils.get_file_layer(filename, self.tinfoil.config_data)
|
||||
return self.bbfile_collections.get(layer, None)
|
||||
|
||||
def remove_layer_prefix(self, f):
|
||||
"""Remove the layer_dir prefix, e.g., f = /path/to/layer_dir/foo/blah, the
|
||||
return value will be: layer_dir/foo/blah"""
|
||||
f_layerdir = self.get_file_layerdir(f)
|
||||
if not f_layerdir:
|
||||
return f
|
||||
prefix = os.path.join(os.path.dirname(f_layerdir), '')
|
||||
return f[len(prefix):] if f.startswith(prefix) else f
|
||||
|
||||
def do_show_appends(self, args):
|
||||
"""list bbappend files and recipe files they apply to
|
||||
|
||||
Lists recipes with the bbappends that apply to them as subitems.
|
||||
"""
|
||||
|
||||
logger.plain('=== Appended recipes ===')
|
||||
|
||||
pnlist = list(self.tinfoil.cooker_data.pkg_pn.keys())
|
||||
pnlist.sort()
|
||||
appends = False
|
||||
for pn in pnlist:
|
||||
if self.show_appends_for_pn(pn):
|
||||
appends = True
|
||||
|
||||
if self.show_appends_for_skipped():
|
||||
appends = True
|
||||
|
||||
if not appends:
|
||||
logger.plain('No append files found')
|
||||
|
||||
def show_appends_for_pn(self, pn):
|
||||
filenames = self.tinfoil.cooker_data.pkg_pn[pn]
|
||||
|
||||
best = bb.providers.findBestProvider(pn,
|
||||
self.tinfoil.config_data,
|
||||
self.tinfoil.cooker_data,
|
||||
self.tinfoil.cooker_data.pkg_pn)
|
||||
best_filename = os.path.basename(best[3])
|
||||
|
||||
return self.show_appends_output(filenames, best_filename)
|
||||
|
||||
def show_appends_for_skipped(self):
|
||||
filenames = [os.path.basename(f)
|
||||
for f in self.tinfoil.cooker.skiplist.keys()]
|
||||
return self.show_appends_output(filenames, None, " (skipped)")
|
||||
|
||||
def show_appends_output(self, filenames, best_filename, name_suffix = ''):
|
||||
appended, missing = self.get_appends_for_files(filenames)
|
||||
if appended:
|
||||
for basename, appends in appended:
|
||||
logger.plain('%s%s:', basename, name_suffix)
|
||||
for append in appends:
|
||||
logger.plain(' %s', append)
|
||||
|
||||
if best_filename:
|
||||
if best_filename in missing:
|
||||
logger.warning('%s: missing append for preferred version',
|
||||
best_filename)
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def get_appends_for_files(self, filenames):
|
||||
appended, notappended = [], []
|
||||
for filename in filenames:
|
||||
_, cls = bb.cache.Cache.virtualfn2realfn(filename)
|
||||
if cls:
|
||||
continue
|
||||
|
||||
basename = os.path.basename(filename)
|
||||
appends = self.tinfoil.cooker.collection.get_file_appends(basename)
|
||||
if appends:
|
||||
appended.append((basename, list(appends)))
|
||||
else:
|
||||
notappended.append(basename)
|
||||
return appended, notappended
|
||||
|
||||
def do_show_cross_depends(self, args):
|
||||
"""Show dependencies between recipes that cross layer boundaries.
|
||||
|
||||
Figure out the dependencies between recipes that cross layer boundaries.
|
||||
|
||||
NOTE: .bbappend files can impact the dependencies.
|
||||
"""
|
||||
ignore_layers = (args.ignore or '').split(',')
|
||||
|
||||
pkg_fn = self.tinfoil.cooker_data.pkg_fn
|
||||
bbpath = str(self.tinfoil.config_data.getVar('BBPATH', True))
|
||||
self.require_re = re.compile(r"require\s+(.+)")
|
||||
self.include_re = re.compile(r"include\s+(.+)")
|
||||
self.inherit_re = re.compile(r"inherit\s+(.+)")
|
||||
|
||||
global_inherit = (self.tinfoil.config_data.getVar('INHERIT', True) or "").split()
|
||||
|
||||
# The bb's DEPENDS and RDEPENDS
|
||||
for f in pkg_fn:
|
||||
f = bb.cache.Cache.virtualfn2realfn(f)[0]
|
||||
# Get the layername that the file is in
|
||||
layername = self.get_file_layer(f)
|
||||
|
||||
# The DEPENDS
|
||||
deps = self.tinfoil.cooker_data.deps[f]
|
||||
for pn in deps:
|
||||
if pn in self.tinfoil.cooker_data.pkg_pn:
|
||||
best = bb.providers.findBestProvider(pn,
|
||||
self.tinfoil.config_data,
|
||||
self.tinfoil.cooker_data,
|
||||
self.tinfoil.cooker_data.pkg_pn)
|
||||
self.check_cross_depends("DEPENDS", layername, f, best[3], args.filenames, ignore_layers)
|
||||
|
||||
# The RDPENDS
|
||||
all_rdeps = self.tinfoil.cooker_data.rundeps[f].values()
|
||||
# Remove the duplicated or null one.
|
||||
sorted_rdeps = {}
|
||||
# The all_rdeps is the list in list, so we need two for loops
|
||||
for k1 in all_rdeps:
|
||||
for k2 in k1:
|
||||
sorted_rdeps[k2] = 1
|
||||
all_rdeps = sorted_rdeps.keys()
|
||||
for rdep in all_rdeps:
|
||||
all_p = bb.providers.getRuntimeProviders(self.tinfoil.cooker_data, rdep)
|
||||
if all_p:
|
||||
if f in all_p:
|
||||
# The recipe provides this one itself, ignore
|
||||
continue
|
||||
best = bb.providers.filterProvidersRunTime(all_p, rdep,
|
||||
self.tinfoil.config_data,
|
||||
self.tinfoil.cooker_data)[0][0]
|
||||
self.check_cross_depends("RDEPENDS", layername, f, best, args.filenames, ignore_layers)
|
||||
|
||||
# The RRECOMMENDS
|
||||
all_rrecs = self.tinfoil.cooker_data.runrecs[f].values()
|
||||
# Remove the duplicated or null one.
|
||||
sorted_rrecs = {}
|
||||
# The all_rrecs is the list in list, so we need two for loops
|
||||
for k1 in all_rrecs:
|
||||
for k2 in k1:
|
||||
sorted_rrecs[k2] = 1
|
||||
all_rrecs = sorted_rrecs.keys()
|
||||
for rrec in all_rrecs:
|
||||
all_p = bb.providers.getRuntimeProviders(self.tinfoil.cooker_data, rrec)
|
||||
if all_p:
|
||||
if f in all_p:
|
||||
# The recipe provides this one itself, ignore
|
||||
continue
|
||||
best = bb.providers.filterProvidersRunTime(all_p, rrec,
|
||||
self.tinfoil.config_data,
|
||||
self.tinfoil.cooker_data)[0][0]
|
||||
self.check_cross_depends("RRECOMMENDS", layername, f, best, args.filenames, ignore_layers)
|
||||
|
||||
# The inherit class
|
||||
cls_re = re.compile('classes/')
|
||||
if f in self.tinfoil.cooker_data.inherits:
|
||||
inherits = self.tinfoil.cooker_data.inherits[f]
|
||||
for cls in inherits:
|
||||
# The inherits' format is [classes/cls, /path/to/classes/cls]
|
||||
# ignore the classes/cls.
|
||||
if not cls_re.match(cls):
|
||||
classname = os.path.splitext(os.path.basename(cls))[0]
|
||||
if classname in global_inherit:
|
||||
continue
|
||||
inherit_layername = self.get_file_layer(cls)
|
||||
if inherit_layername != layername and not inherit_layername in ignore_layers:
|
||||
if not args.filenames:
|
||||
f_short = self.remove_layer_prefix(f)
|
||||
cls = self.remove_layer_prefix(cls)
|
||||
else:
|
||||
f_short = f
|
||||
logger.plain("%s inherits %s" % (f_short, cls))
|
||||
|
||||
# The 'require/include xxx' in the bb file
|
||||
pv_re = re.compile(r"\${PV}")
|
||||
with open(f, 'r') as fnfile:
|
||||
line = fnfile.readline()
|
||||
while line:
|
||||
m, keyword = self.match_require_include(line)
|
||||
# Found the 'require/include xxxx'
|
||||
if m:
|
||||
needed_file = m.group(1)
|
||||
# Replace the ${PV} with the real PV
|
||||
if pv_re.search(needed_file) and f in self.tinfoil.cooker_data.pkg_pepvpr:
|
||||
pv = self.tinfoil.cooker_data.pkg_pepvpr[f][1]
|
||||
needed_file = re.sub(r"\${PV}", pv, needed_file)
|
||||
self.print_cross_files(bbpath, keyword, layername, f, needed_file, args.filenames, ignore_layers)
|
||||
line = fnfile.readline()
|
||||
|
||||
# The "require/include xxx" in conf/machine/*.conf, .inc and .bbclass
|
||||
conf_re = re.compile(".*/conf/machine/[^\/]*\.conf$")
|
||||
inc_re = re.compile(".*\.inc$")
|
||||
# The "inherit xxx" in .bbclass
|
||||
bbclass_re = re.compile(".*\.bbclass$")
|
||||
for layerdir in self.bblayers:
|
||||
layername = self.get_layer_name(layerdir)
|
||||
for dirpath, dirnames, filenames in os.walk(layerdir):
|
||||
for name in filenames:
|
||||
f = os.path.join(dirpath, name)
|
||||
s = conf_re.match(f) or inc_re.match(f) or bbclass_re.match(f)
|
||||
if s:
|
||||
with open(f, 'r') as ffile:
|
||||
line = ffile.readline()
|
||||
while line:
|
||||
m, keyword = self.match_require_include(line)
|
||||
# Only bbclass has the "inherit xxx" here.
|
||||
bbclass=""
|
||||
if not m and f.endswith(".bbclass"):
|
||||
m, keyword = self.match_inherit(line)
|
||||
bbclass=".bbclass"
|
||||
# Find a 'require/include xxxx'
|
||||
if m:
|
||||
self.print_cross_files(bbpath, keyword, layername, f, m.group(1) + bbclass, args.filenames, ignore_layers)
|
||||
line = ffile.readline()
|
||||
|
||||
def print_cross_files(self, bbpath, keyword, layername, f, needed_filename, show_filenames, ignore_layers):
|
||||
"""Print the depends that crosses a layer boundary"""
|
||||
needed_file = bb.utils.which(bbpath, needed_filename)
|
||||
if needed_file:
|
||||
# Which layer is this file from
|
||||
needed_layername = self.get_file_layer(needed_file)
|
||||
if needed_layername != layername and not needed_layername in ignore_layers:
|
||||
if not show_filenames:
|
||||
f = self.remove_layer_prefix(f)
|
||||
needed_file = self.remove_layer_prefix(needed_file)
|
||||
logger.plain("%s %s %s" %(f, keyword, needed_file))
|
||||
|
||||
def match_inherit(self, line):
|
||||
"""Match the inherit xxx line"""
|
||||
return (self.inherit_re.match(line), "inherits")
|
||||
|
||||
def match_require_include(self, line):
|
||||
"""Match the require/include xxx line"""
|
||||
m = self.require_re.match(line)
|
||||
keyword = "requires"
|
||||
if not m:
|
||||
m = self.include_re.match(line)
|
||||
keyword = "includes"
|
||||
return (m, keyword)
|
||||
|
||||
def check_cross_depends(self, keyword, layername, f, needed_file, show_filenames, ignore_layers):
|
||||
"""Print the DEPENDS/RDEPENDS file that crosses a layer boundary"""
|
||||
best_realfn = bb.cache.Cache.virtualfn2realfn(needed_file)[0]
|
||||
needed_layername = self.get_file_layer(best_realfn)
|
||||
if needed_layername != layername and not needed_layername in ignore_layers:
|
||||
if not show_filenames:
|
||||
f = self.remove_layer_prefix(f)
|
||||
best_realfn = self.remove_layer_prefix(best_realfn)
|
||||
|
||||
logger.plain("%s %s %s" % (f, keyword, best_realfn))
|
||||
|
||||
def register_commands(self, sp):
|
||||
self.add_command(sp, 'show-layers', self.do_show_layers, parserecipes=False)
|
||||
|
||||
parser_show_overlayed = self.add_command(sp, 'show-overlayed', self.do_show_overlayed)
|
||||
parser_show_overlayed.add_argument('-f', '--filenames', help='instead of the default formatting, list filenames of higher priority recipes with the ones they overlay indented underneath', action='store_true')
|
||||
parser_show_overlayed.add_argument('-s', '--same-version', help='only list overlayed recipes where the version is the same', action='store_true')
|
||||
|
||||
parser_show_recipes = self.add_command(sp, 'show-recipes', self.do_show_recipes)
|
||||
parser_show_recipes.add_argument('-f', '--filenames', help='instead of the default formatting, list filenames of higher priority recipes with the ones they overlay indented underneath', action='store_true')
|
||||
parser_show_recipes.add_argument('-m', '--multiple', help='only list where multiple recipes (in the same layer or different layers) exist for the same recipe name', action='store_true')
|
||||
parser_show_recipes.add_argument('-i', '--inherits', help='only list recipes that inherit the named class', metavar='CLASS', default='')
|
||||
parser_show_recipes.add_argument('pnspec', nargs='?', help='optional recipe name specification (wildcards allowed, enclose in quotes to avoid shell expansion)')
|
||||
|
||||
self.add_command(sp, 'show-appends', self.do_show_appends)
|
||||
|
||||
parser_show_cross_depends = self.add_command(sp, 'show-cross-depends', self.do_show_cross_depends)
|
||||
parser_show_cross_depends.add_argument('-f', '--filenames', help='show full file path', action='store_true')
|
||||
parser_show_cross_depends.add_argument('-i', '--ignore', help='ignore dependencies on items in the specified layer(s) (split multiple layer names with commas, no spaces)', metavar='LAYERNAME')
|
||||
@@ -17,8 +17,8 @@ http://www.crummy.com/software/BeautifulSoup/bs4/doc/
|
||||
"""
|
||||
|
||||
__author__ = "Leonard Richardson (leonardr@segfault.org)"
|
||||
__version__ = "4.4.1"
|
||||
__copyright__ = "Copyright (c) 2004-2015 Leonard Richardson"
|
||||
__version__ = "4.3.2"
|
||||
__copyright__ = "Copyright (c) 2004-2013 Leonard Richardson"
|
||||
__license__ = "MIT"
|
||||
|
||||
__all__ = ['BeautifulSoup']
|
||||
@@ -45,7 +45,7 @@ from .element import (
|
||||
|
||||
# The very first thing we do is give a useful error if someone is
|
||||
# running this code under Python 3 without converting it.
|
||||
'You are trying to run the Python 2 version of Beautiful Soup under Python 3. This will not work.'!='You need to convert the code, either by installing it (`python setup.py install`) or by running 2to3 (`2to3 -w bs4`).'
|
||||
syntax_error = u'You are trying to run the Python 2 version of Beautiful Soup under Python 3. This will not work. You need to convert the code, either by installing it (`python setup.py install`) or by running 2to3 (`2to3 -w bs4`).'
|
||||
|
||||
class BeautifulSoup(Tag):
|
||||
"""
|
||||
@@ -69,7 +69,7 @@ class BeautifulSoup(Tag):
|
||||
like HTML's <br> tag), call handle_starttag and then
|
||||
handle_endtag.
|
||||
"""
|
||||
ROOT_TAG_NAME = '[document]'
|
||||
ROOT_TAG_NAME = u'[document]'
|
||||
|
||||
# If the end-user gives no indication which tree builder they
|
||||
# want, look for one with these features.
|
||||
@@ -77,11 +77,8 @@ class BeautifulSoup(Tag):
|
||||
|
||||
ASCII_SPACES = '\x20\x0a\x09\x0c\x0d'
|
||||
|
||||
NO_PARSER_SPECIFIED_WARNING = "No parser was explicitly specified, so I'm using the best available %(markup_type)s parser for this system (\"%(parser)s\"). This usually isn't a problem, but if you run this code on another system, or in a different virtual environment, it may use a different parser and behave differently.\n\nTo get rid of this warning, change this:\n\n BeautifulSoup([your markup])\n\nto this:\n\n BeautifulSoup([your markup], \"%(parser)s\")\n"
|
||||
|
||||
def __init__(self, markup="", features=None, builder=None,
|
||||
parse_only=None, from_encoding=None, exclude_encodings=None,
|
||||
**kwargs):
|
||||
parse_only=None, from_encoding=None, **kwargs):
|
||||
"""The Soup object is initialized as the 'root tag', and the
|
||||
provided markup (which can be a string or a file-like object)
|
||||
is fed into the underlying parser."""
|
||||
@@ -117,9 +114,9 @@ class BeautifulSoup(Tag):
|
||||
del kwargs['isHTML']
|
||||
warnings.warn(
|
||||
"BS4 does not respect the isHTML argument to the "
|
||||
"BeautifulSoup constructor. Suggest you use "
|
||||
"features='lxml' for HTML and features='lxml-xml' for "
|
||||
"XML.")
|
||||
"BeautifulSoup constructor. You can pass in features='html' "
|
||||
"or features='xml' to get a builder capable of handling "
|
||||
"one or the other.")
|
||||
|
||||
def deprecated_argument(old_name, new_name):
|
||||
if old_name in kwargs:
|
||||
@@ -138,13 +135,12 @@ class BeautifulSoup(Tag):
|
||||
"fromEncoding", "from_encoding")
|
||||
|
||||
if len(kwargs) > 0:
|
||||
arg = list(kwargs.keys()).pop()
|
||||
arg = kwargs.keys().pop()
|
||||
raise TypeError(
|
||||
"__init__() got an unexpected keyword argument '%s'" % arg)
|
||||
|
||||
if builder is None:
|
||||
original_features = features
|
||||
if isinstance(features, str):
|
||||
if isinstance(features, basestring):
|
||||
features = [features]
|
||||
if features is None or len(features) == 0:
|
||||
features = self.DEFAULT_BUILDER_FEATURES
|
||||
@@ -155,16 +151,6 @@ class BeautifulSoup(Tag):
|
||||
"requested: %s. Do you need to install a parser library?"
|
||||
% ",".join(features))
|
||||
builder = builder_class()
|
||||
if not (original_features == builder.NAME or
|
||||
original_features in builder.ALTERNATE_NAMES):
|
||||
if builder.is_xml:
|
||||
markup_type = "XML"
|
||||
else:
|
||||
markup_type = "HTML"
|
||||
warnings.warn(self.NO_PARSER_SPECIFIED_WARNING % dict(
|
||||
parser=builder.NAME,
|
||||
markup_type=markup_type))
|
||||
|
||||
self.builder = builder
|
||||
self.is_xml = builder.is_xml
|
||||
self.builder.soup = self
|
||||
@@ -178,7 +164,7 @@ class BeautifulSoup(Tag):
|
||||
# involving passing non-markup to Beautiful Soup.
|
||||
# Beautiful Soup will still parse the input as markup,
|
||||
# just in case that's what the user really wants.
|
||||
if (isinstance(markup, str)
|
||||
if (isinstance(markup, unicode)
|
||||
and not os.path.supports_unicode_filenames):
|
||||
possible_filename = markup.encode("utf8")
|
||||
else:
|
||||
@@ -186,30 +172,25 @@ class BeautifulSoup(Tag):
|
||||
is_file = False
|
||||
try:
|
||||
is_file = os.path.exists(possible_filename)
|
||||
except Exception as e:
|
||||
except Exception, e:
|
||||
# This is almost certainly a problem involving
|
||||
# characters not valid in filenames on this
|
||||
# system. Just let it go.
|
||||
pass
|
||||
if is_file:
|
||||
if isinstance(markup, str):
|
||||
markup = markup.encode("utf8")
|
||||
warnings.warn(
|
||||
'"%s" looks like a filename, not markup. You should probably open this file and pass the filehandle into Beautiful Soup.' % markup)
|
||||
if markup[:5] == "http:" or markup[:6] == "https:":
|
||||
# TODO: This is ugly but I couldn't get it to work in
|
||||
# Python 3 otherwise.
|
||||
if ((isinstance(markup, bytes) and not b' ' in markup)
|
||||
or (isinstance(markup, str) and not ' ' in markup)):
|
||||
if isinstance(markup, str):
|
||||
markup = markup.encode("utf8")
|
||||
or (isinstance(markup, unicode) and not u' ' in markup)):
|
||||
warnings.warn(
|
||||
'"%s" looks like a URL. Beautiful Soup is not an HTTP client. You should probably use an HTTP client to get the document behind the URL, and feed that document to Beautiful Soup.' % markup)
|
||||
|
||||
for (self.markup, self.original_encoding, self.declared_html_encoding,
|
||||
self.contains_replacement_characters) in (
|
||||
self.builder.prepare_markup(
|
||||
markup, from_encoding, exclude_encodings=exclude_encodings)):
|
||||
self.builder.prepare_markup(markup, from_encoding)):
|
||||
self.reset()
|
||||
try:
|
||||
self._feed()
|
||||
@@ -222,16 +203,6 @@ class BeautifulSoup(Tag):
|
||||
self.markup = None
|
||||
self.builder.soup = None
|
||||
|
||||
def __copy__(self):
|
||||
return type(self)(self.encode(), builder=self.builder)
|
||||
|
||||
def __getstate__(self):
|
||||
# Frequently a tree builder can't be pickled.
|
||||
d = dict(self.__dict__)
|
||||
if 'builder' in d and not self.builder.picklable:
|
||||
del d['builder']
|
||||
return d
|
||||
|
||||
def _feed(self):
|
||||
# Convert the document to Unicode.
|
||||
self.builder.reset()
|
||||
@@ -258,7 +229,9 @@ class BeautifulSoup(Tag):
|
||||
|
||||
def new_string(self, s, subclass=NavigableString):
|
||||
"""Create a new NavigableString associated with this soup."""
|
||||
return subclass(s)
|
||||
navigable = subclass(s)
|
||||
navigable.setup()
|
||||
return navigable
|
||||
|
||||
def insert_before(self, successor):
|
||||
raise NotImplementedError("BeautifulSoup objects don't support insert_before().")
|
||||
@@ -286,7 +259,7 @@ class BeautifulSoup(Tag):
|
||||
|
||||
def endData(self, containerClass=NavigableString):
|
||||
if self.current_data:
|
||||
current_data = ''.join(self.current_data)
|
||||
current_data = u''.join(self.current_data)
|
||||
# If whitespace is not preserved, and this string contains
|
||||
# nothing but ASCII spaces, replace it with a single space
|
||||
# or newline.
|
||||
@@ -317,49 +290,14 @@ class BeautifulSoup(Tag):
|
||||
def object_was_parsed(self, o, parent=None, most_recent_element=None):
|
||||
"""Add an object to the parse tree."""
|
||||
parent = parent or self.currentTag
|
||||
previous_element = most_recent_element or self._most_recent_element
|
||||
|
||||
next_element = previous_sibling = next_sibling = None
|
||||
if isinstance(o, Tag):
|
||||
next_element = o.next_element
|
||||
next_sibling = o.next_sibling
|
||||
previous_sibling = o.previous_sibling
|
||||
if not previous_element:
|
||||
previous_element = o.previous_element
|
||||
|
||||
o.setup(parent, previous_element, next_element, previous_sibling, next_sibling)
|
||||
most_recent_element = most_recent_element or self._most_recent_element
|
||||
o.setup(parent, most_recent_element)
|
||||
|
||||
if most_recent_element is not None:
|
||||
most_recent_element.next_element = o
|
||||
self._most_recent_element = o
|
||||
parent.contents.append(o)
|
||||
|
||||
if parent.next_sibling:
|
||||
# This node is being inserted into an element that has
|
||||
# already been parsed. Deal with any dangling references.
|
||||
index = parent.contents.index(o)
|
||||
if index == 0:
|
||||
previous_element = parent
|
||||
previous_sibling = None
|
||||
else:
|
||||
previous_element = previous_sibling = parent.contents[index-1]
|
||||
if index == len(parent.contents)-1:
|
||||
next_element = parent.next_sibling
|
||||
next_sibling = None
|
||||
else:
|
||||
next_element = next_sibling = parent.contents[index+1]
|
||||
|
||||
o.previous_element = previous_element
|
||||
if previous_element:
|
||||
previous_element.next_element = o
|
||||
o.next_element = next_element
|
||||
if next_element:
|
||||
next_element.previous_element = o
|
||||
o.next_sibling = next_sibling
|
||||
if next_sibling:
|
||||
next_sibling.previous_sibling = o
|
||||
o.previous_sibling = previous_sibling
|
||||
if previous_sibling:
|
||||
previous_sibling.next_sibling = o
|
||||
|
||||
def _popToTag(self, name, nsprefix=None, inclusivePop=True):
|
||||
"""Pops the tag stack up to and including the most recent
|
||||
instance of the given tag. If inclusivePop is false, pops the tag
|
||||
@@ -429,9 +367,9 @@ class BeautifulSoup(Tag):
|
||||
encoding_part = ''
|
||||
if eventual_encoding != None:
|
||||
encoding_part = ' encoding="%s"' % eventual_encoding
|
||||
prefix = '<?xml version="1.0"%s?>\n' % encoding_part
|
||||
prefix = u'<?xml version="1.0"%s?>\n' % encoding_part
|
||||
else:
|
||||
prefix = ''
|
||||
prefix = u''
|
||||
if not pretty_print:
|
||||
indent_level = None
|
||||
else:
|
||||
@@ -465,4 +403,4 @@ class FeatureNotFound(ValueError):
|
||||
if __name__ == '__main__':
|
||||
import sys
|
||||
soup = BeautifulSoup(sys.stdin)
|
||||
print(soup.prettify())
|
||||
print soup.prettify()
|
||||
|
||||
@@ -80,12 +80,9 @@ builder_registry = TreeBuilderRegistry()
|
||||
class TreeBuilder(object):
|
||||
"""Turn a document into a Beautiful Soup object tree."""
|
||||
|
||||
NAME = "[Unknown tree builder]"
|
||||
ALTERNATE_NAMES = []
|
||||
features = []
|
||||
|
||||
is_xml = False
|
||||
picklable = False
|
||||
preserve_whitespace_tags = set()
|
||||
empty_element_tags = None # A tag will be considered an empty-element
|
||||
# tag when and only when it has no contents.
|
||||
@@ -156,13 +153,13 @@ class TreeBuilder(object):
|
||||
universal = self.cdata_list_attributes.get('*', [])
|
||||
tag_specific = self.cdata_list_attributes.get(
|
||||
tag_name.lower(), None)
|
||||
for attr in list(attrs.keys()):
|
||||
for attr in attrs.keys():
|
||||
if attr in universal or (tag_specific and attr in tag_specific):
|
||||
# We have a "class"-type attribute whose string
|
||||
# value is a whitespace-separated list of
|
||||
# values. Split it into a list.
|
||||
value = attrs[attr]
|
||||
if isinstance(value, str):
|
||||
if isinstance(value, basestring):
|
||||
values = whitespace_re.split(value)
|
||||
else:
|
||||
# html5lib sometimes calls setAttributes twice
|
||||
|
||||
@@ -2,7 +2,6 @@ __all__ = [
|
||||
'HTML5TreeBuilder',
|
||||
]
|
||||
|
||||
from pdb import set_trace
|
||||
import warnings
|
||||
from bs4.builder import (
|
||||
PERMISSIVE,
|
||||
@@ -10,12 +9,16 @@ from bs4.builder import (
|
||||
HTML_5,
|
||||
HTMLTreeBuilder,
|
||||
)
|
||||
from bs4.element import (
|
||||
NamespacedAttribute,
|
||||
whitespace_re,
|
||||
)
|
||||
from bs4.element import NamespacedAttribute
|
||||
import html5lib
|
||||
try:
|
||||
# html5lib >= 0.99999999/1.0b9
|
||||
from html5lib.treebuilders import base as treebuildersbase
|
||||
except ImportError:
|
||||
# html5lib <= 0.9999999/1.0b8
|
||||
from html5lib.treebuilders import _base as treebuildersbase
|
||||
from html5lib.constants import namespaces
|
||||
|
||||
from bs4.element import (
|
||||
Comment,
|
||||
Doctype,
|
||||
@@ -26,20 +29,11 @@ from bs4.element import (
|
||||
class HTML5TreeBuilder(HTMLTreeBuilder):
|
||||
"""Use html5lib to build a tree."""
|
||||
|
||||
NAME = "html5lib"
|
||||
features = ['html5lib', PERMISSIVE, HTML_5, HTML]
|
||||
|
||||
features = [NAME, PERMISSIVE, HTML_5, HTML]
|
||||
|
||||
def prepare_markup(self, markup, user_specified_encoding,
|
||||
document_declared_encoding=None, exclude_encodings=None):
|
||||
def prepare_markup(self, markup, user_specified_encoding):
|
||||
# Store the user-specified encoding for use later on.
|
||||
self.user_specified_encoding = user_specified_encoding
|
||||
|
||||
# document_declared_encoding and exclude_encodings aren't used
|
||||
# ATM because the html5lib TreeBuilder doesn't use
|
||||
# UnicodeDammit.
|
||||
if exclude_encodings:
|
||||
warnings.warn("You provided a value for exclude_encoding, but the html5lib tree builder doesn't support exclude_encoding.")
|
||||
yield (markup, None, None, False)
|
||||
|
||||
# These methods are defined by Beautiful Soup.
|
||||
@@ -50,7 +44,7 @@ class HTML5TreeBuilder(HTMLTreeBuilder):
|
||||
doc = parser.parse(markup, encoding=self.user_specified_encoding)
|
||||
|
||||
# Set the character encoding detected by the tokenizer.
|
||||
if isinstance(markup, str):
|
||||
if isinstance(markup, unicode):
|
||||
# We need to special-case this because html5lib sets
|
||||
# charEncoding to UTF-8 if it gets Unicode input.
|
||||
doc.original_encoding = None
|
||||
@@ -64,10 +58,10 @@ class HTML5TreeBuilder(HTMLTreeBuilder):
|
||||
|
||||
def test_fragment_to_document(self, fragment):
|
||||
"""See `TreeBuilder`."""
|
||||
return '<html><head></head><body>%s</body></html>' % fragment
|
||||
return u'<html><head></head><body>%s</body></html>' % fragment
|
||||
|
||||
|
||||
class TreeBuilderForHtml5lib(html5lib.treebuilders._base.TreeBuilder):
|
||||
class TreeBuilderForHtml5lib(treebuildersbase.TreeBuilder):
|
||||
|
||||
def __init__(self, soup, namespaceHTMLElements):
|
||||
self.soup = soup
|
||||
@@ -105,7 +99,7 @@ class TreeBuilderForHtml5lib(html5lib.treebuilders._base.TreeBuilder):
|
||||
return self.soup
|
||||
|
||||
def getFragment(self):
|
||||
return html5lib.treebuilders._base.TreeBuilder.getFragment(self).element
|
||||
return treebuildersbase.TreeBuilder.getFragment(self).element
|
||||
|
||||
class AttrList(object):
|
||||
def __init__(self, element):
|
||||
@@ -114,16 +108,7 @@ class AttrList(object):
|
||||
def __iter__(self):
|
||||
return list(self.attrs.items()).__iter__()
|
||||
def __setitem__(self, name, value):
|
||||
# If this attribute is a multi-valued attribute for this element,
|
||||
# turn its value into a list.
|
||||
list_attr = HTML5TreeBuilder.cdata_list_attributes
|
||||
if (name in list_attr['*']
|
||||
or (self.element.name in list_attr
|
||||
and name in list_attr[self.element.name])):
|
||||
# A node that is being cloned may have already undergone
|
||||
# this procedure.
|
||||
if not isinstance(value, list):
|
||||
value = whitespace_re.split(value)
|
||||
"set attr", name, value
|
||||
self.element[name] = value
|
||||
def items(self):
|
||||
return list(self.attrs.items())
|
||||
@@ -137,16 +122,16 @@ class AttrList(object):
|
||||
return name in list(self.attrs.keys())
|
||||
|
||||
|
||||
class Element(html5lib.treebuilders._base.Node):
|
||||
class Element(treebuildersbase.Node):
|
||||
def __init__(self, element, soup, namespace):
|
||||
html5lib.treebuilders._base.Node.__init__(self, element.name)
|
||||
treebuildersbase.Node.__init__(self, element.name)
|
||||
self.element = element
|
||||
self.soup = soup
|
||||
self.namespace = namespace
|
||||
|
||||
def appendChild(self, node):
|
||||
string_child = child = None
|
||||
if isinstance(node, str):
|
||||
if isinstance(node, basestring):
|
||||
# Some other piece of code decided to pass in a string
|
||||
# instead of creating a TextElement object to contain the
|
||||
# string.
|
||||
@@ -161,7 +146,7 @@ class Element(html5lib.treebuilders._base.Node):
|
||||
else:
|
||||
child = node.element
|
||||
|
||||
if not isinstance(child, str) and child.parent is not None:
|
||||
if not isinstance(child, basestring) and child.parent is not None:
|
||||
node.element.extract()
|
||||
|
||||
if (string_child and self.element.contents
|
||||
@@ -174,7 +159,7 @@ class Element(html5lib.treebuilders._base.Node):
|
||||
old_element.replace_with(new_element)
|
||||
self.soup._most_recent_element = new_element
|
||||
else:
|
||||
if isinstance(node, str):
|
||||
if isinstance(node, basestring):
|
||||
# Create a brand new NavigableString from this string.
|
||||
child = self.soup.new_string(node)
|
||||
|
||||
@@ -183,12 +168,6 @@ class Element(html5lib.treebuilders._base.Node):
|
||||
# immediately after the parent, if it has no children.)
|
||||
if self.element.contents:
|
||||
most_recent_element = self.element._last_descendant(False)
|
||||
elif self.element.next_element is not None:
|
||||
# Something from further ahead in the parse tree is
|
||||
# being inserted into this earlier element. This is
|
||||
# very annoying because it means an expensive search
|
||||
# for the last element in the tree.
|
||||
most_recent_element = self.soup._last_descendant()
|
||||
else:
|
||||
most_recent_element = self.element
|
||||
|
||||
@@ -200,7 +179,6 @@ class Element(html5lib.treebuilders._base.Node):
|
||||
return AttrList(self.element)
|
||||
|
||||
def setAttributes(self, attributes):
|
||||
|
||||
if attributes is not None and len(attributes) > 0:
|
||||
|
||||
converted_attributes = []
|
||||
@@ -212,7 +190,7 @@ class Element(html5lib.treebuilders._base.Node):
|
||||
|
||||
self.soup.builder._replace_cdata_list_attribute_values(
|
||||
self.name, attributes)
|
||||
for name, value in list(attributes.items()):
|
||||
for name, value in attributes.items():
|
||||
self.element[name] = value
|
||||
|
||||
# The attributes may contain variables that need substitution.
|
||||
@@ -247,9 +225,6 @@ class Element(html5lib.treebuilders._base.Node):
|
||||
|
||||
def reparentChildren(self, new_parent):
|
||||
"""Move all of this tag's children into another tag."""
|
||||
# print "MOVE", self.element.contents
|
||||
# print "FROM", self.element
|
||||
# print "TO", new_parent.element
|
||||
element = self.element
|
||||
new_parent_element = new_parent.element
|
||||
# Determine what this tag's next_element will be once all the children
|
||||
@@ -268,28 +243,17 @@ class Element(html5lib.treebuilders._base.Node):
|
||||
new_parents_last_descendant_next_element = new_parent_element.next_element
|
||||
|
||||
to_append = element.contents
|
||||
append_after = new_parent_element.contents
|
||||
append_after = new_parent.element.contents
|
||||
if len(to_append) > 0:
|
||||
# Set the first child's previous_element and previous_sibling
|
||||
# to elements within the new parent
|
||||
first_child = to_append[0]
|
||||
if new_parents_last_descendant:
|
||||
first_child.previous_element = new_parents_last_descendant
|
||||
else:
|
||||
first_child.previous_element = new_parent_element
|
||||
first_child.previous_element = new_parents_last_descendant
|
||||
first_child.previous_sibling = new_parents_last_child
|
||||
if new_parents_last_descendant:
|
||||
new_parents_last_descendant.next_element = first_child
|
||||
else:
|
||||
new_parent_element.next_element = first_child
|
||||
if new_parents_last_child:
|
||||
new_parents_last_child.next_sibling = first_child
|
||||
|
||||
# Fix the last child's next_element and next_sibling
|
||||
last_child = to_append[-1]
|
||||
last_child.next_element = new_parents_last_descendant_next_element
|
||||
if new_parents_last_descendant_next_element:
|
||||
new_parents_last_descendant_next_element.previous_element = last_child
|
||||
last_child.next_sibling = None
|
||||
|
||||
for child in to_append:
|
||||
@@ -300,10 +264,6 @@ class Element(html5lib.treebuilders._base.Node):
|
||||
element.contents = []
|
||||
element.next_element = final_next_element
|
||||
|
||||
# print "DONE WITH MOVE"
|
||||
# print "FROM", self.element
|
||||
# print "TO", new_parent_element
|
||||
|
||||
def cloneNode(self):
|
||||
tag = self.soup.new_tag(self.element.name, self.namespace)
|
||||
node = Element(tag, self.soup, self.namespace)
|
||||
@@ -324,7 +284,7 @@ class Element(html5lib.treebuilders._base.Node):
|
||||
|
||||
class TextNode(Element):
|
||||
def __init__(self, element, soup):
|
||||
html5lib.treebuilders._base.Node.__init__(self, None)
|
||||
treebuildersbase.Node.__init__(self, None)
|
||||
self.element = element
|
||||
self.soup = soup
|
||||
|
||||
|
||||
@@ -4,16 +4,10 @@ __all__ = [
'HTMLParserTreeBuilder',
]

from html.parser import HTMLParser

try:
from html.parser import HTMLParseError
except ImportError as e:
# HTMLParseError is removed in Python 3.5. Since it can never be
# thrown in 3.5, we can just define our own class as a placeholder.
class HTMLParseError(Exception):
pass

from HTMLParser import (
HTMLParser,
HTMLParseError,
)
import sys
import warnings

@@ -25,10 +19,10 @@ import warnings
# At the end of this file, we monkeypatch HTMLParser so that
# strict=True works well on Python 3.2.2.
major, minor, release = sys.version_info[:3]
CONSTRUCTOR_TAKES_STRICT = major == 3 and minor == 2 and release >= 3
CONSTRUCTOR_STRICT_IS_DEPRECATED = major == 3 and minor == 3
CONSTRUCTOR_TAKES_CONVERT_CHARREFS = major == 3 and minor >= 4

CONSTRUCTOR_TAKES_STRICT = (
major > 3
or (major == 3 and minor > 2)
or (major == 3 and minor == 2 and release >= 3))

from bs4.element import (
CData,
@@ -69,8 +63,7 @@ class BeautifulSoupHTMLParser(HTMLParser):

def handle_charref(self, name):
# XXX workaround for a bug in HTMLParser. Remove this once
# it's fixed in all supported versions.
# http://bugs.python.org/issue13633
# it's fixed.
if name.startswith('x'):
real_name = int(name.lstrip('x'), 16)
elif name.startswith('X'):
@@ -79,9 +72,9 @@ class BeautifulSoupHTMLParser(HTMLParser):
real_name = int(name)

try:
data = chr(real_name)
except (ValueError, OverflowError) as e:
data = "\N{REPLACEMENT CHARACTER}"
data = unichr(real_name)
except (ValueError, OverflowError), e:
data = u"\N{REPLACEMENT CHARACTER}"

self.handle_data(data)

@@ -120,6 +113,14 @@ class BeautifulSoupHTMLParser(HTMLParser):

def handle_pi(self, data):
self.soup.endData()
if data.endswith("?") and data.lower().startswith("xml"):
# "An XHTML processing instruction using the trailing '?'
# will cause the '?' to be included in data." - HTMLParser
# docs.
#
# Strip the question mark so we don't end up with two
# question marks.
data = data[:-1]
self.soup.handle_data(data)
self.soup.endData(ProcessingInstruction)

@@ -127,31 +128,26 @@ class BeautifulSoupHTMLParser(HTMLParser):
class HTMLParserTreeBuilder(HTMLTreeBuilder):

is_xml = False
picklable = True
NAME = HTMLPARSER
features = [NAME, HTML, STRICT]
features = [HTML, STRICT, HTMLPARSER]

def __init__(self, *args, **kwargs):
if CONSTRUCTOR_TAKES_STRICT and not CONSTRUCTOR_STRICT_IS_DEPRECATED:
if CONSTRUCTOR_TAKES_STRICT:
kwargs['strict'] = False
if CONSTRUCTOR_TAKES_CONVERT_CHARREFS:
kwargs['convert_charrefs'] = False
self.parser_args = (args, kwargs)

def prepare_markup(self, markup, user_specified_encoding=None,
document_declared_encoding=None, exclude_encodings=None):
document_declared_encoding=None):
"""
:return: A 4-tuple (markup, original encoding, encoding
declared within markup, whether any characters had to be
replaced with REPLACEMENT CHARACTER).
"""
if isinstance(markup, str):
if isinstance(markup, unicode):
yield (markup, None, None, False)
return

try_encodings = [user_specified_encoding, document_declared_encoding]
dammit = UnicodeDammit(markup, try_encodings, is_html=True,
exclude_encodings=exclude_encodings)
dammit = UnicodeDammit(markup, try_encodings, is_html=True)
yield (dammit.markup, dammit.original_encoding,
dammit.declared_html_encoding,
dammit.contains_replacement_characters)
@@ -162,7 +158,7 @@ class HTMLParserTreeBuilder(HTMLTreeBuilder):
parser.soup = self.soup
try:
parser.feed(markup)
except HTMLParseError as e:
except HTMLParseError, e:
warnings.warn(RuntimeWarning(
"Python's built-in HTMLParser cannot parse the given document. This is not a bug in Beautiful Soup. The best solution is to install an external parser (lxml or html5lib), and use Beautiful Soup with that parser. See http://www.crummy.com/software/BeautifulSoup/bs4/doc/#installing-a-parser for help."))
raise e

@@ -4,15 +4,10 @@ __all__ = [
]

from io import BytesIO
from io import StringIO
from StringIO import StringIO
import collections
from lxml import etree
from bs4.element import (
Comment,
Doctype,
NamespacedAttribute,
ProcessingInstruction,
)
from bs4.element import Comment, Doctype, NamespacedAttribute
from bs4.builder import (
FAST,
HTML,
@@ -30,11 +25,8 @@ class LXMLTreeBuilderForXML(TreeBuilder):

is_xml = True

NAME = "lxml-xml"
ALTERNATE_NAMES = ["xml"]

# Well, it's permissive by XML parser standards.
features = [NAME, LXML, XML, FAST, PERMISSIVE]
features = [LXML, XML, FAST, PERMISSIVE]

CHUNK_SIZE = 512

@@ -78,7 +70,6 @@ class LXMLTreeBuilderForXML(TreeBuilder):
return (None, tag)

def prepare_markup(self, markup, user_specified_encoding=None,
exclude_encodings=None,
document_declared_encoding=None):
"""
:yield: A series of 4-tuples.
@@ -87,12 +78,12 @@ class LXMLTreeBuilderForXML(TreeBuilder):

Each 4-tuple represents a strategy for parsing the document.
"""
if isinstance(markup, str):
if isinstance(markup, unicode):
# We were given Unicode. Maybe lxml can parse Unicode on
# this system?
yield markup, None, document_declared_encoding, False

if isinstance(markup, str):
if isinstance(markup, unicode):
# No, apparently not. Convert the Unicode to UTF-8 and
# tell lxml to parse it as UTF-8.
yield (markup.encode("utf8"), "utf8",
@@ -104,15 +95,14 @@ class LXMLTreeBuilderForXML(TreeBuilder):
# the document as each one in turn.
is_html = not self.is_xml
try_encodings = [user_specified_encoding, document_declared_encoding]
detector = EncodingDetector(
markup, try_encodings, is_html, exclude_encodings)
detector = EncodingDetector(markup, try_encodings, is_html)
for encoding in detector.encodings:
yield (detector.markup, encoding, document_declared_encoding, False)

def feed(self, markup):
if isinstance(markup, bytes):
markup = BytesIO(markup)
elif isinstance(markup, str):
elif isinstance(markup, unicode):
markup = StringIO(markup)

# Call feed() at least once, even if the markup is empty,
@@ -127,7 +117,7 @@ class LXMLTreeBuilderForXML(TreeBuilder):
if len(data) != 0:
self.parser.feed(data)
self.parser.close()
except (UnicodeDecodeError, LookupError, etree.ParserError) as e:
except (UnicodeDecodeError, LookupError, etree.ParserError), e:
raise ParserRejectedMarkup(str(e))

def close(self):
@@ -145,12 +135,12 @@ class LXMLTreeBuilderForXML(TreeBuilder):
self.nsmaps.append(None)
elif len(nsmap) > 0:
# A new namespace mapping has come into play.
inverted_nsmap = dict((value, key) for key, value in list(nsmap.items()))
inverted_nsmap = dict((value, key) for key, value in nsmap.items())
self.nsmaps.append(inverted_nsmap)
# Also treat the namespace mapping as a set of attributes on the
# tag, so we can recreate it later.
attrs = attrs.copy()
for prefix, namespace in list(nsmap.items()):
for prefix, namespace in nsmap.items():
attribute = NamespacedAttribute(
"xmlns", prefix, "http://www.w3.org/2000/xmlns/")
attrs[attribute] = namespace
@@ -159,7 +149,7 @@ class LXMLTreeBuilderForXML(TreeBuilder):
# from lxml with namespaces attached to their names, and
# turn then into NamespacedAttribute objects.
new_attrs = {}
for attr, value in list(attrs.items()):
for attr, value in attrs.items():
namespace, attr = self._getNsTag(attr)
if namespace is None:
new_attrs[attr] = value
@@ -199,9 +189,7 @@ class LXMLTreeBuilderForXML(TreeBuilder):
self.nsmaps.pop()

def pi(self, target, data):
self.soup.endData()
self.soup.handle_data(target + ' ' + data)
self.soup.endData(ProcessingInstruction)
pass

def data(self, content):
self.soup.handle_data(content)
@@ -219,15 +207,12 @@ class LXMLTreeBuilderForXML(TreeBuilder):

def test_fragment_to_document(self, fragment):
"""See `TreeBuilder`."""
return '<?xml version="1.0" encoding="utf-8"?>\n%s' % fragment
return u'<?xml version="1.0" encoding="utf-8"?>\n%s' % fragment


class LXMLTreeBuilder(HTMLTreeBuilder, LXMLTreeBuilderForXML):

NAME = LXML
ALTERNATE_NAMES = ["lxml-html"]

features = ALTERNATE_NAMES + [NAME, HTML, FAST, PERMISSIVE]
features = [LXML, HTML, FAST, PERMISSIVE]
is_xml = False

def default_parser(self, encoding):
@@ -239,10 +224,10 @@ class LXMLTreeBuilder(HTMLTreeBuilder, LXMLTreeBuilderForXML):
self.parser = self.parser_for(encoding)
self.parser.feed(markup)
self.parser.close()
except (UnicodeDecodeError, LookupError, etree.ParserError) as e:
except (UnicodeDecodeError, LookupError, etree.ParserError), e:
raise ParserRejectedMarkup(str(e))


def test_fragment_to_document(self, fragment):
"""See `TreeBuilder`."""
return '<html><body>%s</body></html>' % fragment
return u'<html><body>%s</body></html>' % fragment

@@ -3,14 +3,12 @@

This library converts a bytestream to Unicode through any means
necessary. It is heavily based on code from Mark Pilgrim's Universal
Feed Parser. It works best on XML and HTML, but it does not rewrite the
Feed Parser. It works best on XML and XML, but it does not rewrite the
XML or HTML to reflect a new encoding; that's the tree builder's job.
"""
__license__ = "MIT"

from pdb import set_trace
import codecs
from html.entities import codepoint2name
from htmlentitydefs import codepoint2name
import re
import logging
import string
@@ -58,7 +56,7 @@ class EntitySubstitution(object):
reverse_lookup = {}
characters_for_re = []
for codepoint, name in list(codepoint2name.items()):
character = chr(codepoint)
character = unichr(codepoint)
if codepoint != 34:
# There's no point in turning the quotation mark into
# &quot;, unless it happens within an attribute value, which
@@ -214,11 +212,8 @@ class EncodingDetector:

5. Windows-1252.
"""
def __init__(self, markup, override_encodings=None, is_html=False,
exclude_encodings=None):
def __init__(self, markup, override_encodings=None, is_html=False):
self.override_encodings = override_encodings or []
exclude_encodings = exclude_encodings or []
self.exclude_encodings = set([x.lower() for x in exclude_encodings])
self.chardet_encoding = None
self.is_html = is_html
self.declared_encoding = None
@@ -229,8 +224,6 @@ class EncodingDetector:
def _usable(self, encoding, tried):
if encoding is not None:
encoding = encoding.lower()
if encoding in self.exclude_encodings:
return False
if encoding not in tried:
tried.add(encoding)
return True
@@ -273,9 +266,6 @@ class EncodingDetector:
def strip_byte_order_mark(cls, data):
"""If a byte-order mark is present, strip it and return the encoding it implies."""
encoding = None
if isinstance(data, str):
# Unicode data cannot have a byte-order mark.
return data, encoding
if (len(data) >= 4) and (data[:2] == b'\xfe\xff') \
and (data[2:4] != '\x00\x00'):
encoding = 'utf-16be'
@@ -316,7 +306,7 @@ class EncodingDetector:
declared_encoding_match = html_meta_re.search(markup, endpos=html_endpos)
if declared_encoding_match is not None:
declared_encoding = declared_encoding_match.groups()[0].decode(
'ascii', 'replace')
'ascii')
if declared_encoding:
return declared_encoding.lower()
return None
@@ -341,19 +331,18 @@ class UnicodeDammit:
]

def __init__(self, markup, override_encodings=[],
smart_quotes_to=None, is_html=False, exclude_encodings=[]):
smart_quotes_to=None, is_html=False):
self.smart_quotes_to = smart_quotes_to
self.tried_encodings = []
self.contains_replacement_characters = False
self.is_html = is_html

self.detector = EncodingDetector(
markup, override_encodings, is_html, exclude_encodings)
self.detector = EncodingDetector(markup, override_encodings, is_html)

# Short-circuit if the data is in Unicode to begin with.
if isinstance(markup, str) or markup == '':
if isinstance(markup, unicode) or markup == '':
self.markup = markup
self.unicode_markup = str(markup)
self.unicode_markup = unicode(markup)
self.original_encoding = None
return

@@ -436,7 +425,7 @@ class UnicodeDammit:
def _to_unicode(self, data, encoding, errors="strict"):
'''Given a string and its encoding, decodes the string into Unicode.
%encoding is a string recognized by encodings.aliases'''
return str(data, encoding, errors)
return unicode(data, encoding, errors)

@property
def declared_html_encoding(self):

@@ -1,10 +1,7 @@
"""Diagnostic functions, mainly for use when doing tech support."""

__license__ = "MIT"

import cProfile
from io import StringIO
from html.parser import HTMLParser
from StringIO import StringIO
from HTMLParser import HTMLParser
import bs4
from bs4 import BeautifulSoup, __version__
from bs4.builder import builder_registry
@@ -20,8 +17,8 @@ import cProfile

def diagnose(data):
"""Diagnostic suite for isolating common problems."""
print("Diagnostic running on Beautiful Soup %s" % __version__)
print("Python version %s" % sys.version)
print "Diagnostic running on Beautiful Soup %s" % __version__
print "Python version %s" % sys.version

basic_parsers = ["html.parser", "html5lib", "lxml"]
for name in basic_parsers:
@@ -30,53 +27,44 @@ def diagnose(data):
break
else:
basic_parsers.remove(name)
print((
print (
"I noticed that %s is not installed. Installing it may help." %
name))
name)

if 'lxml' in basic_parsers:
basic_parsers.append(["lxml", "xml"])
try:
from lxml import etree
print("Found lxml version %s" % ".".join(map(str,etree.LXML_VERSION)))
except ImportError as e:
print (
"lxml is not installed or couldn't be imported.")

from lxml import etree
print "Found lxml version %s" % ".".join(map(str,etree.LXML_VERSION))

if 'html5lib' in basic_parsers:
try:
import html5lib
print("Found html5lib version %s" % html5lib.__version__)
except ImportError as e:
print (
"html5lib is not installed or couldn't be imported.")
import html5lib
print "Found html5lib version %s" % html5lib.__version__

if hasattr(data, 'read'):
data = data.read()
elif os.path.exists(data):
print('"%s" looks like a filename. Reading data from the file.' % data)
print '"%s" looks like a filename. Reading data from the file.' % data
data = open(data).read()
elif data.startswith("http:") or data.startswith("https:"):
print('"%s" looks like a URL. Beautiful Soup is not an HTTP client.' % data)
print("You need to use some other library to get the document behind the URL, and feed that document to Beautiful Soup.")
print '"%s" looks like a URL. Beautiful Soup is not an HTTP client.' % data
print "You need to use some other library to get the document behind the URL, and feed that document to Beautiful Soup."
return
print()
print

for parser in basic_parsers:
print("Trying to parse your markup with %s" % parser)
print "Trying to parse your markup with %s" % parser
success = False
try:
soup = BeautifulSoup(data, parser)
success = True
except Exception as e:
print("%s could not parse the markup." % parser)
except Exception, e:
print "%s could not parse the markup." % parser
traceback.print_exc()
if success:
print("Here's what %s did with the markup:" % parser)
print(soup.prettify())
print "Here's what %s did with the markup:" % parser
print soup.prettify()

print("-" * 80)
print "-" * 80

def lxml_trace(data, html=True, **kwargs):
"""Print out the lxml events that occur during parsing.
@@ -86,7 +74,7 @@ def lxml_trace(data, html=True, **kwargs):
"""
from lxml import etree
for event, element in etree.iterparse(StringIO(data), html=html, **kwargs):
print(("%s, %4s, %s" % (event, element.tag, element.text)))
print("%s, %4s, %s" % (event, element.tag, element.text))

class AnnouncingParser(HTMLParser):
"""Announces HTMLParser parse events, without doing anything else."""
@@ -168,9 +156,9 @@ def rdoc(num_elements=1000):

def benchmark_parsers(num_elements=100000):
"""Very basic head-to-head performance benchmark."""
print("Comparative parser benchmark on Beautiful Soup %s" % __version__)
print "Comparative parser benchmark on Beautiful Soup %s" % __version__
data = rdoc(num_elements)
print("Generated a large invalid HTML document (%d bytes)." % len(data))
print "Generated a large invalid HTML document (%d bytes)." % len(data)

for parser in ["lxml", ["lxml", "html"], "html5lib", "html.parser"]:
success = False
@@ -179,24 +167,24 @@ def benchmark_parsers(num_elements=100000):
soup = BeautifulSoup(data, parser)
b = time.time()
success = True
except Exception as e:
print("%s could not parse the markup." % parser)
except Exception, e:
print "%s could not parse the markup." % parser
traceback.print_exc()
if success:
print("BS4+%s parsed the markup in %.2fs." % (parser, b-a))
print "BS4+%s parsed the markup in %.2fs." % (parser, b-a)

from lxml import etree
a = time.time()
etree.HTML(data)
b = time.time()
print("Raw lxml parsed the markup in %.2fs." % (b-a))
print "Raw lxml parsed the markup in %.2fs." % (b-a)

import html5lib
parser = html5lib.HTMLParser()
a = time.time()
parser.parse(data)
b = time.time()
print("Raw html5lib parsed the markup in %.2fs." % (b-a))
print "Raw html5lib parsed the markup in %.2fs." % (b-a)

def profile(num_elements=100000, parser="lxml"):

@@ -1,6 +1,3 @@
|
||||
__license__ = "MIT"
|
||||
|
||||
from pdb import set_trace
|
||||
import collections
|
||||
import re
|
||||
import sys
|
||||
@@ -24,22 +21,22 @@ def _alias(attr):
|
||||
return alias
|
||||
|
||||
|
||||
class NamespacedAttribute(str):
|
||||
class NamespacedAttribute(unicode):
|
||||
|
||||
def __new__(cls, prefix, name, namespace=None):
|
||||
if name is None:
|
||||
obj = str.__new__(cls, prefix)
|
||||
obj = unicode.__new__(cls, prefix)
|
||||
elif prefix is None:
|
||||
# Not really namespaced.
|
||||
obj = str.__new__(cls, name)
|
||||
obj = unicode.__new__(cls, name)
|
||||
else:
|
||||
obj = str.__new__(cls, prefix + ":" + name)
|
||||
obj = unicode.__new__(cls, prefix + ":" + name)
|
||||
obj.prefix = prefix
|
||||
obj.name = name
|
||||
obj.namespace = namespace
|
||||
return obj
|
||||
|
||||
class AttributeValueWithCharsetSubstitution(str):
|
||||
class AttributeValueWithCharsetSubstitution(unicode):
|
||||
"""A stand-in object for a character encoding specified in HTML."""
|
||||
|
||||
class CharsetMetaAttributeValue(AttributeValueWithCharsetSubstitution):
|
||||
@@ -50,7 +47,7 @@ class CharsetMetaAttributeValue(AttributeValueWithCharsetSubstitution):
|
||||
"""
|
||||
|
||||
def __new__(cls, original_value):
|
||||
obj = str.__new__(cls, original_value)
|
||||
obj = unicode.__new__(cls, original_value)
|
||||
obj.original_value = original_value
|
||||
return obj
|
||||
|
||||
@@ -73,9 +70,9 @@ class ContentMetaAttributeValue(AttributeValueWithCharsetSubstitution):
|
||||
match = cls.CHARSET_RE.search(original_value)
|
||||
if match is None:
|
||||
# No substitution necessary.
|
||||
return str.__new__(str, original_value)
|
||||
return unicode.__new__(unicode, original_value)
|
||||
|
||||
obj = str.__new__(cls, original_value)
|
||||
obj = unicode.__new__(cls, original_value)
|
||||
obj.original_value = original_value
|
||||
return obj
|
||||
|
||||
@@ -155,7 +152,7 @@ class PageElement(object):
|
||||
|
||||
def format_string(self, s, formatter='minimal'):
|
||||
"""Format the given string using the given formatter."""
|
||||
if not isinstance(formatter, collections.Callable):
|
||||
if not callable(formatter):
|
||||
formatter = self._formatter_for_name(formatter)
|
||||
if formatter is None:
|
||||
output = s
|
||||
@@ -188,40 +185,24 @@ class PageElement(object):
|
||||
return self.HTML_FORMATTERS.get(
|
||||
name, HTMLAwareEntitySubstitution.substitute_xml)
|
||||
|
||||
def setup(self, parent=None, previous_element=None, next_element=None,
|
||||
previous_sibling=None, next_sibling=None):
|
||||
def setup(self, parent=None, previous_element=None):
|
||||
"""Sets up the initial relations between this element and
|
||||
other elements."""
|
||||
self.parent = parent
|
||||
|
||||
self.previous_element = previous_element
|
||||
if previous_element is not None:
|
||||
self.previous_element.next_element = self
|
||||
|
||||
self.next_element = next_element
|
||||
if self.next_element:
|
||||
self.next_element.previous_element = self
|
||||
|
||||
self.next_sibling = next_sibling
|
||||
if self.next_sibling:
|
||||
self.next_sibling.previous_sibling = self
|
||||
|
||||
if (not previous_sibling
|
||||
and self.parent is not None and self.parent.contents):
|
||||
previous_sibling = self.parent.contents[-1]
|
||||
|
||||
self.previous_sibling = previous_sibling
|
||||
if previous_sibling:
|
||||
self.next_element = None
|
||||
self.previous_sibling = None
|
||||
self.next_sibling = None
|
||||
if self.parent is not None and self.parent.contents:
|
||||
self.previous_sibling = self.parent.contents[-1]
|
||||
self.previous_sibling.next_sibling = self
|
||||
|
||||
nextSibling = _alias("next_sibling") # BS3
|
||||
previousSibling = _alias("previous_sibling") # BS3
|
||||
|
||||
def replace_with(self, replace_with):
|
||||
if not self.parent:
|
||||
raise ValueError(
|
||||
"Cannot replace one element with another when the"
|
||||
"element to be replaced is not part of a tree.")
|
||||
if replace_with is self:
|
||||
return
|
||||
if replace_with is self.parent:
|
||||
@@ -235,10 +216,6 @@ class PageElement(object):
|
||||
|
||||
def unwrap(self):
|
||||
my_parent = self.parent
|
||||
if not self.parent:
|
||||
raise ValueError(
|
||||
"Cannot replace an element with its contents when that"
|
||||
"element is not part of a tree.")
|
||||
my_index = self.parent.index(self)
|
||||
self.extract()
|
||||
for child in reversed(self.contents[:]):
|
||||
@@ -263,20 +240,17 @@ class PageElement(object):
|
||||
last_child = self._last_descendant()
|
||||
next_element = last_child.next_element
|
||||
|
||||
if (self.previous_element is not None and
|
||||
self.previous_element is not next_element):
|
||||
if self.previous_element is not None:
|
||||
self.previous_element.next_element = next_element
|
||||
if next_element is not None and next_element is not self.previous_element:
|
||||
if next_element is not None:
|
||||
next_element.previous_element = self.previous_element
|
||||
self.previous_element = None
|
||||
last_child.next_element = None
|
||||
|
||||
self.parent = None
|
||||
if (self.previous_sibling is not None
|
||||
and self.previous_sibling is not self.next_sibling):
|
||||
if self.previous_sibling is not None:
|
||||
self.previous_sibling.next_sibling = self.next_sibling
|
||||
if (self.next_sibling is not None
|
||||
and self.next_sibling is not self.previous_sibling):
|
||||
if self.next_sibling is not None:
|
||||
self.next_sibling.previous_sibling = self.previous_sibling
|
||||
self.previous_sibling = self.next_sibling = None
|
||||
return self
|
||||
@@ -289,18 +263,16 @@ class PageElement(object):
|
||||
last_child = self
|
||||
while isinstance(last_child, Tag) and last_child.contents:
|
||||
last_child = last_child.contents[-1]
|
||||
if not accept_self and last_child is self:
|
||||
if not accept_self and last_child == self:
|
||||
last_child = None
|
||||
return last_child
|
||||
# BS3: Not part of the API!
|
||||
_lastRecursiveChild = _last_descendant
|
||||
|
||||
def insert(self, position, new_child):
|
||||
if new_child is None:
|
||||
raise ValueError("Cannot insert None into a tag.")
|
||||
if new_child is self:
|
||||
raise ValueError("Cannot insert a tag into itself.")
|
||||
if (isinstance(new_child, str)
|
||||
if (isinstance(new_child, basestring)
|
||||
and not isinstance(new_child, NavigableString)):
|
||||
new_child = NavigableString(new_child)
|
||||
|
||||
@@ -506,10 +478,6 @@ class PageElement(object):
|
||||
def _find_all(self, name, attrs, text, limit, generator, **kwargs):
|
||||
"Iterates over a generator looking for things that match."
|
||||
|
||||
if text is None and 'string' in kwargs:
|
||||
text = kwargs['string']
|
||||
del kwargs['string']
|
||||
|
||||
if isinstance(name, SoupStrainer):
|
||||
strainer = name
|
||||
else:
|
||||
@@ -521,7 +489,7 @@ class PageElement(object):
|
||||
result = (element for element in generator
|
||||
if isinstance(element, Tag))
|
||||
return ResultSet(strainer, result)
|
||||
elif isinstance(name, str):
|
||||
elif isinstance(name, basestring):
|
||||
# Optimization to find all tags with a given name.
|
||||
result = (element for element in generator
|
||||
if isinstance(element, Tag)
|
||||
@@ -580,17 +548,17 @@ class PageElement(object):
|
||||
|
||||
# Methods for supporting CSS selectors.
|
||||
|
||||
tag_name_re = re.compile('^[a-zA-Z0-9][-.a-zA-Z0-9:_]*$')
|
||||
tag_name_re = re.compile('^[a-z0-9]+$')
|
||||
|
||||
# /^([a-zA-Z0-9][-.a-zA-Z0-9:_]*)\[(\w+)([=~\|\^\$\*]?)=?"?([^\]"]*)"?\]$/
|
||||
# \---------------------------/ \---/\-------------/ \-------/
|
||||
# | | | |
|
||||
# | | | The value
|
||||
# | | ~,|,^,$,* or =
|
||||
# | Attribute
|
||||
# /^(\w+)\[(\w+)([=~\|\^\$\*]?)=?"?([^\]"]*)"?\]$/
|
||||
# \---/ \---/\-------------/ \-------/
|
||||
# | | | |
|
||||
# | | | The value
|
||||
# | | ~,|,^,$,* or =
|
||||
# | Attribute
|
||||
# Tag
|
||||
attribselect_re = re.compile(
|
||||
r'^(?P<tag>[a-zA-Z0-9][-.a-zA-Z0-9:_]*)?\[(?P<attribute>[\w-]+)(?P<operator>[=~\|\^\$\*]?)' +
|
||||
r'^(?P<tag>\w+)?\[(?P<attribute>\w+)(?P<operator>[=~\|\^\$\*]?)' +
|
||||
r'=?"?(?P<value>[^\]"]*)"?\]$'
|
||||
)
|
||||
|
||||
@@ -672,7 +640,7 @@ class PageElement(object):
|
||||
return self.parents
|
||||
|
||||
|
||||
class NavigableString(str, PageElement):
|
||||
class NavigableString(unicode, PageElement):
|
||||
|
||||
PREFIX = ''
|
||||
SUFFIX = ''
|
||||
@@ -685,21 +653,15 @@ class NavigableString(str, PageElement):
|
||||
passed in to the superclass's __new__ or the superclass won't know
|
||||
how to handle non-ASCII characters.
|
||||
"""
|
||||
if isinstance(value, str):
|
||||
u = str.__new__(cls, value)
|
||||
else:
|
||||
u = str.__new__(cls, value, DEFAULT_OUTPUT_ENCODING)
|
||||
u.setup()
|
||||
return u
|
||||
if isinstance(value, unicode):
|
||||
return unicode.__new__(cls, value)
|
||||
return unicode.__new__(cls, value, DEFAULT_OUTPUT_ENCODING)
|
||||
|
||||
def __copy__(self):
|
||||
"""A copy of a NavigableString has the same contents and class
|
||||
as the original, but it is not connected to the parse tree.
|
||||
"""
|
||||
return type(self)(self)
|
||||
return self
|
||||
|
||||
def __getnewargs__(self):
|
||||
return (str(self),)
|
||||
return (unicode(self),)
|
||||
|
||||
def __getattr__(self, attr):
|
||||
"""text.string gives you text. This is for backwards
|
||||
@@ -739,23 +701,23 @@ class PreformattedString(NavigableString):
|
||||
|
||||
class CData(PreformattedString):
|
||||
|
||||
PREFIX = '<![CDATA['
|
||||
SUFFIX = ']]>'
|
||||
PREFIX = u'<![CDATA['
|
||||
SUFFIX = u']]>'
|
||||
|
||||
class ProcessingInstruction(PreformattedString):
|
||||
|
||||
PREFIX = '<?'
|
||||
SUFFIX = '>'
|
||||
PREFIX = u'<?'
|
||||
SUFFIX = u'?>'
|
||||
|
||||
class Comment(PreformattedString):
|
||||
|
||||
PREFIX = '<!--'
|
||||
SUFFIX = '-->'
|
||||
PREFIX = u'<!--'
|
||||
SUFFIX = u'-->'
|
||||
|
||||
|
||||
class Declaration(PreformattedString):
|
||||
PREFIX = '<?'
|
||||
SUFFIX = '?>'
|
||||
PREFIX = u'<!'
|
||||
SUFFIX = u'!>'
|
||||
|
||||
|
||||
class Doctype(PreformattedString):
|
||||
@@ -772,8 +734,8 @@ class Doctype(PreformattedString):
|
||||
|
||||
return Doctype(value)
|
||||
|
||||
PREFIX = '<!DOCTYPE '
|
||||
SUFFIX = '>\n'
|
||||
PREFIX = u'<!DOCTYPE '
|
||||
SUFFIX = u'>\n'
|
||||
|
||||
|
||||
class Tag(PageElement):
|
||||
@@ -797,12 +759,9 @@ class Tag(PageElement):
|
||||
self.prefix = prefix
|
||||
if attrs is None:
|
||||
attrs = {}
|
||||
elif attrs:
|
||||
if builder is not None and builder.cdata_list_attributes:
|
||||
attrs = builder._replace_cdata_list_attribute_values(
|
||||
self.name, attrs)
|
||||
else:
|
||||
attrs = dict(attrs)
|
||||
elif attrs and builder.cdata_list_attributes:
|
||||
attrs = builder._replace_cdata_list_attribute_values(
|
||||
self.name, attrs)
|
||||
else:
|
||||
attrs = dict(attrs)
|
||||
self.attrs = attrs
|
||||
@@ -819,18 +778,6 @@ class Tag(PageElement):
|
||||
|
||||
parserClass = _alias("parser_class") # BS3
|
||||
|
||||
def __copy__(self):
|
||||
"""A copy of a Tag is a new Tag, unconnected to the parse tree.
|
||||
Its contents are a copy of the old Tag's contents.
|
||||
"""
|
||||
clone = type(self)(None, self.builder, self.name, self.namespace,
|
||||
self.nsprefix, self.attrs)
|
||||
for attr in ('can_be_empty_element', 'hidden'):
|
||||
setattr(clone, attr, getattr(self, attr))
|
||||
for child in self.contents:
|
||||
clone.append(child.__copy__())
|
||||
return clone
|
||||
|
||||
@property
|
||||
def is_empty_element(self):
|
||||
"""Is this tag an empty-element tag? (aka a self-closing tag)
|
||||
@@ -896,7 +843,7 @@ class Tag(PageElement):
|
||||
for string in self._all_strings(True):
|
||||
yield string
|
||||
|
||||
def get_text(self, separator="", strip=False,
|
||||
def get_text(self, separator=u"", strip=False,
|
||||
types=(NavigableString, CData)):
|
||||
"""
|
||||
Get all child strings, concatenated using the given separator.
|
||||
@@ -968,7 +915,7 @@ class Tag(PageElement):
|
||||
def __contains__(self, x):
|
||||
return x in self.contents
|
||||
|
||||
def __bool__(self):
|
||||
def __nonzero__(self):
|
||||
"A tag is non-None even if it has no contents."
|
||||
return True
|
||||
|
||||
@@ -1024,25 +971,15 @@ class Tag(PageElement):
|
||||
as defined in __eq__."""
|
||||
return not self == other
|
||||
|
||||
def __repr__(self, encoding="unicode-escape"):
|
||||
def __repr__(self, encoding=DEFAULT_OUTPUT_ENCODING):
|
||||
"""Renders this tag as a string."""
|
||||
if PY3K:
|
||||
# "The return value must be a string object", i.e. Unicode
|
||||
return self.decode()
|
||||
else:
|
||||
# "The return value must be a string object", i.e. a bytestring.
|
||||
# By convention, the return value of __repr__ should also be
|
||||
# an ASCII string.
|
||||
return self.encode(encoding)
|
||||
return self.encode(encoding)
|
||||
|
||||
def __unicode__(self):
|
||||
return self.decode()
|
||||
|
||||
def __str__(self):
|
||||
if PY3K:
|
||||
return self.decode()
|
||||
else:
|
||||
return self.encode()
|
||||
return self.encode()
|
||||
|
||||
if PY3K:
|
||||
__str__ = __repr__ = __unicode__
|
||||
@@ -1077,7 +1014,7 @@ class Tag(PageElement):
|
||||
|
||||
# First off, turn a string formatter into a function. This
|
||||
# will stop the lookup from happening over and over again.
|
||||
if not isinstance(formatter, collections.Callable):
|
||||
if not callable(formatter):
|
||||
formatter = self._formatter_for_name(formatter)
|
||||
|
||||
attrs = []
|
||||
@@ -1088,8 +1025,8 @@ class Tag(PageElement):
|
||||
else:
|
||||
if isinstance(val, list) or isinstance(val, tuple):
|
||||
val = ' '.join(val)
|
||||
elif not isinstance(val, str):
|
||||
val = str(val)
|
||||
elif not isinstance(val, basestring):
|
||||
val = unicode(val)
|
||||
elif (
|
||||
isinstance(val, AttributeValueWithCharsetSubstitution)
|
||||
and eventual_encoding is not None):
|
||||
@@ -1097,7 +1034,7 @@ class Tag(PageElement):
|
||||
|
||||
text = self.format_string(val, formatter)
|
||||
decoded = (
|
||||
str(key) + '='
|
||||
unicode(key) + '='
|
||||
+ EntitySubstitution.quoted_attribute_value(text))
|
||||
attrs.append(decoded)
|
||||
close = ''
|
||||
@@ -1166,22 +1103,16 @@ class Tag(PageElement):
|
||||
formatter="minimal"):
|
||||
"""Renders the contents of this tag as a Unicode string.
|
||||
|
||||
:param indent_level: Each line of the rendering will be
|
||||
indented this many spaces.
|
||||
|
||||
:param eventual_encoding: The tag is destined to be
|
||||
encoded into this encoding. This method is _not_
|
||||
responsible for performing that encoding. This information
|
||||
is passed in so that it can be substituted in if the
|
||||
document contains a <META> tag that mentions the document's
|
||||
encoding.
|
||||
|
||||
:param formatter: The output formatter responsible for converting
|
||||
entities to Unicode characters.
|
||||
"""
|
||||
# First off, turn a string formatter into a function. This
|
||||
# will stop the lookup from happening over and over again.
|
||||
if not isinstance(formatter, collections.Callable):
|
||||
if not callable(formatter):
|
||||
formatter = self._formatter_for_name(formatter)
|
||||
|
||||
pretty_print = (indent_level is not None)
|
||||
@@ -1206,17 +1137,7 @@ class Tag(PageElement):
|
||||
def encode_contents(
|
||||
self, indent_level=None, encoding=DEFAULT_OUTPUT_ENCODING,
|
||||
formatter="minimal"):
|
||||
"""Renders the contents of this tag as a bytestring.
|
||||
|
||||
:param indent_level: Each line of the rendering will be
|
||||
indented this many spaces.
|
||||
|
||||
:param eventual_encoding: The bytestring will be in this encoding.
|
||||
|
||||
:param formatter: The output formatter responsible for converting
|
||||
entities to Unicode characters.
|
||||
"""
|
||||
|
||||
"""Renders the contents of this tag as a bytestring."""
|
||||
contents = self.decode_contents(indent_level, encoding, formatter)
|
||||
return contents.encode(encoding)
|
||||
|
||||
@@ -1280,57 +1201,26 @@ class Tag(PageElement):
|
||||
|
||||
_selector_combinators = ['>', '+', '~']
|
||||
_select_debug = False
|
||||
def select_one(self, selector):
|
||||
def select(self, selector, _candidate_generator=None):
|
||||
"""Perform a CSS selection operation on the current element."""
|
||||
value = self.select(selector, limit=1)
|
||||
if value:
|
||||
return value[0]
|
||||
return None
|
||||
|
||||
def select(self, selector, _candidate_generator=None, limit=None):
|
||||
"""Perform a CSS selection operation on the current element."""
|
||||
|
||||
# Handle grouping selectors if ',' exists, ie: p,a
|
||||
if ',' in selector:
|
||||
context = []
|
||||
for partial_selector in selector.split(','):
|
||||
partial_selector = partial_selector.strip()
|
||||
if partial_selector == '':
|
||||
raise ValueError('Invalid group selection syntax: %s' % selector)
|
||||
candidates = self.select(partial_selector, limit=limit)
|
||||
for candidate in candidates:
|
||||
if candidate not in context:
|
||||
context.append(candidate)
|
||||
|
||||
if limit and len(context) >= limit:
|
||||
break
|
||||
return context
|
||||
|
||||
tokens = selector.split()
|
||||
current_context = [self]
|
||||
|
||||
if tokens[-1] in self._selector_combinators:
|
||||
raise ValueError(
|
||||
'Final combinator "%s" is missing an argument.' % tokens[-1])
|
||||
|
||||
if self._select_debug:
|
||||
print('Running CSS selector "%s"' % selector)
|
||||
|
||||
print 'Running CSS selector "%s"' % selector
|
||||
for index, token in enumerate(tokens):
|
||||
new_context = []
|
||||
new_context_ids = set([])
|
||||
|
||||
if self._select_debug:
|
||||
print ' Considering token "%s"' % token
|
||||
recursive_candidate_generator = None
|
||||
tag_name = None
|
||||
if tokens[index-1] in self._selector_combinators:
|
||||
# This token was consumed by the previous combinator. Skip it.
|
||||
if self._select_debug:
|
||||
print(' Token was consumed by the previous combinator.')
|
||||
print ' Token was consumed by the previous combinator.'
|
||||
continue
|
||||
|
||||
if self._select_debug:
|
||||
print(' Considering token "%s"' % token)
|
||||
recursive_candidate_generator = None
|
||||
tag_name = None
|
||||
|
||||
# Each operation corresponds to a checker function, a rule
|
||||
# for determining whether a candidate matches the
|
||||
# selector. Candidates are generated by the active
|
||||
@@ -1366,38 +1256,35 @@ class Tag(PageElement):
|
||||
"A pseudo-class must be prefixed with a tag name.")
|
||||
pseudo_attributes = re.match('([a-zA-Z\d-]+)\(([a-zA-Z\d]+)\)', pseudo)
|
||||
found = []
|
||||
if pseudo_attributes is None:
|
||||
pseudo_type = pseudo
|
||||
pseudo_value = None
|
||||
else:
|
||||
if pseudo_attributes is not None:
|
||||
pseudo_type, pseudo_value = pseudo_attributes.groups()
|
||||
if pseudo_type == 'nth-of-type':
|
||||
try:
|
||||
pseudo_value = int(pseudo_value)
|
||||
except:
|
||||
raise NotImplementedError(
|
||||
'Only numeric values are currently supported for the nth-of-type pseudo-class.')
|
||||
if pseudo_value < 1:
|
||||
raise ValueError(
|
||||
'nth-of-type pseudo-class value must be at least 1.')
|
||||
class Counter(object):
|
||||
def __init__(self, destination):
|
||||
self.count = 0
|
||||
self.destination = destination
|
||||
if pseudo_type == 'nth-of-type':
|
||||
try:
|
||||
pseudo_value = int(pseudo_value)
|
||||
except:
|
||||
raise NotImplementedError(
|
||||
'Only numeric values are currently supported for the nth-of-type pseudo-class.')
|
||||
if pseudo_value < 1:
|
||||
raise ValueError(
|
||||
'nth-of-type pseudo-class value must be at least 1.')
|
||||
class Counter(object):
|
||||
def __init__(self, destination):
|
||||
self.count = 0
|
||||
self.destination = destination
|
||||
|
||||
def nth_child_of_type(self, tag):
|
||||
self.count += 1
|
||||
if self.count == self.destination:
|
||||
return True
|
||||
if self.count > self.destination:
|
||||
# Stop the generator that's sending us
|
||||
# these things.
|
||||
raise StopIteration()
|
||||
return False
|
||||
checker = Counter(pseudo_value).nth_child_of_type
|
||||
else:
|
||||
raise NotImplementedError(
|
||||
'Only the following pseudo-classes are implemented: nth-of-type.')
|
||||
def nth_child_of_type(self, tag):
|
||||
self.count += 1
|
||||
if self.count == self.destination:
|
||||
return True
|
||||
if self.count > self.destination:
|
||||
# Stop the generator that's sending us
|
||||
# these things.
|
||||
raise StopIteration()
|
||||
return False
|
||||
checker = Counter(pseudo_value).nth_child_of_type
|
||||
else:
|
||||
raise NotImplementedError(
|
||||
'Only the following pseudo-classes are implemented: nth-of-type.')
|
||||
|
||||
elif token == '*':
|
||||
# Star selector -- matches everything
|
||||
@@ -1424,6 +1311,7 @@ class Tag(PageElement):
|
||||
else:
|
||||
raise ValueError(
|
||||
'Unsupported or invalid CSS selector: "%s"' % token)
|
||||
|
||||
if recursive_candidate_generator:
|
||||
# This happens when the selector looks like "> foo".
|
||||
#
|
||||
@@ -1437,14 +1325,14 @@ class Tag(PageElement):
|
||||
next_token = tokens[index+1]
|
||||
def recursive_select(tag):
|
||||
if self._select_debug:
|
||||
print(' Calling select("%s") recursively on %s %s' % (next_token, tag.name, tag.attrs))
|
||||
print('-' * 40)
|
||||
print ' Calling select("%s") recursively on %s %s' % (next_token, tag.name, tag.attrs)
|
||||
print '-' * 40
|
||||
for i in tag.select(next_token, recursive_candidate_generator):
|
||||
if self._select_debug:
|
||||
print('(Recursive select picked up candidate %s %s)' % (i.name, i.attrs))
|
||||
print '(Recursive select picked up candidate %s %s)' % (i.name, i.attrs)
|
||||
yield i
|
||||
if self._select_debug:
|
||||
print('-' * 40)
|
||||
print '-' * 40
|
||||
_use_candidate_generator = recursive_select
|
||||
elif _candidate_generator is None:
|
||||
# By default, a tag's candidates are all of its
|
||||
@@ -1455,7 +1343,7 @@ class Tag(PageElement):
|
||||
check = "[any]"
|
||||
else:
|
||||
check = tag_name
|
||||
print(' Default candidate generator, tag name="%s"' % check)
|
||||
print ' Default candidate generator, tag name="%s"' % check
|
||||
if self._select_debug:
|
||||
# This is redundant with later code, but it stops
|
||||
# a bunch of bogus tags from cluttering up the
|
||||
@@ -1473,11 +1361,12 @@ class Tag(PageElement):
|
||||
else:
|
||||
_use_candidate_generator = _candidate_generator
|
||||
|
||||
count = 0
|
||||
new_context = []
|
||||
new_context_ids = set([])
|
||||
for tag in current_context:
|
||||
if self._select_debug:
|
||||
print(" Running candidate generator on %s %s" % (
|
||||
tag.name, repr(tag.attrs)))
|
||||
print " Running candidate generator on %s %s" % (
|
||||
tag.name, repr(tag.attrs))
|
||||
for candidate in _use_candidate_generator(tag):
|
||||
if not isinstance(candidate, Tag):
|
||||
continue
|
||||
@@ -1492,24 +1381,21 @@ class Tag(PageElement):
|
||||
break
|
||||
if checker is None or result:
|
||||
if self._select_debug:
|
||||
print(" SUCCESS %s %s" % (candidate.name, repr(candidate.attrs)))
|
||||
print " SUCCESS %s %s" % (candidate.name, repr(candidate.attrs))
|
||||
if id(candidate) not in new_context_ids:
|
||||
# If a tag matches a selector more than once,
|
||||
# don't include it in the context more than once.
|
||||
new_context.append(candidate)
|
||||
new_context_ids.add(id(candidate))
|
||||
if limit and len(new_context) >= limit:
|
||||
break
|
||||
elif self._select_debug:
|
||||
print(" FAILURE %s %s" % (candidate.name, repr(candidate.attrs)))
|
||||
|
||||
print " FAILURE %s %s" % (candidate.name, repr(candidate.attrs))
|
||||
|
||||
current_context = new_context
|
||||
|
||||
if self._select_debug:
|
||||
print("Final verdict:")
|
||||
print "Final verdict:"
|
||||
for i in current_context:
|
||||
print(" %s %s" % (i.name, i.attrs))
|
||||
print " %s %s" % (i.name, i.attrs)
|
||||
return current_context
|
||||
|
||||
# Old names for backwards compatibility
|
||||
@@ -1553,7 +1439,7 @@ class SoupStrainer(object):
|
||||
else:
|
||||
attrs = kwargs
|
||||
normalized_attrs = {}
|
||||
for key, value in list(attrs.items()):
|
||||
for key, value in attrs.items():
|
||||
normalized_attrs[key] = self._normalize_search_value(value)
|
||||
|
||||
self.attrs = normalized_attrs
|
||||
@@ -1562,7 +1448,7 @@ class SoupStrainer(object):
|
||||
def _normalize_search_value(self, value):
|
||||
# Leave it alone if it's a Unicode string, a callable, a
|
||||
# regular expression, a boolean, or None.
|
||||
if (isinstance(value, str) or isinstance(value, collections.Callable) or hasattr(value, 'match')
|
||||
if (isinstance(value, unicode) or callable(value) or hasattr(value, 'match')
|
||||
or isinstance(value, bool) or value is None):
|
||||
return value
|
||||
|
||||
@@ -1575,7 +1461,7 @@ class SoupStrainer(object):
|
||||
new_value = []
|
||||
for v in value:
|
||||
if (hasattr(v, '__iter__') and not isinstance(v, bytes)
|
||||
and not isinstance(v, str)):
|
||||
and not isinstance(v, unicode)):
|
||||
# This is almost certainly the user's mistake. In the
|
||||
# interests of avoiding infinite loops, we'll let
|
||||
# it through as-is rather than doing a recursive call.
|
||||
@@ -1587,7 +1473,7 @@ class SoupStrainer(object):
|
||||
# Otherwise, convert it into a Unicode string.
|
||||
# The unicode(str()) thing is so this will do the same thing on Python 2
|
||||
# and Python 3.
|
||||
return str(str(value))
|
||||
return unicode(str(value))
|
||||
|
||||
def __str__(self):
|
||||
if self.text:
|
||||
@@ -1641,7 +1527,7 @@ class SoupStrainer(object):
|
||||
found = None
|
||||
# If given a list of items, scan it for a text element that
|
||||
# matches.
|
||||
if hasattr(markup, '__iter__') and not isinstance(markup, (Tag, str)):
|
||||
if hasattr(markup, '__iter__') and not isinstance(markup, (Tag, basestring)):
|
||||
for element in markup:
|
||||
if isinstance(element, NavigableString) \
|
||||
and self.search(element):
|
||||
@@ -1654,7 +1540,7 @@ class SoupStrainer(object):
|
||||
found = self.search_tag(markup)
|
||||
# If it's text, make sure the text matches.
|
||||
elif isinstance(markup, NavigableString) or \
|
||||
isinstance(markup, str):
|
||||
isinstance(markup, basestring):
|
||||
if not self.name and not self.attrs and self._matches(markup, self.text):
|
||||
found = markup
|
||||
else:
|
||||
@@ -1668,7 +1554,7 @@ class SoupStrainer(object):
|
||||
if isinstance(markup, list) or isinstance(markup, tuple):
|
||||
# This should only happen when searching a multi-valued attribute
|
||||
# like 'class'.
|
||||
if (isinstance(match_against, str)
|
||||
if (isinstance(match_against, unicode)
|
||||
and ' ' in match_against):
|
||||
# A bit of a special case. If they try to match "foo
|
||||
# bar" on a multivalue attribute's value, only accept
|
||||
@@ -1703,7 +1589,7 @@ class SoupStrainer(object):
|
||||
# None matches None, False, an empty string, an empty list, and so on.
|
||||
return not match_against
|
||||
|
||||
if isinstance(match_against, str):
|
||||
if isinstance(match_against, unicode):
|
||||
# Exact string match
|
||||
return markup == match_against
|
||||
|
||||
|
||||
@@ -1,8 +1,5 @@
|
||||
"""Helper classes for tests."""
|
||||
|
||||
__license__ = "MIT"
|
||||
|
||||
import pickle
|
||||
import copy
|
||||
import functools
|
||||
import unittest
|
||||
@@ -46,16 +43,6 @@ class SoupTest(unittest.TestCase):
|
||||
|
||||
self.assertEqual(obj.decode(), self.document_for(compare_parsed_to))
|
||||
|
||||
def assertConnectedness(self, element):
|
||||
"""Ensure that next_element and previous_element are properly
|
||||
set for all descendants of the given element.
|
||||
"""
|
||||
earlier = None
|
||||
for e in element.descendants:
|
||||
if earlier:
|
||||
self.assertEqual(e, earlier.next_element)
|
||||
self.assertEqual(earlier, e.previous_element)
|
||||
earlier = e
|
||||
|
||||
class HTMLTreeBuilderSmokeTest(object):
|
||||
|
||||
@@ -67,15 +54,6 @@ class HTMLTreeBuilderSmokeTest(object):
|
||||
markup in these tests, there's not much room for interpretation.
|
||||
"""
|
||||
|
||||
def test_pickle_and_unpickle_identity(self):
|
||||
# Pickling a tree, then unpickling it, yields a tree identical
|
||||
# to the original.
|
||||
tree = self.soup("<a><b>foo</a>")
|
||||
dumped = pickle.dumps(tree, 2)
|
||||
loaded = pickle.loads(dumped)
|
||||
self.assertEqual(loaded.__class__, BeautifulSoup)
|
||||
self.assertEqual(loaded.decode(), tree.decode())
|
||||
|
||||
def assertDoctypeHandled(self, doctype_fragment):
|
||||
"""Assert that a given doctype string is handled correctly."""
|
||||
doctype_str, soup = self._document_with_doctype(doctype_fragment)
|
||||
@@ -136,11 +114,6 @@ class HTMLTreeBuilderSmokeTest(object):
|
||||
soup.encode("utf-8").replace(b"\n", b""),
|
||||
markup.replace(b"\n", b""))
|
||||
|
||||
def test_processing_instruction(self):
|
||||
markup = b"""<?PITarget PIContent?>"""
|
||||
soup = self.soup(markup)
|
||||
self.assertEqual(markup, soup.encode("utf8"))
|
||||
|
||||
def test_deepcopy(self):
|
||||
"""Make sure you can copy the tree builder.
|
||||
|
||||
@@ -182,23 +155,6 @@ class HTMLTreeBuilderSmokeTest(object):
|
||||
def test_nested_formatting_elements(self):
|
||||
self.assertSoupEquals("<em><em></em></em>")
|
||||
|
||||
def test_double_head(self):
|
||||
html = '''<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>Ordinary HEAD element test</title>
|
||||
</head>
|
||||
<script type="text/javascript">
|
||||
alert("Help!");
|
||||
</script>
|
||||
<body>
|
||||
Hello, world!
|
||||
</body>
|
||||
</html>
|
||||
'''
|
||||
soup = self.soup(html)
|
||||
self.assertEqual("text/javascript", soup.find('script')['type'])
|
||||
|
||||
def test_comment(self):
|
||||
# Comments are represented as Comment objects.
|
||||
markup = "<p>foo<!--foobar-->baz</p>"
|
||||
@@ -265,26 +221,18 @@ Hello, world!
|
||||
soup = self.soup(markup)
|
||||
self.assertEqual(["css"], soup.div.div['class'])
|
||||
|
||||
def test_multivalued_attribute_on_html(self):
|
||||
# html5lib uses a different API to set the attributes ot the
|
||||
# <html> tag. This has caused problems with multivalued
|
||||
# attributes.
|
||||
markup = '<html class="a b"></html>'
|
||||
soup = self.soup(markup)
|
||||
self.assertEqual(["a", "b"], soup.html['class'])
|
||||
|
||||
def test_angle_brackets_in_attribute_values_are_escaped(self):
|
||||
self.assertSoupEquals('<a b="<a>"></a>', '<a b="<a>"></a>')
|
||||
|
||||
def test_entities_in_attributes_converted_to_unicode(self):
|
||||
expect = '<p id="pi\N{LATIN SMALL LETTER N WITH TILDE}ata"></p>'
|
||||
expect = u'<p id="pi\N{LATIN SMALL LETTER N WITH TILDE}ata"></p>'
|
||||
self.assertSoupEquals('<p id="piñata"></p>', expect)
|
||||
self.assertSoupEquals('<p id="piñata"></p>', expect)
|
||||
self.assertSoupEquals('<p id="piñata"></p>', expect)
|
||||
self.assertSoupEquals('<p id="piñata"></p>', expect)
|
||||
|
||||
def test_entities_in_text_converted_to_unicode(self):
|
||||
expect = '<p>pi\N{LATIN SMALL LETTER N WITH TILDE}ata</p>'
|
||||
expect = u'<p>pi\N{LATIN SMALL LETTER N WITH TILDE}ata</p>'
|
||||
self.assertSoupEquals("<p>piñata</p>", expect)
|
||||
self.assertSoupEquals("<p>piñata</p>", expect)
|
||||
self.assertSoupEquals("<p>piñata</p>", expect)
|
||||
@@ -295,7 +243,7 @@ Hello, world!
|
||||
'<p>I said "good day!"</p>')
|
||||
|
||||
def test_out_of_range_entity(self):
|
||||
expect = "\N{REPLACEMENT CHARACTER}"
|
||||
expect = u"\N{REPLACEMENT CHARACTER}"
|
||||
self.assertSoupEquals("�", expect)
|
||||
self.assertSoupEquals("�", expect)
|
||||
self.assertSoupEquals("�", expect)
|
||||
@@ -305,35 +253,6 @@ Hello, world!
|
||||
soup = self.soup("<html><h2>\nfoo</h2><p></p></html>")
|
||||
self.assertEqual("p", soup.h2.string.next_element.name)
|
||||
self.assertEqual("p", soup.p.name)
|
||||
self.assertConnectedness(soup)
|
||||
|
||||
def test_head_tag_between_head_and_body(self):
|
||||
"Prevent recurrence of a bug in the html5lib treebuilder."
|
||||
content = """<html><head></head>
|
||||
<link></link>
|
||||
<body>foo</body>
|
||||
</html>
|
||||
"""
|
||||
soup = self.soup(content)
|
||||
self.assertNotEqual(None, soup.html.body)
|
||||
self.assertConnectedness(soup)
|
||||
|
||||
def test_multiple_copies_of_a_tag(self):
|
||||
"Prevent recurrence of a bug in the html5lib treebuilder."
|
||||
content = """<!DOCTYPE html>
|
||||
<html>
|
||||
<body>
|
||||
<article id="a" >
|
||||
<div><a href="1"></div>
|
||||
<footer>
|
||||
<a href="2"></a>
|
||||
</footer>
|
||||
</article>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
soup = self.soup(content)
|
||||
self.assertConnectedness(soup.article)
|
||||
|
||||
def test_basic_namespaces(self):
|
||||
"""Parsers don't need to *understand* namespaces, but at the
|
||||
@@ -366,9 +285,9 @@ Hello, world!
|
||||
# A seemingly innocuous document... but it's in Unicode! And
|
||||
# it contains characters that can't be represented in the
|
||||
# encoding found in the declaration! The horror!
|
||||
markup = '<html><head><meta encoding="euc-jp"></head><body>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</body>'
|
||||
markup = u'<html><head><meta encoding="euc-jp"></head><body>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</body>'
|
||||
soup = self.soup(markup)
|
||||
self.assertEqual('Sacr\xe9 bleu!', soup.body.string)
|
||||
self.assertEqual(u'Sacr\xe9 bleu!', soup.body.string)
|
||||
|
||||
def test_soupstrainer(self):
|
||||
"""Parsers should be able to work with SoupStrainers."""
|
||||
@@ -408,7 +327,7 @@ Hello, world!
|
||||
# Both XML and HTML entities are converted to Unicode characters
|
||||
# during parsing.
|
||||
text = "<p><<sacré bleu!>></p>"
|
||||
expected = "<p><<sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>></p>"
|
||||
expected = u"<p><<sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>></p>"
|
||||
self.assertSoupEquals(text, expected)
|
||||
|
||||
def test_smart_quotes_converted_on_the_way_in(self):
|
||||
@@ -418,15 +337,15 @@ Hello, world!
|
||||
soup = self.soup(quote)
|
||||
self.assertEqual(
|
||||
soup.p.string,
|
||||
"\N{LEFT SINGLE QUOTATION MARK}Foo\N{RIGHT SINGLE QUOTATION MARK}")
|
||||
u"\N{LEFT SINGLE QUOTATION MARK}Foo\N{RIGHT SINGLE QUOTATION MARK}")
|
||||
|
||||
def test_non_breaking_spaces_converted_on_the_way_in(self):
|
||||
soup = self.soup("<a> </a>")
|
||||
self.assertEqual(soup.a.string, "\N{NO-BREAK SPACE}" * 2)
|
||||
self.assertEqual(soup.a.string, u"\N{NO-BREAK SPACE}" * 2)
|
||||
|
||||
def test_entities_converted_on_the_way_out(self):
|
||||
text = "<p><<sacré bleu!>></p>"
|
||||
expected = "<p><<sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>></p>".encode("utf-8")
|
||||
expected = u"<p><<sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>></p>".encode("utf-8")
|
||||
soup = self.soup(text)
|
||||
self.assertEqual(soup.p.encode("utf-8"), expected)
|
||||
|
||||
@@ -435,7 +354,7 @@ Hello, world!
|
||||
# easy-to-understand document.
|
||||
|
||||
# Here it is in Unicode. Note that it claims to be in ISO-Latin-1.
|
||||
unicode_html = '<html><head><meta content="text/html; charset=ISO-Latin-1" http-equiv="Content-type"/></head><body><p>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</p></body></html>'
|
||||
unicode_html = u'<html><head><meta content="text/html; charset=ISO-Latin-1" http-equiv="Content-type"/></head><body><p>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</p></body></html>'
|
||||
|
||||
# That's because we're going to encode it into ISO-Latin-1, and use
|
||||
# that to test.
|
||||
@@ -544,25 +463,11 @@ Hello, world!
|
||||
|
||||
class XMLTreeBuilderSmokeTest(object):
|
||||
|
||||
def test_pickle_and_unpickle_identity(self):
|
||||
# Pickling a tree, then unpickling it, yields a tree identical
|
||||
# to the original.
|
||||
tree = self.soup("<a><b>foo</a>")
|
||||
dumped = pickle.dumps(tree, 2)
|
||||
loaded = pickle.loads(dumped)
|
||||
self.assertEqual(loaded.__class__, BeautifulSoup)
|
||||
self.assertEqual(loaded.decode(), tree.decode())
|
||||
|
||||
def test_docstring_generated(self):
|
||||
soup = self.soup("<root/>")
|
||||
self.assertEqual(
|
||||
soup.encode(), b'<?xml version="1.0" encoding="utf-8"?>\n<root/>')
|
||||
|
||||
def test_xml_declaration(self):
|
||||
markup = b"""<?xml version="1.0" encoding="utf8"?>\n<foo/>"""
|
||||
soup = self.soup(markup)
|
||||
self.assertEqual(markup, soup.encode("utf8"))
|
||||
|
||||
def test_real_xhtml_document(self):
|
||||
"""A real XHTML document should come out *exactly* the same as it went in."""
|
||||
markup = b"""<?xml version="1.0" encoding="utf-8"?>
|
||||
@@ -580,7 +485,7 @@ class XMLTreeBuilderSmokeTest(object):
|
||||
<script type="text/javascript">
|
||||
</script>
|
||||
"""
|
||||
soup = BeautifulSoup(doc, "lxml-xml")
|
||||
soup = BeautifulSoup(doc, "xml")
|
||||
# lxml would have stripped this while parsing, but we can add
|
||||
# it later.
|
||||
soup.script.string = 'console.log("< < hey > > ");'
|
||||
@@ -588,15 +493,15 @@ class XMLTreeBuilderSmokeTest(object):
        self.assertTrue(b"< < hey > >" in encoded)

    def test_can_parse_unicode_document(self):
        markup = '<?xml version="1.0" encoding="euc-jp"><root>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</root>'
        markup = u'<?xml version="1.0" encoding="euc-jp"><root>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</root>'
        soup = self.soup(markup)
        self.assertEqual('Sacr\xe9 bleu!', soup.root.string)
        self.assertEqual(u'Sacr\xe9 bleu!', soup.root.string)

    def test_popping_namespaced_tag(self):
        markup = '<rss xmlns:dc="foo"><dc:creator>b</dc:creator><dc:date>2012-07-02T20:33:42Z</dc:date><dc:rights>c</dc:rights><image>d</image></rss>'
        soup = self.soup(markup)
        self.assertEqual(
            str(soup.rss), markup)
            unicode(soup.rss), markup)

    def test_docstring_includes_correct_encoding(self):
        soup = self.soup("<root/>")
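The str()/unicode() pairs in these hunks reflect the usual Python 2 versus Python 3 difference: Python 3 has no unicode() builtin. Where a single source tree has to run on both interpreters, a common shim (illustrative, not taken from the patch) is:

    # Illustrative compatibility shim: choose the text type per interpreter.
    import sys

    if sys.version_info[0] >= 3:
        text_type = str
    else:
        text_type = unicode  # noqa: F821 -- only defined on Python 2

    # e.g. self.assertEqual(text_type(soup.rss), markup)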
@@ -627,17 +532,17 @@ class XMLTreeBuilderSmokeTest(object):
    def test_closing_namespaced_tag(self):
        markup = '<p xmlns:dc="http://purl.org/dc/elements/1.1/"><dc:date>20010504</dc:date></p>'
        soup = self.soup(markup)
        self.assertEqual(str(soup.p), markup)
        self.assertEqual(unicode(soup.p), markup)

    def test_namespaced_attributes(self):
        markup = '<foo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><bar xsi:schemaLocation="http://www.example.com"/></foo>'
        soup = self.soup(markup)
        self.assertEqual(str(soup.foo), markup)
        self.assertEqual(unicode(soup.foo), markup)

    def test_namespaced_attributes_xml_namespace(self):
        markup = '<foo xml:lang="fr">bar</foo>'
        soup = self.soup(markup)
        self.assertEqual(str(soup.foo), markup)
        self.assertEqual(unicode(soup.foo), markup)

class HTML5TreeBuilderSmokeTest(HTMLTreeBuilderSmokeTest):
    """Smoke test for a tree builder that supports HTML5."""
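These three tests make the same point: with the XML builder, namespaced markup serializes back to exactly the input. A standalone sketch of that round trip (assumes lxml is available for the "xml" feature):

    # Sketch: namespaced attributes survive an XML parse/serialize round trip.
    from bs4 import BeautifulSoup

    markup = '<foo xml:lang="fr">bar</foo>'
    soup = BeautifulSoup(markup, "xml")
    assert str(soup.foo) == markup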
@@ -1,7 +1,6 @@
"""Tests of the builder registry."""

import unittest
import warnings

from bs4 import BeautifulSoup
from bs4.builder import (
@@ -68,15 +67,10 @@ class BuiltInRegistryTest(unittest.TestCase):
                         HTMLParserTreeBuilder)

    def test_beautifulsoup_constructor_does_lookup(self):

        with warnings.catch_warnings(record=True) as w:
            # This will create a warning about not explicitly
            # specifying a parser, but we'll ignore it.

            # You can pass in a string.
            BeautifulSoup("", features="html")
            # Or a list of strings.
            BeautifulSoup("", features=["html", "fast"])
        # You can pass in a string.
        BeautifulSoup("", features="html")
        # Or a list of strings.
        BeautifulSoup("", features=["html", "fast"])

        # You'll get an exception if BS can't find an appropriate
        # builder.
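The body of test_beautifulsoup_constructor_does_lookup appears twice above, once wrapped in warnings.catch_warnings() and once without; the lookup itself is the same in both versions. In isolation it amounts to the following (illustrative sketch, assuming beautifulsoup4 is installed):

    # Sketch: the constructor resolves a feature name, or a list of features, to a tree builder.
    import warnings
    from bs4 import BeautifulSoup

    with warnings.catch_warnings(record=True):
        # Passing a generic feature such as "html" still triggers the
        # "no parser was explicitly specified" warning, so suppress it here.
        BeautifulSoup("", features="html")             # a single feature string
        BeautifulSoup("", features=["html", "fast"])   # or a list of feature strings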
@@ -5,7 +5,7 @@ import warnings
try:
    from bs4.builder import HTML5TreeBuilder
    HTML5LIB_PRESENT = True
except ImportError as e:
except ImportError, e:
    HTML5LIB_PRESENT = False
from bs4.element import SoupStrainer
from bs4.testing import (
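"except ImportError, e:" is Python 2-only syntax; the "as" form works on Python 2.6+ and Python 3. Since the bound exception is not used afterwards, the guard can be written even more simply (illustrative spelling, not the patch's wording):

    # Optional-dependency guard in Python 3 syntax; the exception object is not needed.
    try:
        from bs4.builder import HTML5TreeBuilder
        HTML5LIB_PRESENT = True
    except ImportError:
        HTML5LIB_PRESENT = False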
@@ -74,25 +74,12 @@ class HTML5LibBuilderSmokeTest(SoupTest, HTML5TreeBuilderSmokeTest):
    def test_reparented_markup(self):
        markup = '<p><em>foo</p>\n<p>bar<a></a></em></p>'
        soup = self.soup(markup)
        self.assertEqual("<body><p><em>foo</em></p><em>\n</em><p><em>bar<a></a></em></p></body>", soup.body.decode())
        self.assertEqual(u"<body><p><em>foo</em></p><em>\n</em><p><em>bar<a></a></em></p></body>", soup.body.decode())
        self.assertEqual(2, len(soup.find_all('p')))


    def test_reparented_markup_ends_with_whitespace(self):
        markup = '<p><em>foo</p>\n<p>bar<a></a></em></p>\n'
        soup = self.soup(markup)
        self.assertEqual("<body><p><em>foo</em></p><em>\n</em><p><em>bar<a></a></em></p>\n</body>", soup.body.decode())
        self.assertEqual(u"<body><p><em>foo</em></p><em>\n</em><p><em>bar<a></a></em></p>\n</body>", soup.body.decode())
        self.assertEqual(2, len(soup.find_all('p')))

    def test_processing_instruction(self):
        """Processing instructions become comments."""
        markup = b"""<?PITarget PIContent?>"""
        soup = self.soup(markup)
        assert str(soup).startswith("<!--?PITarget PIContent?-->")

    def test_cloned_multivalue_node(self):
        markup = b"""<a class="my_class"><p></a>"""
        soup = self.soup(markup)
        a1, a2 = soup.find_all('a')
        self.assertEqual(a1, a2)
        assert a1 is not a2
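test_cloned_multivalue_node checks html5lib's fix-up of mis-nested markup: the <a> element is split into two tags that compare equal but are distinct objects. A standalone sketch of the same check (assumes html5lib is installed; not taken from the patch):

    # Sketch: html5lib repairs the mis-nested markup by cloning the <a> element.
    from bs4 import BeautifulSoup

    soup = BeautifulSoup('<a class="my_class"><p></a>', "html5lib")
    a1, a2 = soup.find_all('a')
    assert a1 == a2       # same name and attributes...
    assert a1 is not a2   # ...but two separate Tag objects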