mirror of
https://git.yoctoproject.org/poky
synced 2026-02-21 17:09:42 +01:00
Compare commits
169 Commits
1.3_M1.rc1
...
denzil-7.0
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
73cdebf60d | ||
|
|
6b06a4fa1b | ||
|
|
2dcbb48df9 | ||
|
|
fb2335fa2b | ||
|
|
e972d78009 | ||
|
|
91d6344765 | ||
|
|
a707b3269c | ||
|
|
0d3748ca5d | ||
|
|
5f2b526109 | ||
|
|
6bb0fdda40 | ||
|
|
5ad28e97e6 | ||
|
|
495ea21c8b | ||
|
|
94e3e894d0 | ||
|
|
8389decfe6 | ||
|
|
7412611252 | ||
|
|
026d502b2a | ||
|
|
e81f7c6152 | ||
|
|
119e1b7dc9 | ||
|
|
5b3a0eac61 | ||
|
|
4c4924ad1b | ||
|
|
9e6d1101b4 | ||
|
|
2bddf70a84 | ||
|
|
6698060d8e | ||
|
|
bd3cd64da3 | ||
|
|
c88f25ddb4 | ||
|
|
ef215694de | ||
|
|
71a6fb605a | ||
|
|
bfc8589048 | ||
|
|
6514e193ac | ||
|
|
44fb9daa81 | ||
|
|
309b2c090e | ||
|
|
67c7bc5e6c | ||
|
|
e06e502bbb | ||
|
|
89c0e81273 | ||
|
|
78a5471a29 | ||
|
|
b30a243f3f | ||
|
|
20657c1fa0 | ||
|
|
3029a08744 | ||
|
|
d376a4e8f1 | ||
|
|
0d0846e06f | ||
|
|
59ac33c77f | ||
|
|
3cb36a5ed9 | ||
|
|
75e32007ef | ||
|
|
d52e74cee9 | ||
|
|
f1630d3cd4 | ||
|
|
b7f1a8f870 | ||
|
|
015f117d85 | ||
|
|
b8338046ba | ||
|
|
7552ccd06c | ||
|
|
e24d5cc2cd | ||
|
|
1f2fc974df | ||
|
|
a473ba170d | ||
|
|
a5fe09c6aa | ||
|
|
2072256b05 | ||
|
|
b623203ac9 | ||
|
|
c7e4a6ae2c | ||
|
|
1628159028 | ||
|
|
b477f676e3 | ||
|
|
9d2534ab24 | ||
|
|
df815f20c8 | ||
|
|
b64eefe2bb | ||
|
|
4abd299bf0 | ||
|
|
30c3c8420e | ||
|
|
a74fb01b6b | ||
|
|
c003c04590 | ||
|
|
35cc0b023f | ||
|
|
c2826b50ce | ||
|
|
75225bcc84 | ||
|
|
d3204ddc12 | ||
|
|
7c7ac8548d | ||
|
|
bbf95cae4c | ||
|
|
3df821277d | ||
|
|
4f4685469a | ||
|
|
d1bc1191d6 | ||
|
|
5c507a2fd7 | ||
|
|
bf4740cf66 | ||
|
|
24ffb5c0b1 | ||
|
|
a92fed4fe5 | ||
|
|
a518e1e3b1 | ||
|
|
fc9716930a | ||
|
|
f99ced96cf | ||
|
|
d0f0d1b41d | ||
|
|
77203b75f5 | ||
|
|
a0f1aca7a0 | ||
|
|
b3de1f1140 | ||
|
|
6e93ac2581 | ||
|
|
c1bfbf7168 | ||
|
|
5a7d852a94 | ||
|
|
3bf8069100 | ||
|
|
cbd192a6c5 | ||
|
|
6d22ae627b | ||
|
|
49a58c65b6 | ||
|
|
06cde35657 | ||
|
|
196a62b50c | ||
|
|
8ddaa3ede8 | ||
|
|
52ccf5a9eb | ||
|
|
b2c9b25f97 | ||
|
|
5a1fb95a8d | ||
|
|
22b9983cc7 | ||
|
|
7f5e6a1959 | ||
|
|
01025ad2c4 | ||
|
|
71580376c9 | ||
|
|
0774a11505 | ||
|
|
4d2d5abd8b | ||
|
|
884034b256 | ||
|
|
ee98021efe | ||
|
|
f52747d7a2 | ||
|
|
1bf998fe41 | ||
|
|
1b3c00a34f | ||
|
|
b3f870297e | ||
|
|
93deb57c91 | ||
|
|
2883b754a1 | ||
|
|
86325bbc5d | ||
|
|
746d718f53 | ||
|
|
c498338197 | ||
|
|
ba554bd865 | ||
|
|
0dda5d88a5 | ||
|
|
06f44161f1 | ||
|
|
4b4a018466 | ||
|
|
caf6532b51 | ||
|
|
a81cb954bb | ||
|
|
0089bb9ad0 | ||
|
|
ce8d4157db | ||
|
|
66b18cb5cd | ||
|
|
bbf33914ea | ||
|
|
e1e12bfd0c | ||
|
|
bc7f18c61d | ||
|
|
89e2958475 | ||
|
|
943c6917e6 | ||
|
|
2b6e86beae | ||
|
|
42a9a50771 | ||
|
|
74c34c9d3c | ||
|
|
e9b8cf485c | ||
|
|
2863d953bd | ||
|
|
716bdd4bf5 | ||
|
|
dfecd3e3d7 | ||
|
|
142de43be2 | ||
|
|
752c707df3 | ||
|
|
d20a24310e | ||
|
|
8e04664ffd | ||
|
|
3ab5d73f0c | ||
|
|
33f048240d | ||
|
|
0bf04aa4ad | ||
|
|
612555e6fe | ||
|
|
35196ff703 | ||
|
|
0a48c697d7 | ||
|
|
7e56770a60 | ||
|
|
9a548f0ee4 | ||
|
|
946c650a47 | ||
|
|
f99c947c32 | ||
|
|
9ffbd2ef22 | ||
|
|
66625417b4 | ||
|
|
e95ce40abd | ||
|
|
4a83ebbee0 | ||
|
|
90705b36ad | ||
|
|
be5a5c7e7b | ||
|
|
64471e9340 | ||
|
|
4becd60e65 | ||
|
|
9fcfda78b9 | ||
|
|
8558c3e1f4 | ||
|
|
0bfb42dbb6 | ||
|
|
37b069ea5d | ||
|
|
6d7260e8f6 | ||
|
|
236bda9ed6 | ||
|
|
375835092c | ||
|
|
2c3d4f5bee | ||
|
|
45114a9df0 | ||
|
|
08290c6003 | ||
|
|
729e7f774c |
1
.gitignore
vendored
1
.gitignore
vendored
@@ -1,4 +1,3 @@
|
||||
bitbake
|
||||
*.pyc
|
||||
*.pyo
|
||||
/*.patch
|
||||
|
||||
2
README
2
README
@@ -18,7 +18,7 @@ e.g. for the hardware support. Poky is in turn a component of the Yocto Project.
|
||||
|
||||
The Yocto Project has extensive documentation about the system including a
|
||||
reference manual which can be found at:
|
||||
http://yoctoproject.org/documentation
|
||||
http://yoctoproject.org/community/documentation
|
||||
|
||||
OpenEmbedded-Core is a layer containing the core metadata for current versions
|
||||
of OpenEmbedded. It is distro-less (can build a functional image with
|
||||
|
||||
@@ -56,11 +56,10 @@ class BBConfiguration(object):
|
||||
|
||||
|
||||
def get_ui(config):
|
||||
if not config.ui:
|
||||
# modify 'ui' attribute because it is also read by cooker
|
||||
config.ui = os.environ.get('BITBAKE_UI', 'knotty')
|
||||
|
||||
interface = config.ui
|
||||
if config.ui:
|
||||
interface = config.ui
|
||||
else:
|
||||
interface = 'knotty'
|
||||
|
||||
try:
|
||||
# Dynamically load the UI based on the ui name. Although we
|
||||
|
||||
@@ -1,38 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
#
|
||||
# Copyright (C) 2012 Richard Purdie
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import os
|
||||
import sys, logging
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), 'lib'))
|
||||
|
||||
import unittest
|
||||
try:
|
||||
import bb
|
||||
except RuntimeError as exc:
|
||||
sys.exit(str(exc))
|
||||
|
||||
tests = ["bb.tests.codeparser",
|
||||
"bb.tests.cow",
|
||||
"bb.tests.data",
|
||||
"bb.tests.fetch",
|
||||
"bb.tests.utils"]
|
||||
|
||||
for t in tests:
|
||||
__import__(t)
|
||||
|
||||
unittest.main(argv=["bitbake-selftest"] + tests)
|
||||
|
||||
@@ -1,120 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# Copyright (c) 2012 Wind River Systems, Inc.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
||||
# See the GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
|
||||
import os
|
||||
import sys
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname( \
|
||||
os.path.abspath(__file__))), 'lib'))
|
||||
try:
|
||||
import bb
|
||||
except RuntimeError as exc:
|
||||
sys.exit(str(exc))
|
||||
|
||||
import gtk
|
||||
import optparse
|
||||
import pygtk
|
||||
|
||||
from bb.ui.crumbs.hig import DeployImageDialog, ImageSelectionDialog, CrumbsMessageDialog
|
||||
from bb.ui.crumbs.hobwidget import HobAltButton, HobButton
|
||||
|
||||
# I put all the fs bitbake supported here. Need more test.
|
||||
DEPLOYABLE_IMAGE_TYPES = ["jffs2", "cramfs", "ext2", "ext3", "btrfs", "squashfs", "ubi", "vmdk"]
|
||||
Title = "USB Image Maker"
|
||||
|
||||
class DeployWindow(gtk.Window):
|
||||
def __init__(self, image_path=''):
|
||||
super(DeployWindow, self).__init__()
|
||||
|
||||
if len(image_path) > 0:
|
||||
valid = True
|
||||
if not os.path.exists(image_path):
|
||||
valid = False
|
||||
lbl = "<b>Invalid image file path: %s.</b>\nPress <b>Select Image</b> button to select an image." % image_path
|
||||
else:
|
||||
image_path = os.path.abspath(image_path)
|
||||
extend_name = os.path.splitext(image_path)[1][1:]
|
||||
if extend_name not in DEPLOYABLE_IMAGE_TYPES:
|
||||
valid = False
|
||||
lbl = "<b>Undeployable imge type: %s</b>\nPress <b>Select Image</b> button to select an image." % extend_name
|
||||
|
||||
if not valid:
|
||||
image_path = ''
|
||||
crumbs_dialog = CrumbsMessageDialog(self, lbl, gtk.STOCK_DIALOG_INFO)
|
||||
button = crumbs_dialog.add_button("Close", gtk.RESPONSE_OK)
|
||||
HobButton.style_button(button)
|
||||
crumbs_dialog.run()
|
||||
crumbs_dialog.destroy()
|
||||
|
||||
self.deploy_dialog = DeployImageDialog(Title, image_path, self,
|
||||
gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT
|
||||
| gtk.DIALOG_NO_SEPARATOR, None, standalone=True)
|
||||
close_button = self.deploy_dialog.add_button("Close", gtk.RESPONSE_NO)
|
||||
HobAltButton.style_button(close_button)
|
||||
close_button.connect('clicked', gtk.main_quit)
|
||||
|
||||
make_button = self.deploy_dialog.add_button("Make USB image", gtk.RESPONSE_YES)
|
||||
HobAltButton.style_button(make_button)
|
||||
|
||||
self.deploy_dialog.connect('select_image_clicked', self.select_image_clicked_cb)
|
||||
self.deploy_dialog.connect('destroy', gtk.main_quit)
|
||||
response = self.deploy_dialog.show()
|
||||
|
||||
def select_image_clicked_cb(self, dialog):
|
||||
cwd = os.getcwd()
|
||||
dialog = ImageSelectionDialog(cwd, DEPLOYABLE_IMAGE_TYPES, Title, self, gtk.FILE_CHOOSER_ACTION_SAVE )
|
||||
button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
|
||||
HobAltButton.style_button(button)
|
||||
button = dialog.add_button("Open", gtk.RESPONSE_YES)
|
||||
HobAltButton.style_button(button)
|
||||
response = dialog.run()
|
||||
|
||||
if response == gtk.RESPONSE_YES:
|
||||
if not dialog.image_names:
|
||||
lbl = "<b>No selections made</b>\nClicked the radio button to select a image."
|
||||
crumbs_dialog = CrumbsMessageDialog(self, lbl, gtk.STOCK_DIALOG_INFO)
|
||||
button = crumbs_dialog.add_button("Close", gtk.RESPONSE_OK)
|
||||
HobButton.style_button(button)
|
||||
crumbs_dialog.run()
|
||||
crumbs_dialog.destroy()
|
||||
dialog.destroy()
|
||||
return
|
||||
|
||||
# get the full path of image
|
||||
image_path = os.path.join(dialog.image_folder, dialog.image_names[0])
|
||||
self.deploy_dialog.set_image_text_buffer(image_path)
|
||||
self.deploy_dialog.set_image_path(image_path)
|
||||
|
||||
dialog.destroy()
|
||||
|
||||
def main():
|
||||
parser = optparse.OptionParser(
|
||||
usage = """%prog [-h] [image_file]
|
||||
|
||||
%prog writes bootable images to USB devices. You can
|
||||
provide the image file on the command line or select it using the GUI.""")
|
||||
|
||||
options, args = parser.parse_args(sys.argv)
|
||||
image_file = args[1] if len(args) > 1 else ''
|
||||
dw = DeployWindow(image_file)
|
||||
|
||||
if __name__ == '__main__':
|
||||
try:
|
||||
main()
|
||||
gtk.main()
|
||||
except Exception:
|
||||
import traceback
|
||||
traceback.print_exc(3)
|
||||
@@ -103,13 +103,7 @@ Show debug logging for the specified logging domains
|
||||
.TP
|
||||
.B \-P, \-\-profile
|
||||
profile the command and print a report
|
||||
|
||||
.SH ENVIRONMENT VARIABLES
|
||||
bitbake uses the following environment variables to control its
|
||||
operation:
|
||||
.TP
|
||||
.B BITBAKE_UI
|
||||
The bitbake user interface; overridden by the \fB-u\fP commandline option.
|
||||
|
||||
.SH AUTHORS
|
||||
BitBake was written by
|
||||
|
||||
@@ -174,19 +174,8 @@ def exec_func(func, d, dirs = None):
|
||||
lockfiles = None
|
||||
|
||||
tempdir = data.getVar('T', d, 1)
|
||||
|
||||
# or func allows items to be executed outside of the normal
|
||||
# task set, such as buildhistory
|
||||
task = data.getVar('BB_RUNTASK', d, 1) or func
|
||||
if task == func:
|
||||
taskfunc = task
|
||||
else:
|
||||
taskfunc = "%s.%s" % (task, func)
|
||||
|
||||
runfmt = data.getVar('BB_RUNFMT', d, 1) or "run.{func}.{pid}"
|
||||
runfn = runfmt.format(taskfunc=taskfunc, task=task, func=func, pid=os.getpid())
|
||||
runfile = os.path.join(tempdir, runfn)
|
||||
bb.utils.mkdirhier(os.path.dirname(runfile))
|
||||
bb.utils.mkdirhier(tempdir)
|
||||
runfile = os.path.join(tempdir, 'run.{0}.{1}'.format(func, os.getpid()))
|
||||
|
||||
with bb.utils.fileslocked(lockfiles):
|
||||
if ispython:
|
||||
@@ -217,8 +206,6 @@ def exec_func_python(func, d, runfile, cwd=None):
|
||||
olddir = None
|
||||
os.chdir(cwd)
|
||||
|
||||
bb.debug(2, "Executing python function %s" % func)
|
||||
|
||||
try:
|
||||
comp = utils.better_compile(code, func, bbfile)
|
||||
utils.better_exec(comp, {"d": d}, code, bbfile)
|
||||
@@ -228,15 +215,13 @@ def exec_func_python(func, d, runfile, cwd=None):
|
||||
|
||||
raise FuncFailed(func, None)
|
||||
finally:
|
||||
bb.debug(2, "Python function %s finished" % func)
|
||||
|
||||
if cwd and olddir:
|
||||
try:
|
||||
os.chdir(olddir)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
def exec_func_shell(func, d, runfile, cwd=None):
|
||||
def exec_func_shell(function, d, runfile, cwd=None):
|
||||
"""Execute a shell function from the metadata
|
||||
|
||||
Note on directory behavior. The 'dirs' varflag should contain a list
|
||||
@@ -249,18 +234,18 @@ def exec_func_shell(func, d, runfile, cwd=None):
|
||||
|
||||
with open(runfile, 'w') as script:
|
||||
script.write('#!/bin/sh -e\n')
|
||||
data.emit_func(func, script, d)
|
||||
data.emit_func(function, script, d)
|
||||
|
||||
if bb.msg.loggerVerboseLogs:
|
||||
script.write("set -x\n")
|
||||
if cwd:
|
||||
script.write("cd %s\n" % cwd)
|
||||
script.write("%s\n" % func)
|
||||
script.write("%s\n" % function)
|
||||
|
||||
os.chmod(runfile, 0775)
|
||||
|
||||
cmd = runfile
|
||||
if d.getVarFlag(func, 'fakeroot'):
|
||||
if d.getVarFlag(function, 'fakeroot'):
|
||||
fakerootcmd = d.getVar('FAKEROOT', True)
|
||||
if fakerootcmd:
|
||||
cmd = [fakerootcmd, runfile]
|
||||
@@ -270,15 +255,11 @@ def exec_func_shell(func, d, runfile, cwd=None):
|
||||
else:
|
||||
logfile = sys.stdout
|
||||
|
||||
bb.debug(2, "Executing shell function %s" % func)
|
||||
|
||||
try:
|
||||
bb.process.run(cmd, shell=False, stdin=NULL, log=logfile)
|
||||
except bb.process.CmdError:
|
||||
logfn = d.getVar('BB_LOGFILE', True)
|
||||
raise FuncFailed(func, logfn)
|
||||
|
||||
bb.debug(2, "Shell function %s finished" % func)
|
||||
raise FuncFailed(function, logfn)
|
||||
|
||||
def _task_data(fn, task, d):
|
||||
localdata = data.createCopy(d)
|
||||
@@ -309,23 +290,8 @@ def _exec_task(fn, task, d, quieterr):
|
||||
bb.fatal("T variable not set, unable to build")
|
||||
|
||||
bb.utils.mkdirhier(tempdir)
|
||||
|
||||
# Determine the logfile to generate
|
||||
logfmt = localdata.getVar('BB_LOGFMT', True) or 'log.{task}.{pid}'
|
||||
logbase = logfmt.format(task=task, pid=os.getpid())
|
||||
|
||||
# Document the order of the tasks...
|
||||
logorder = os.path.join(tempdir, 'log.task_order')
|
||||
try:
|
||||
logorderfile = file(logorder, 'a')
|
||||
except OSError:
|
||||
logger.exception("Opening log file '%s'", logorder)
|
||||
pass
|
||||
logorderfile.write('{0} ({1}): {2}\n'.format(task, os.getpid(), logbase))
|
||||
logorderfile.close()
|
||||
|
||||
# Setup the courtesy link to the logfn
|
||||
loglink = os.path.join(tempdir, 'log.{0}'.format(task))
|
||||
logbase = 'log.{0}.{1}'.format(task, os.getpid())
|
||||
logfn = os.path.join(tempdir, logbase)
|
||||
if loglink:
|
||||
bb.utils.remove(loglink)
|
||||
@@ -348,7 +314,6 @@ def _exec_task(fn, task, d, quieterr):
|
||||
# Handle logfiles
|
||||
si = file('/dev/null', 'r')
|
||||
try:
|
||||
bb.utils.mkdirhier(os.path.dirname(logfn))
|
||||
logfile = file(logfn, 'w')
|
||||
except OSError:
|
||||
logger.exception("Opening log file '%s'", logfn)
|
||||
@@ -375,7 +340,6 @@ def _exec_task(fn, task, d, quieterr):
|
||||
bblogger.addHandler(errchk)
|
||||
|
||||
localdata.setVar('BB_LOGFILE', logfn)
|
||||
localdata.setVar('BB_RUNTASK', task)
|
||||
|
||||
event.fire(TaskStarted(task, localdata), localdata)
|
||||
try:
|
||||
|
||||
@@ -1,12 +1,11 @@
|
||||
# ex:ts=4:sw=4:sts=4:et
|
||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||
#
|
||||
# BitBake Cache implementation
|
||||
# BitBake 'Event' implementation
|
||||
#
|
||||
# Caching of bitbake variables before task execution
|
||||
|
||||
# Copyright (C) 2006 Richard Purdie
|
||||
# Copyright (C) 2012 Intel Corporation
|
||||
|
||||
# but small sections based on code from bin/bitbake:
|
||||
# Copyright (C) 2003, 2004 Chris Larson
|
||||
@@ -43,7 +42,7 @@ except ImportError:
|
||||
logger.info("Importing cPickle failed. "
|
||||
"Falling back to a very slow implementation.")
|
||||
|
||||
__cache_version__ = "144"
|
||||
__cache_version__ = "143"
|
||||
|
||||
def getCacheFile(path, filename, data_hash):
|
||||
return os.path.join(path, filename + "." + data_hash)
|
||||
@@ -76,13 +75,9 @@ class RecipeInfoCommon(object):
|
||||
for task in tasks)
|
||||
|
||||
@classmethod
|
||||
def flaglist(cls, flag, varlist, metadata, squash=False):
|
||||
out_dict = dict((var, metadata.getVarFlag(var, flag, True))
|
||||
def flaglist(cls, flag, varlist, metadata):
|
||||
return dict((var, metadata.getVarFlag(var, flag, True))
|
||||
for var in varlist)
|
||||
if squash:
|
||||
return dict((k,v) for (k,v) in out_dict.iteritems() if v)
|
||||
else:
|
||||
return out_dict
|
||||
|
||||
@classmethod
|
||||
def getvar(cls, var, metadata):
|
||||
@@ -132,7 +127,6 @@ class CoreRecipeInfo(RecipeInfoCommon):
|
||||
self.stamp = self.getvar('STAMP', metadata)
|
||||
self.stamp_base = self.flaglist('stamp-base', self.tasks, metadata)
|
||||
self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
|
||||
self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
|
||||
self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
|
||||
self.depends = self.depvar('DEPENDS', metadata)
|
||||
self.provides = self.depvar('PROVIDES', metadata)
|
||||
@@ -159,7 +153,6 @@ class CoreRecipeInfo(RecipeInfoCommon):
|
||||
cachedata.stamp = {}
|
||||
cachedata.stamp_base = {}
|
||||
cachedata.stamp_extrainfo = {}
|
||||
cachedata.file_checksums = {}
|
||||
cachedata.fn_provides = {}
|
||||
cachedata.pn_provides = defaultdict(list)
|
||||
cachedata.all_depends = []
|
||||
@@ -191,7 +184,6 @@ class CoreRecipeInfo(RecipeInfoCommon):
|
||||
cachedata.stamp[fn] = self.stamp
|
||||
cachedata.stamp_base[fn] = self.stamp_base
|
||||
cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
|
||||
cachedata.file_checksums[fn] = self.file_checksums
|
||||
|
||||
provides = [self.pn]
|
||||
for provide in self.provides:
|
||||
@@ -711,115 +703,4 @@ class CacheData(object):
|
||||
for info in info_array:
|
||||
info.add_cacheData(self, fn)
|
||||
|
||||
|
||||
class MultiProcessCache(object):
|
||||
"""
|
||||
BitBake multi-process cache implementation
|
||||
|
||||
Used by the codeparser & file checksum caches
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.cachefile = None
|
||||
self.cachedata = self.create_cachedata()
|
||||
self.cachedata_extras = self.create_cachedata()
|
||||
|
||||
def init_cache(self, d):
|
||||
cachedir = (d.getVar("PERSISTENT_DIR", True) or
|
||||
d.getVar("CACHE", True))
|
||||
if cachedir in [None, '']:
|
||||
return
|
||||
bb.utils.mkdirhier(cachedir)
|
||||
self.cachefile = os.path.join(cachedir, self.__class__.cache_file_name)
|
||||
logger.debug(1, "Using cache in '%s'", self.cachefile)
|
||||
|
||||
try:
|
||||
p = pickle.Unpickler(file(self.cachefile, "rb"))
|
||||
data, version = p.load()
|
||||
except:
|
||||
return
|
||||
|
||||
if version != self.__class__.CACHE_VERSION:
|
||||
return
|
||||
|
||||
self.cachedata = data
|
||||
|
||||
def internSet(self, items):
|
||||
new = set()
|
||||
for i in items:
|
||||
new.add(intern(i))
|
||||
return new
|
||||
|
||||
def compress_keys(self, data):
|
||||
# Override in subclasses if desired
|
||||
return
|
||||
|
||||
def create_cachedata(self):
|
||||
data = [{}]
|
||||
return data
|
||||
|
||||
def save_extras(self, d):
|
||||
if not self.cachefile:
|
||||
return
|
||||
|
||||
glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)
|
||||
|
||||
i = os.getpid()
|
||||
lf = None
|
||||
while not lf:
|
||||
lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
|
||||
if not lf or os.path.exists(self.cachefile + "-" + str(i)):
|
||||
if lf:
|
||||
bb.utils.unlockfile(lf)
|
||||
lf = None
|
||||
i = i + 1
|
||||
continue
|
||||
|
||||
p = pickle.Pickler(file(self.cachefile + "-" + str(i), "wb"), -1)
|
||||
p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])
|
||||
|
||||
bb.utils.unlockfile(lf)
|
||||
bb.utils.unlockfile(glf)
|
||||
|
||||
def merge_data(self, source, dest):
|
||||
for j in range(0,len(dest)):
|
||||
for h in source[j]:
|
||||
if h not in dest[j]:
|
||||
dest[j][h] = source[j][h]
|
||||
|
||||
def save_merge(self, d):
|
||||
if not self.cachefile:
|
||||
return
|
||||
|
||||
glf = bb.utils.lockfile(self.cachefile + ".lock")
|
||||
|
||||
try:
|
||||
p = pickle.Unpickler(file(self.cachefile, "rb"))
|
||||
data, version = p.load()
|
||||
except (IOError, EOFError):
|
||||
data, version = None, None
|
||||
|
||||
if version != self.__class__.CACHE_VERSION:
|
||||
data = self.create_cachedata()
|
||||
|
||||
for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
|
||||
f = os.path.join(os.path.dirname(self.cachefile), f)
|
||||
try:
|
||||
p = pickle.Unpickler(file(f, "rb"))
|
||||
extradata, version = p.load()
|
||||
except (IOError, EOFError):
|
||||
extradata, version = self.create_cachedata(), None
|
||||
|
||||
if version != self.__class__.CACHE_VERSION:
|
||||
continue
|
||||
|
||||
self.merge_data(extradata, data)
|
||||
os.unlink(f)
|
||||
|
||||
self.compress_keys(data)
|
||||
|
||||
p = pickle.Pickler(file(self.cachefile, "wb"), -1)
|
||||
p.dump([data, self.__class__.CACHE_VERSION])
|
||||
|
||||
bb.utils.unlockfile(glf)
|
||||
|
||||
|
||||
|
||||
@@ -1,90 +0,0 @@
|
||||
# Local file checksum cache implementation
|
||||
#
|
||||
# Copyright (C) 2012 Intel Corporation
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import os
|
||||
import stat
|
||||
import bb.utils
|
||||
import logging
|
||||
from bb.cache import MultiProcessCache
|
||||
|
||||
logger = logging.getLogger("BitBake.Cache")
|
||||
|
||||
try:
|
||||
import cPickle as pickle
|
||||
except ImportError:
|
||||
import pickle
|
||||
logger.info("Importing cPickle failed. "
|
||||
"Falling back to a very slow implementation.")
|
||||
|
||||
|
||||
# mtime cache (non-persistent)
|
||||
# based upon the assumption that files do not change during bitbake run
|
||||
class FileMtimeCache(object):
|
||||
cache = {}
|
||||
|
||||
def cached_mtime(self, f):
|
||||
if f not in self.cache:
|
||||
self.cache[f] = os.stat(f)[stat.ST_MTIME]
|
||||
return self.cache[f]
|
||||
|
||||
def cached_mtime_noerror(self, f):
|
||||
if f not in self.cache:
|
||||
try:
|
||||
self.cache[f] = os.stat(f)[stat.ST_MTIME]
|
||||
except OSError:
|
||||
return 0
|
||||
return self.cache[f]
|
||||
|
||||
def update_mtime(self, f):
|
||||
self.cache[f] = os.stat(f)[stat.ST_MTIME]
|
||||
return self.cache[f]
|
||||
|
||||
def clear(self):
|
||||
self.cache.clear()
|
||||
|
||||
# Checksum + mtime cache (persistent)
|
||||
class FileChecksumCache(MultiProcessCache):
|
||||
cache_file_name = "local_file_checksum_cache.dat"
|
||||
CACHE_VERSION = 1
|
||||
|
||||
def __init__(self):
|
||||
self.mtime_cache = FileMtimeCache()
|
||||
MultiProcessCache.__init__(self)
|
||||
|
||||
def get_checksum(self, f):
|
||||
entry = self.cachedata[0].get(f)
|
||||
cmtime = self.mtime_cache.cached_mtime(f)
|
||||
if entry:
|
||||
(mtime, hashval) = entry
|
||||
if cmtime == mtime:
|
||||
return hashval
|
||||
else:
|
||||
bb.debug(2, "file %s changed mtime, recompute checksum" % f)
|
||||
|
||||
hashval = bb.utils.md5_file(f)
|
||||
self.cachedata_extras[0][f] = (cmtime, hashval)
|
||||
return hashval
|
||||
|
||||
def merge_data(self, source, dest):
|
||||
for h in source[0]:
|
||||
if h in dest:
|
||||
(smtime, _) = source[0][h]
|
||||
(dmtime, _) = dest[0][h]
|
||||
if smtime > dmtime:
|
||||
dest[0][h] = source[0][h]
|
||||
else:
|
||||
dest[0][h] = source[0][h]
|
||||
@@ -5,10 +5,10 @@ import os.path
|
||||
import bb.utils, bb.data
|
||||
from itertools import chain
|
||||
from pysh import pyshyacc, pyshlex, sherrors
|
||||
from bb.cache import MultiProcessCache
|
||||
|
||||
|
||||
logger = logging.getLogger('BitBake.CodeParser')
|
||||
PARSERCACHE_VERSION = 2
|
||||
|
||||
try:
|
||||
import cPickle as pickle
|
||||
@@ -32,56 +32,133 @@ def check_indent(codestr):
|
||||
|
||||
return codestr
|
||||
|
||||
pythonparsecache = {}
|
||||
shellparsecache = {}
|
||||
pythonparsecacheextras = {}
|
||||
shellparsecacheextras = {}
|
||||
|
||||
class CodeParserCache(MultiProcessCache):
|
||||
cache_file_name = "bb_codeparser.dat"
|
||||
CACHE_VERSION = 2
|
||||
|
||||
def __init__(self):
|
||||
MultiProcessCache.__init__(self)
|
||||
self.pythoncache = self.cachedata[0]
|
||||
self.shellcache = self.cachedata[1]
|
||||
self.pythoncacheextras = self.cachedata_extras[0]
|
||||
self.shellcacheextras = self.cachedata_extras[1]
|
||||
|
||||
def init_cache(self, d):
|
||||
MultiProcessCache.init_cache(self, d)
|
||||
|
||||
# cachedata gets re-assigned in the parent
|
||||
self.pythoncache = self.cachedata[0]
|
||||
self.shellcache = self.cachedata[1]
|
||||
|
||||
def compress_keys(self, data):
|
||||
# When the dicts are originally created, python calls intern() on the set keys
|
||||
# which significantly improves memory usage. Sadly the pickle/unpickle process
|
||||
# doesn't call intern() on the keys and results in the same strings being duplicated
|
||||
# in memory. This also means pickle will save the same string multiple times in
|
||||
# the cache file. By interning the data here, the cache file shrinks dramatically
|
||||
# meaning faster load times and the reloaded cache files also consume much less
|
||||
# memory. This is worth any performance hit from this loops and the use of the
|
||||
# intern() data storage.
|
||||
# Python 3.x may behave better in this area
|
||||
for h in data[0]:
|
||||
data[0][h]["refs"] = self.internSet(data[0][h]["refs"])
|
||||
data[0][h]["execs"] = self.internSet(data[0][h]["execs"])
|
||||
for h in data[1]:
|
||||
data[1][h]["execs"] = self.internSet(data[1][h]["execs"])
|
||||
return
|
||||
|
||||
def create_cachedata(self):
|
||||
data = [{}, {}]
|
||||
return data
|
||||
|
||||
codeparsercache = CodeParserCache()
|
||||
def parser_cachefile(d):
|
||||
cachedir = (d.getVar("PERSISTENT_DIR", True) or
|
||||
d.getVar("CACHE", True))
|
||||
if cachedir in [None, '']:
|
||||
return None
|
||||
bb.utils.mkdirhier(cachedir)
|
||||
cachefile = os.path.join(cachedir, "bb_codeparser.dat")
|
||||
logger.debug(1, "Using cache in '%s' for codeparser cache", cachefile)
|
||||
return cachefile
|
||||
|
||||
def parser_cache_init(d):
|
||||
codeparsercache.init_cache(d)
|
||||
global pythonparsecache
|
||||
global shellparsecache
|
||||
|
||||
cachefile = parser_cachefile(d)
|
||||
if not cachefile:
|
||||
return
|
||||
|
||||
try:
|
||||
p = pickle.Unpickler(file(cachefile, "rb"))
|
||||
data, version = p.load()
|
||||
except:
|
||||
return
|
||||
|
||||
if version != PARSERCACHE_VERSION:
|
||||
return
|
||||
|
||||
pythonparsecache = data[0]
|
||||
shellparsecache = data[1]
|
||||
|
||||
def parser_cache_save(d):
|
||||
codeparsercache.save_extras(d)
|
||||
cachefile = parser_cachefile(d)
|
||||
if not cachefile:
|
||||
return
|
||||
|
||||
glf = bb.utils.lockfile(cachefile + ".lock", shared=True)
|
||||
|
||||
i = os.getpid()
|
||||
lf = None
|
||||
while not lf:
|
||||
shellcache = {}
|
||||
pythoncache = {}
|
||||
|
||||
lf = bb.utils.lockfile(cachefile + ".lock." + str(i), retry=False)
|
||||
if not lf or os.path.exists(cachefile + "-" + str(i)):
|
||||
if lf:
|
||||
bb.utils.unlockfile(lf)
|
||||
lf = None
|
||||
i = i + 1
|
||||
continue
|
||||
|
||||
shellcache = shellparsecacheextras
|
||||
pythoncache = pythonparsecacheextras
|
||||
|
||||
p = pickle.Pickler(file(cachefile + "-" + str(i), "wb"), -1)
|
||||
p.dump([[pythoncache, shellcache], PARSERCACHE_VERSION])
|
||||
|
||||
bb.utils.unlockfile(lf)
|
||||
bb.utils.unlockfile(glf)
|
||||
|
||||
def internSet(items):
|
||||
new = set()
|
||||
for i in items:
|
||||
new.add(intern(i))
|
||||
return new
|
||||
|
||||
def parser_cache_savemerge(d):
|
||||
codeparsercache.save_merge(d)
|
||||
cachefile = parser_cachefile(d)
|
||||
if not cachefile:
|
||||
return
|
||||
|
||||
glf = bb.utils.lockfile(cachefile + ".lock")
|
||||
|
||||
try:
|
||||
p = pickle.Unpickler(file(cachefile, "rb"))
|
||||
data, version = p.load()
|
||||
except (IOError, EOFError):
|
||||
data, version = None, None
|
||||
|
||||
if version != PARSERCACHE_VERSION:
|
||||
data = [{}, {}]
|
||||
|
||||
for f in [y for y in os.listdir(os.path.dirname(cachefile)) if y.startswith(os.path.basename(cachefile) + '-')]:
|
||||
f = os.path.join(os.path.dirname(cachefile), f)
|
||||
try:
|
||||
p = pickle.Unpickler(file(f, "rb"))
|
||||
extradata, version = p.load()
|
||||
except (IOError, EOFError):
|
||||
extradata, version = [{}, {}], None
|
||||
|
||||
if version != PARSERCACHE_VERSION:
|
||||
continue
|
||||
|
||||
for h in extradata[0]:
|
||||
if h not in data[0]:
|
||||
data[0][h] = extradata[0][h]
|
||||
for h in extradata[1]:
|
||||
if h not in data[1]:
|
||||
data[1][h] = extradata[1][h]
|
||||
os.unlink(f)
|
||||
|
||||
# When the dicts are originally created, python calls intern() on the set keys
|
||||
# which significantly improves memory usage. Sadly the pickle/unpickle process
|
||||
# doesn't call intern() on the keys and results in the same strings being duplicated
|
||||
# in memory. This also means pickle will save the same string multiple times in
|
||||
# the cache file. By interning the data here, the cache file shrinks dramatically
|
||||
# meaning faster load times and the reloaded cache files also consume much less
|
||||
# memory. This is worth any performance hit from this loops and the use of the
|
||||
# intern() data storage.
|
||||
# Python 3.x may behave better in this area
|
||||
for h in data[0]:
|
||||
data[0][h]["refs"] = internSet(data[0][h]["refs"])
|
||||
data[0][h]["execs"] = internSet(data[0][h]["execs"])
|
||||
for h in data[1]:
|
||||
data[1][h]["execs"] = internSet(data[1][h]["execs"])
|
||||
|
||||
p = pickle.Pickler(file(cachefile, "wb"), -1)
|
||||
p.dump([data, PARSERCACHE_VERSION])
|
||||
|
||||
bb.utils.unlockfile(glf)
|
||||
|
||||
|
||||
Logger = logging.getLoggerClass()
|
||||
class BufferedLogger(Logger):
|
||||
@@ -158,14 +235,14 @@ class PythonParser():
|
||||
def parse_python(self, node):
|
||||
h = hash(str(node))
|
||||
|
||||
if h in codeparsercache.pythoncache:
|
||||
self.references = codeparsercache.pythoncache[h]["refs"]
|
||||
self.execs = codeparsercache.pythoncache[h]["execs"]
|
||||
if h in pythonparsecache:
|
||||
self.references = pythonparsecache[h]["refs"]
|
||||
self.execs = pythonparsecache[h]["execs"]
|
||||
return
|
||||
|
||||
if h in codeparsercache.pythoncacheextras:
|
||||
self.references = codeparsercache.pythoncacheextras[h]["refs"]
|
||||
self.execs = codeparsercache.pythoncacheextras[h]["execs"]
|
||||
if h in pythonparsecacheextras:
|
||||
self.references = pythonparsecacheextras[h]["refs"]
|
||||
self.execs = pythonparsecacheextras[h]["execs"]
|
||||
return
|
||||
|
||||
|
||||
@@ -179,9 +256,9 @@ class PythonParser():
|
||||
self.references.update(self.var_references)
|
||||
self.references.update(self.var_execs)
|
||||
|
||||
codeparsercache.pythoncacheextras[h] = {}
|
||||
codeparsercache.pythoncacheextras[h]["refs"] = self.references
|
||||
codeparsercache.pythoncacheextras[h]["execs"] = self.execs
|
||||
pythonparsecacheextras[h] = {}
|
||||
pythonparsecacheextras[h]["refs"] = self.references
|
||||
pythonparsecacheextras[h]["execs"] = self.execs
|
||||
|
||||
class ShellParser():
|
||||
def __init__(self, name, log):
|
||||
@@ -199,12 +276,12 @@ class ShellParser():
|
||||
|
||||
h = hash(str(value))
|
||||
|
||||
if h in codeparsercache.shellcache:
|
||||
self.execs = codeparsercache.shellcache[h]["execs"]
|
||||
if h in shellparsecache:
|
||||
self.execs = shellparsecache[h]["execs"]
|
||||
return self.execs
|
||||
|
||||
if h in codeparsercache.shellcacheextras:
|
||||
self.execs = codeparsercache.shellcacheextras[h]["execs"]
|
||||
if h in shellparsecacheextras:
|
||||
self.execs = shellparsecacheextras[h]["execs"]
|
||||
return self.execs
|
||||
|
||||
try:
|
||||
@@ -216,8 +293,8 @@ class ShellParser():
|
||||
self.process_tokens(token)
|
||||
self.execs = set(cmd for cmd in self.allexecs if cmd not in self.funcdefs)
|
||||
|
||||
codeparsercache.shellcacheextras[h] = {}
|
||||
codeparsercache.shellcacheextras[h]["execs"] = self.execs
|
||||
shellparsecacheextras[h] = {}
|
||||
shellparsecacheextras[h]["execs"] = self.execs
|
||||
|
||||
return self.execs
|
||||
|
||||
|
||||
@@ -985,12 +985,12 @@ class BBCooker:
|
||||
"""
|
||||
Find the .bb files which match the expression in 'buildfile'.
|
||||
"""
|
||||
|
||||
if bf.startswith("/") or bf.startswith("../"):
|
||||
bf = os.path.abspath(bf)
|
||||
filelist, masked = self.collect_bbfiles()
|
||||
try:
|
||||
os.stat(bf)
|
||||
bf = os.path.abspath(bf)
|
||||
return [bf]
|
||||
except OSError:
|
||||
regexp = re.compile(bf)
|
||||
@@ -1570,7 +1570,6 @@ class CookerParser(object):
|
||||
def init():
|
||||
Parser.cfg = self.cfgdata
|
||||
multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, args=(self.cfgdata,), exitpriority=1)
|
||||
multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, args=(self.cfgdata,), exitpriority=1)
|
||||
|
||||
self.feeder_quit = multiprocessing.Queue(maxsize=1)
|
||||
self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
|
||||
@@ -1622,7 +1621,6 @@ class CookerParser(object):
|
||||
sync.start()
|
||||
multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
|
||||
bb.codeparser.parser_cache_savemerge(self.cooker.configuration.data)
|
||||
bb.fetch.fetcher_parse_done(self.cooker.configuration.data)
|
||||
|
||||
def load_cached(self):
|
||||
for filename, appends in self.fromcache:
|
||||
@@ -1646,8 +1644,6 @@ class CookerParser(object):
|
||||
yield result
|
||||
|
||||
def parse_next(self):
|
||||
result = []
|
||||
parsed = None
|
||||
try:
|
||||
parsed, result = self.results.next()
|
||||
except StopIteration:
|
||||
@@ -1657,13 +1653,9 @@ class CookerParser(object):
|
||||
logger.error('Unable to parse %s: %s' %
|
||||
(exc.recipe, bb.exceptions.to_string(exc.realexception)))
|
||||
self.shutdown(clean=False)
|
||||
except bb.parse.ParseError as exc:
|
||||
except (bb.parse.ParseError, bb.data_smart.ExpansionError) as exc:
|
||||
logger.error(str(exc))
|
||||
self.shutdown(clean=False)
|
||||
except bb.data_smart.ExpansionError as exc:
|
||||
_, value, _ = sys.exc_info()
|
||||
logger.error('ExpansionError during parsing %s: %s', value.recipe, str(exc))
|
||||
self.shutdown(clean=False)
|
||||
except SyntaxError as exc:
|
||||
logger.error('Unable to parse %s', exc.recipe)
|
||||
self.shutdown(clean=False)
|
||||
|
||||
@@ -279,12 +279,7 @@ def build_dependencies(key, keys, shelldeps, vardepvals, d):
|
||||
deps = set()
|
||||
vardeps = d.getVarFlag(key, "vardeps", True)
|
||||
try:
|
||||
if key[-1] == ']':
|
||||
vf = key[:-1].split('[')
|
||||
value = d.getVarFlag(vf[0], vf[1], False)
|
||||
else:
|
||||
value = d.getVar(key, False)
|
||||
|
||||
value = d.getVar(key, False)
|
||||
if key in vardepvals:
|
||||
value = d.getVarFlag(key, "vardepvalue", True)
|
||||
elif d.getVarFlag(key, "func"):
|
||||
@@ -306,19 +301,6 @@ def build_dependencies(key, keys, shelldeps, vardepvals, d):
|
||||
parser = d.expandWithRefs(value, key)
|
||||
deps |= parser.references
|
||||
deps = deps | (keys & parser.execs)
|
||||
|
||||
# Add varflags, assuming an exclusion list is set
|
||||
varflagsexcl = d.getVar('BB_SIGNATURE_EXCLUDE_FLAGS', True)
|
||||
if varflagsexcl:
|
||||
varfdeps = []
|
||||
varflags = d.getVarFlags(key)
|
||||
if varflags:
|
||||
for f in varflags:
|
||||
if f not in varflagsexcl:
|
||||
varfdeps.append('%s[%s]' % (key, f))
|
||||
if varfdeps:
|
||||
deps |= set(varfdeps)
|
||||
|
||||
deps |= set((vardeps or "").split())
|
||||
deps -= set((d.getVarFlag(key, "vardepsexclude", True) or "").split())
|
||||
except:
|
||||
|
||||
@@ -102,10 +102,7 @@ class ExpansionError(Exception):
|
||||
self.expression = expression
|
||||
self.variablename = varname
|
||||
self.exception = exception
|
||||
if varname:
|
||||
self.msg = "Failure expanding variable %s, expression was %s which triggered exception %s: %s" % (varname, expression, type(exception).__name__, exception)
|
||||
else:
|
||||
self.msg = "Failure expanding expression %s which triggered exception %s: %s" % (expression, type(exception).__name__, exception)
|
||||
self.msg = "Failure expanding variable %s, expression was %s which triggered exception %s: %s" % (varname, expression, type(exception).__name__, exception)
|
||||
Exception.__init__(self, self.msg)
|
||||
self.args = (varname, expression, exception)
|
||||
def __str__(self):
|
||||
|
||||
@@ -32,14 +32,7 @@ class TracebackEntry(namedtuple.abc):
|
||||
def _get_frame_args(frame):
|
||||
"""Get the formatted arguments and class (if available) for a frame"""
|
||||
arginfo = inspect.getargvalues(frame)
|
||||
|
||||
try:
|
||||
if not arginfo.args:
|
||||
return '', None
|
||||
# There have been reports from the field of python 2.6 which doesn't
|
||||
# return a namedtuple here but simply a tuple so fallback gracefully if
|
||||
# args isn't present.
|
||||
except AttributeError:
|
||||
if not arginfo.args:
|
||||
return '', None
|
||||
|
||||
firstarg = arginfo.args[0]
|
||||
|
||||
@@ -8,7 +8,6 @@ BitBake build tools.
|
||||
"""
|
||||
|
||||
# Copyright (C) 2003, 2004 Chris Larson
|
||||
# Copyright (C) 2012 Intel Corporation
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
@@ -29,13 +28,10 @@ from __future__ import absolute_import
|
||||
from __future__ import print_function
|
||||
import os, re
|
||||
import logging
|
||||
import urllib
|
||||
import bb.persist_data, bb.utils
|
||||
import bb.checksum
|
||||
from bb import data
|
||||
|
||||
__version__ = "2"
|
||||
_checksum_cache = bb.checksum.FileChecksumCache()
|
||||
|
||||
logger = logging.getLogger("BitBake.Fetcher")
|
||||
|
||||
@@ -67,9 +63,6 @@ class FetchError(BBFetchException):
|
||||
BBFetchException.__init__(self, msg)
|
||||
self.args = (message, url)
|
||||
|
||||
class ChecksumError(FetchError):
|
||||
"""Exception when mismatched checksum encountered"""
|
||||
|
||||
class UnpackError(BBFetchException):
|
||||
"""General fetcher exception when something happens incorrectly when unpacking"""
|
||||
def __init__(self, message, url):
|
||||
@@ -106,15 +99,12 @@ class ParameterError(BBFetchException):
|
||||
class NetworkAccess(BBFetchException):
|
||||
"""Exception raised when network access is disabled but it is required."""
|
||||
def __init__(self, url, cmd):
|
||||
msg = "Network access disabled through BB_NO_NETWORK but access requested with command %s (for url %s)" % (cmd, url)
|
||||
msg = "Network access disabled through BB_NO_NETWORK but access rquested with command %s (for url %s)" % (cmd, url)
|
||||
self.url = url
|
||||
self.cmd = cmd
|
||||
BBFetchException.__init__(self, msg)
|
||||
self.args = (url, cmd)
|
||||
|
||||
class NonLocalMethod(Exception):
|
||||
def __init__(self):
|
||||
Exception.__init__(self)
|
||||
|
||||
def decodeurl(url):
|
||||
"""Decodes an URL into the tokens (scheme, network location, path,
|
||||
@@ -154,14 +144,14 @@ def decodeurl(url):
|
||||
s1, s2 = s.split('=')
|
||||
p[s1] = s2
|
||||
|
||||
return type, host, urllib.unquote(path), user, pswd, p
|
||||
return (type, host, path, user, pswd, p)
|
||||
|
||||
def encodeurl(decoded):
|
||||
"""Encodes a URL from tokens (scheme, network location, path,
|
||||
user, password, parameters).
|
||||
"""
|
||||
|
||||
type, host, path, user, pswd, p = decoded
|
||||
(type, host, path, user, pswd, p) = decoded
|
||||
|
||||
if not path:
|
||||
raise MissingParameterError('path', "encoded from the data %s" % str(decoded))
|
||||
@@ -175,7 +165,7 @@ def encodeurl(decoded):
|
||||
url += "@"
|
||||
if host and type != "file":
|
||||
url += "%s" % host
|
||||
url += "%s" % urllib.quote(path)
|
||||
url += "%s" % path
|
||||
if p:
|
||||
for parm in p:
|
||||
url += ";%s=%s" % (parm, p[parm])
|
||||
@@ -239,18 +229,10 @@ def fetcher_init(d):
|
||||
else:
|
||||
raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
|
||||
|
||||
_checksum_cache.init_cache(d)
|
||||
|
||||
for m in methods:
|
||||
if hasattr(m, "init"):
|
||||
m.init(d)
|
||||
|
||||
def fetcher_parse_save(d):
|
||||
_checksum_cache.save_extras(d)
|
||||
|
||||
def fetcher_parse_done(d):
|
||||
_checksum_cache.save_merge(d)
|
||||
|
||||
def fetcher_compare_revisions(d):
|
||||
"""
|
||||
Compare the revisions in the persistant cache with current values and
|
||||
@@ -277,37 +259,39 @@ def verify_checksum(u, ud, d):
|
||||
"""
|
||||
verify the MD5 and SHA256 checksum for downloaded src
|
||||
|
||||
Raises a FetchError if one or both of the SRC_URI checksums do not match
|
||||
the downloaded file, or if BB_STRICT_CHECKSUM is set and there are no
|
||||
checksums specified.
|
||||
return value:
|
||||
- True: a checksum matched
|
||||
- False: neither checksum matched
|
||||
|
||||
if checksum is missing in recipes file, "BB_STRICT_CHECKSUM" decide the return value.
|
||||
if BB_STRICT_CHECKSUM = "1" then return false as unmatched, otherwise return true as
|
||||
matched
|
||||
"""
|
||||
|
||||
if not ud.method.supports_checksum(ud):
|
||||
if not ud.type in ["http", "https", "ftp", "ftps"]:
|
||||
return
|
||||
|
||||
md5data = bb.utils.md5_file(ud.localpath)
|
||||
sha256data = bb.utils.sha256_file(ud.localpath)
|
||||
|
||||
if ud.method.recommends_checksum(ud):
|
||||
# If strict checking enabled and neither sum defined, raise error
|
||||
strict = d.getVar("BB_STRICT_CHECKSUM", True) or None
|
||||
if (strict and ud.md5_expected == None and ud.sha256_expected == None):
|
||||
raise FetchError('No checksum specified for %s, please add at least one to the recipe:\n'
|
||||
'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
|
||||
(ud.localpath, ud.md5_name, md5data,
|
||||
ud.sha256_name, sha256data), u)
|
||||
# If strict checking enabled and neither sum defined, raise error
|
||||
strict = d.getVar("BB_STRICT_CHECKSUM", True) or None
|
||||
if (strict and ud.md5_expected == None and ud.sha256_expected == None):
|
||||
raise FetchError('No checksum specified for %s, please add at least one to the recipe:\n'
|
||||
'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
|
||||
(ud.localpath, ud.md5_name, md5data,
|
||||
ud.sha256_name, sha256data), u)
|
||||
|
||||
# Log missing sums so user can more easily add them
|
||||
if ud.md5_expected == None:
|
||||
logger.warn('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n'
|
||||
'SRC_URI[%s] = "%s"',
|
||||
ud.localpath, ud.md5_name, md5data)
|
||||
# Log missing sums so user can more easily add them
|
||||
if ud.md5_expected == None:
|
||||
logger.warn('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n'
|
||||
'SRC_URI[%s] = "%s"',
|
||||
ud.localpath, ud.md5_name, md5data)
|
||||
|
||||
if ud.sha256_expected == None:
|
||||
logger.warn('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n'
|
||||
'SRC_URI[%s] = "%s"',
|
||||
ud.localpath, ud.sha256_name, sha256data)
|
||||
if ud.sha256_expected == None:
|
||||
logger.warn('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n'
|
||||
'SRC_URI[%s] = "%s"',
|
||||
ud.localpath, ud.sha256_name, sha256data)
|
||||
|
||||
md5mismatch = False
|
||||
sha256mismatch = False
|
||||
@@ -328,7 +312,7 @@ def verify_checksum(u, ud, d):
|
||||
msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'sha256', sha256data, ud.sha256_expected)
|
||||
|
||||
if len(msg):
|
||||
raise ChecksumError('Checksum mismatch!%s' % msg, u)
|
||||
raise FetchError('Checksum mismatch!%s' % msg, u)
|
||||
|
||||
|
||||
def update_stamp(u, ud, d):
|
||||
@@ -492,8 +476,10 @@ def try_mirrors(d, origud, mirrors, check = False):
|
||||
|
||||
if not os.path.exists(ud.donestamp) or ud.method.need_update(newuri, ud, ld):
|
||||
ud.method.download(newuri, ud, ld)
|
||||
if hasattr(ud.method,"build_mirror_data"):
|
||||
ud.method.build_mirror_data(newuri, ud, ld)
|
||||
if os.path.exists(ud.localpath):
|
||||
open(ud.donestamp, 'w').close()
|
||||
if hasattr(ud.method,"build_mirror_data"):
|
||||
ud.method.build_mirror_data(newuri, ud, ld)
|
||||
|
||||
if not ud.localpath or not os.path.exists(ud.localpath):
|
||||
continue
|
||||
@@ -505,7 +491,6 @@ def try_mirrors(d, origud, mirrors, check = False):
|
||||
# If that tarball is a local file:// we need to provide a symlink to it
|
||||
dldir = ld.getVar("DL_DIR", True)
|
||||
if os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
|
||||
open(ud.donestamp, 'w').close()
|
||||
dest = os.path.join(dldir, os.path.basename(ud.localpath))
|
||||
if not os.path.exists(dest):
|
||||
os.symlink(ud.localpath, dest)
|
||||
@@ -513,21 +498,17 @@ def try_mirrors(d, origud, mirrors, check = False):
|
||||
# Otherwise the result is a local file:// and we symlink to it
|
||||
if not os.path.exists(origud.localpath):
|
||||
os.symlink(ud.localpath, origud.localpath)
|
||||
update_stamp(newuri, origud, ld)
|
||||
return ud.localpath
|
||||
|
||||
except bb.fetch2.NetworkAccess:
|
||||
raise
|
||||
|
||||
except bb.fetch2.BBFetchException as e:
|
||||
if isinstance(e, ChecksumError):
|
||||
logger.warn("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (newuri, origud.url))
|
||||
logger.warn(str(e))
|
||||
else:
|
||||
logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
|
||||
logger.debug(1, str(e))
|
||||
logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
|
||||
logger.debug(1, str(e))
|
||||
try:
|
||||
ud.method.clean(ud, ld)
|
||||
if os.path.isfile(ud.localpath):
|
||||
bb.utils.remove(ud.localpath)
|
||||
except UnboundLocalError:
|
||||
pass
|
||||
continue
|
||||
@@ -564,85 +545,11 @@ def srcrev_internal_helper(ud, d, name):
|
||||
|
||||
return rev
|
||||
|
||||
|
||||
def get_checksum_file_list(d):
|
||||
""" Get a list of files checksum in SRC_URI
|
||||
|
||||
Returns the resolved local paths of all local file entries in
|
||||
SRC_URI as a space-separated string
|
||||
"""
|
||||
fetch = Fetch([], d, cache = False, localonly = True)
|
||||
|
||||
dl_dir = d.getVar('DL_DIR', True)
|
||||
filelist = []
|
||||
for u in fetch.urls:
|
||||
ud = fetch.ud[u]
|
||||
|
||||
if ud and isinstance(ud.method, local.Local):
|
||||
ud.setup_localpath(d)
|
||||
f = ud.localpath
|
||||
if f.startswith(dl_dir):
|
||||
# The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
|
||||
if os.path.exists(f):
|
||||
bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN', True), os.path.basename(f)))
|
||||
else:
|
||||
bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN', True), os.path.basename(f)))
|
||||
continue
|
||||
filelist.append(f)
|
||||
|
||||
return " ".join(filelist)
|
||||
|
||||
|
||||
def get_file_checksums(filelist, pn):
|
||||
"""Get a list of the checksums for a list of local files
|
||||
|
||||
Returns the checksums for a list of local files, caching the results as
|
||||
it proceeds
|
||||
|
||||
"""
|
||||
|
||||
def checksum_file(f):
|
||||
try:
|
||||
checksum = _checksum_cache.get_checksum(f)
|
||||
except OSError as e:
|
||||
import traceback
|
||||
bb.warn("Unable to get checksum for %s SRC_URI entry %s: %s" % (pn, os.path.basename(f), e))
|
||||
return None
|
||||
return checksum
|
||||
|
||||
checksums = []
|
||||
for pth in filelist.split():
|
||||
checksum = None
|
||||
if '*' in pth:
|
||||
# Handle globs
|
||||
import glob
|
||||
for f in glob.glob(pth):
|
||||
checksum = checksum_file(f)
|
||||
if checksum:
|
||||
checksums.append((f, checksum))
|
||||
elif os.path.isdir(pth):
|
||||
# Handle directories
|
||||
for root, dirs, files in os.walk(pth):
|
||||
for name in files:
|
||||
fullpth = os.path.join(root, name)
|
||||
checksum = checksum_file(fullpth)
|
||||
if checksum:
|
||||
checksums.append((fullpth, checksum))
|
||||
else:
|
||||
checksum = checksum_file(pth)
|
||||
|
||||
if checksum:
|
||||
checksums.append((pth, checksum))
|
||||
|
||||
checksums.sort()
|
||||
return checksums
|
||||
|
||||
|
||||
class FetchData(object):
|
||||
"""
|
||||
A class which represents the fetcher state for a given URI.
|
||||
"""
|
||||
def __init__(self, url, d, localonly = False):
|
||||
def __init__(self, url, d):
|
||||
# localpath is the location of a downloaded result. If not set, the file is local.
|
||||
self.donestamp = None
|
||||
self.localfile = ""
|
||||
@@ -667,14 +574,10 @@ class FetchData(object):
|
||||
self.sha256_name = "sha256sum"
|
||||
if self.md5_name in self.parm:
|
||||
self.md5_expected = self.parm[self.md5_name]
|
||||
elif self.type not in ["http", "https", "ftp", "ftps"]:
|
||||
self.md5_expected = None
|
||||
else:
|
||||
self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name)
|
||||
if self.sha256_name in self.parm:
|
||||
self.sha256_expected = self.parm[self.sha256_name]
|
||||
elif self.type not in ["http", "https", "ftp", "ftps"]:
|
||||
self.sha256_expected = None
|
||||
else:
|
||||
self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)
|
||||
|
||||
@@ -689,9 +592,6 @@ class FetchData(object):
|
||||
if not self.method:
|
||||
raise NoMethodError(url)
|
||||
|
||||
if localonly and not isinstance(self.method, local.Local):
|
||||
raise NonLocalMethod()
|
||||
|
||||
if hasattr(self.method, "urldata_init"):
|
||||
self.method.urldata_init(self, d)
|
||||
|
||||
@@ -756,26 +656,6 @@ class FetchMethod(object):
|
||||
"""
|
||||
return os.path.join(data.getVar("DL_DIR", d, True), urldata.localfile)
|
||||
|
||||
def supports_checksum(self, urldata):
|
||||
"""
|
||||
Is localpath something that can be represented by a checksum?
|
||||
"""
|
||||
|
||||
# We cannot compute checksums for directories
|
||||
if os.path.isdir(urldata.localpath) == True:
|
||||
return False
|
||||
if urldata.localpath.find("*") != -1:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def recommends_checksum(self, urldata):
|
||||
"""
|
||||
Is the backend on where checksumming is recommended (should warnings
|
||||
by displayed if there is no checksum)?
|
||||
"""
|
||||
return False
|
||||
|
||||
def _strip_leading_slashes(self, relpath):
|
||||
"""
|
||||
Remove leading slash as os.path.join can't cope
|
||||
@@ -826,7 +706,7 @@ class FetchMethod(object):
|
||||
|
||||
dots = file.split(".")
|
||||
if dots[-1] in ['gz', 'bz2', 'Z']:
|
||||
efile = os.path.join(rootdir, os.path.basename('.'.join(dots[0:-1])))
|
||||
efile = os.path.join(data.getVar('WORKDIR', True),os.path.basename('.'.join(dots[0:-1])))
|
||||
else:
|
||||
efile = file
|
||||
cmd = None
|
||||
@@ -902,9 +782,7 @@ class FetchMethod(object):
|
||||
bb.utils.mkdirhier(newdir)
|
||||
os.chdir(newdir)
|
||||
|
||||
path = data.getVar('PATH', True)
|
||||
if path:
|
||||
cmd = "PATH=\"%s\" %s" % (path, cmd)
|
||||
cmd = "PATH=\"%s\" %s" % (data.getVar('PATH', True), cmd)
|
||||
bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
|
||||
ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
|
||||
|
||||
@@ -1015,10 +893,7 @@ class FetchMethod(object):
|
||||
return "%s-%s" % (key, d.getVar("PN", True) or "")
|
||||
|
||||
class Fetch(object):
|
||||
def __init__(self, urls, d, cache = True, localonly = False):
|
||||
if localonly and cache:
|
||||
raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")
|
||||
|
||||
def __init__(self, urls, d, cache = True):
|
||||
if len(urls) == 0:
|
||||
urls = d.getVar("SRC_URI", True).split()
|
||||
self.urls = urls
|
||||
@@ -1031,12 +906,7 @@ class Fetch(object):
|
||||
|
||||
for url in urls:
|
||||
if url not in self.ud:
|
||||
try:
|
||||
self.ud[url] = FetchData(url, d, localonly)
|
||||
except NonLocalMethod:
|
||||
if localonly:
|
||||
self.ud[url] = None
|
||||
pass
|
||||
self.ud[url] = FetchData(url, d)
|
||||
|
||||
if cache:
|
||||
urldata_cache[fn] = self.ud
|
||||
@@ -1108,14 +978,12 @@ class Fetch(object):
|
||||
raise
|
||||
|
||||
except BBFetchException as e:
|
||||
if isinstance(e, ChecksumError):
|
||||
logger.warn("Checksum error encountered with download (will attempt other sources): %s" % str(e))
|
||||
else:
|
||||
logger.warn('Failed to fetch URL %s, attempting MIRRORS if available' % u)
|
||||
logger.debug(1, str(e))
|
||||
logger.warn('Failed to fetch URL %s' % u)
|
||||
logger.debug(1, str(e))
|
||||
firsterr = e
|
||||
# Remove any incomplete fetch
|
||||
m.clean(ud, self.d)
|
||||
if os.path.isfile(ud.localpath):
|
||||
bb.utils.remove(ud.localpath)
|
||||
logger.debug(1, "Trying MIRRORS")
|
||||
mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
|
||||
localpath = try_mirrors (self.d, ud, mirrors)
|
||||
|
||||
@@ -82,9 +82,6 @@ class Git(FetchMethod):
|
||||
"""
|
||||
return ud.type in ['git']
|
||||
|
||||
def supports_checksum(self, urldata):
|
||||
return False
|
||||
|
||||
def urldata_init(self, ud, d):
|
||||
"""
|
||||
init git specific variable within url data
|
||||
@@ -126,8 +123,7 @@ class Git(FetchMethod):
|
||||
for name in ud.names:
|
||||
# Ensure anything that doesn't look like a sha256 checksum/revision is translated into one
|
||||
if not ud.revisions[name] or len(ud.revisions[name]) != 40 or (False in [c in "abcdef0123456789" for c in ud.revisions[name]]):
|
||||
if ud.revisions[name]:
|
||||
ud.branches[name] = ud.revisions[name]
|
||||
ud.branches[name] = ud.revisions[name]
|
||||
ud.revisions[name] = self.latest_revision(ud.url, ud, d, name)
|
||||
|
||||
gitsrcname = '%s%s' % (ud.host.replace(':','.'), ud.path.replace('/', '.'))
|
||||
@@ -139,9 +135,8 @@ class Git(FetchMethod):
|
||||
for name in ud.names:
|
||||
gitsrcname = gitsrcname + '_' + ud.revisions[name]
|
||||
ud.mirrortarball = 'git2_%s.tar.gz' % (gitsrcname)
|
||||
ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)
|
||||
gitdir = d.getVar("GITDIR", True) or (d.getVar("DL_DIR", True) + "/git2/")
|
||||
ud.clonedir = os.path.join(gitdir, gitsrcname)
|
||||
ud.fullmirror = os.path.join(data.getVar("DL_DIR", d, True), ud.mirrortarball)
|
||||
ud.clonedir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)
|
||||
|
||||
ud.localfile = ud.clonedir
|
||||
|
||||
@@ -189,8 +184,7 @@ class Git(FetchMethod):
|
||||
# If the repo still doesn't exist, fallback to cloning it
|
||||
if not os.path.exists(ud.clonedir):
|
||||
clone_cmd = "%s clone --bare --mirror %s %s" % (ud.basecmd, repourl, ud.clonedir)
|
||||
if ud.proto.lower() != 'file':
|
||||
bb.fetch2.check_network_access(d, clone_cmd)
|
||||
bb.fetch2.check_network_access(d, clone_cmd)
|
||||
runfetchcmd(clone_cmd, d)
|
||||
|
||||
os.chdir(ud.clonedir)
|
||||
@@ -208,8 +202,7 @@ class Git(FetchMethod):
|
||||
|
||||
runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, repourl), d)
|
||||
fetch_cmd = "%s fetch -f --prune %s refs/*:refs/*" % (ud.basecmd, repourl)
|
||||
if ud.proto.lower() != 'file':
|
||||
bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
|
||||
bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
|
||||
runfetchcmd(fetch_cmd, d)
|
||||
runfetchcmd("%s prune-packed" % ud.basecmd, d)
|
||||
runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
|
||||
@@ -288,8 +281,7 @@ class Git(FetchMethod):
|
||||
basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
|
||||
cmd = "%s ls-remote %s://%s%s%s %s" % \
|
||||
(basecmd, ud.proto, username, ud.host, ud.path, ud.branches[name])
|
||||
if ud.proto.lower() != 'file':
|
||||
bb.fetch2.check_network_access(d, cmd)
|
||||
bb.fetch2.check_network_access(d, cmd)
|
||||
output = runfetchcmd(cmd, d, True)
|
||||
if not output:
|
||||
raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, url)
|
||||
|
||||
@@ -27,7 +27,6 @@ BitBake build tools.
|
||||
|
||||
from future_builtins import zip
|
||||
import os
|
||||
import subprocess
|
||||
import logging
|
||||
import bb
|
||||
from bb import data
|
||||
@@ -91,8 +90,8 @@ class Perforce(FetchMethod):
|
||||
|
||||
p4cmd = data.getVar('FETCHCOMMAND_p4', d, True)
|
||||
logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
|
||||
p4file, errors = bb.process.run("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
|
||||
cset = p4file.strip()
|
||||
p4file = os.popen("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
|
||||
cset = p4file.readline().strip()
|
||||
logger.debug(1, "READ %s", cset)
|
||||
if not cset:
|
||||
return -1
|
||||
@@ -155,8 +154,8 @@ class Perforce(FetchMethod):
|
||||
logger.debug(2, "Fetch: creating temporary directory")
|
||||
bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata))
|
||||
data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
|
||||
tmpfile, errors = bb.process.run(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
|
||||
tmpfile = tmpfile.strip()
|
||||
tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
|
||||
tmpfile = tmppipe.readline().strip()
|
||||
if not tmpfile:
|
||||
raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", loc)
|
||||
|
||||
@@ -169,8 +168,7 @@ class Perforce(FetchMethod):
|
||||
os.chdir(tmpfile)
|
||||
logger.info("Fetch " + loc)
|
||||
logger.info("%s%s files %s", p4cmd, p4opt, depot)
|
||||
p4file, errors = bb.process.run("%s%s files %s" % (p4cmd, p4opt, depot))
|
||||
p4file = p4file.strip()
|
||||
p4file = os.popen("%s%s files %s" % (p4cmd, p4opt, depot))
|
||||
|
||||
if not p4file:
|
||||
raise FetchError("Fetch: unable to get the P4 files from %s" % depot, loc)
|
||||
@@ -186,7 +184,7 @@ class Perforce(FetchMethod):
|
||||
dest = list[0][len(path)+1:]
|
||||
where = dest.find("#")
|
||||
|
||||
subprocess.call("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]), shell=True)
|
||||
os.system("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]))
|
||||
count = count + 1
|
||||
|
||||
if count == 0:
|
||||
|
||||
@@ -69,9 +69,6 @@ class SSH(FetchMethod):
|
||||
def supports(self, url, urldata, d):
|
||||
return __pattern__.match(url) != None
|
||||
|
||||
def supports_checksum(self, urldata):
|
||||
return False
|
||||
|
||||
def localpath(self, url, urldata, d):
|
||||
m = __pattern__.match(urldata.url)
|
||||
path = m.group('path')
|
||||
|
||||
@@ -77,8 +77,8 @@ class Svk(FetchMethod):
|
||||
logger.debug(2, "Fetch: creating temporary directory")
|
||||
bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata))
|
||||
data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
|
||||
tmpfile, errors = bb.process.run(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
|
||||
tmpfile = tmpfile.strip()
|
||||
tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
|
||||
tmpfile = tmppipe.readline().strip()
|
||||
if not tmpfile:
|
||||
logger.error()
|
||||
raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", loc)
|
||||
|
||||
@@ -45,9 +45,6 @@ class Wget(FetchMethod):
|
||||
"""
|
||||
return ud.type in ['http', 'https', 'ftp']
|
||||
|
||||
def recommends_checksum(self, urldata):
|
||||
return True
|
||||
|
||||
def urldata_init(self, ud, d):
|
||||
|
||||
ud.basename = os.path.basename(ud.path)
|
||||
@@ -56,32 +53,39 @@ class Wget(FetchMethod):
|
||||
def download(self, uri, ud, d, checkonly = False):
|
||||
"""Fetch urls"""
|
||||
|
||||
if checkonly:
|
||||
fetchcmd = data.getVar("CHECKCOMMAND_wget", d, True) or d.expand("/usr/bin/env wget --spider -t 5 --passive-ftp --no-check-certificate -P ${DL_DIR} '${URI}'")
|
||||
elif os.path.exists(ud.localpath):
|
||||
# file exists, but we didnt complete it.. trying again..
|
||||
fetchcmd = data.getVar("RESUMECOMMAND_wget", d, True) or d.expand("/usr/bin/env wget -c -t 5 -nv --passive-ftp --no-check-certificate -P ${DL_DIR} '${URI}'")
|
||||
else:
|
||||
fetchcmd = data.getVar("FETCHCOMMAND_wget", d, True) or d.expand("/usr/bin/env wget -t 5 -nv --passive-ftp --no-check-certificate -P ${DL_DIR} '${URI}'")
|
||||
def fetch_uri(uri, ud, d):
|
||||
if checkonly:
|
||||
fetchcmd = data.getVar("CHECKCOMMAND", d, True)
|
||||
elif os.path.exists(ud.localpath):
|
||||
# file exists, but we didnt complete it.. trying again..
|
||||
fetchcmd = data.getVar("RESUMECOMMAND", d, True)
|
||||
else:
|
||||
fetchcmd = data.getVar("FETCHCOMMAND", d, True)
|
||||
|
||||
uri = uri.split(";")[0]
|
||||
uri_decoded = list(decodeurl(uri))
|
||||
uri_type = uri_decoded[0]
|
||||
uri_host = uri_decoded[1]
|
||||
uri = uri.split(";")[0]
|
||||
uri_decoded = list(decodeurl(uri))
|
||||
uri_type = uri_decoded[0]
|
||||
uri_host = uri_decoded[1]
|
||||
|
||||
fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
|
||||
fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
|
||||
if not checkonly:
|
||||
logger.info("fetch " + uri)
|
||||
logger.debug(2, "executing " + fetchcmd)
|
||||
bb.fetch2.check_network_access(d, fetchcmd)
|
||||
runfetchcmd(fetchcmd, d, quiet=checkonly)
|
||||
fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
|
||||
fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
|
||||
if not checkonly:
|
||||
logger.info("fetch " + uri)
|
||||
logger.debug(2, "executing " + fetchcmd)
|
||||
bb.fetch2.check_network_access(d, fetchcmd)
|
||||
runfetchcmd(fetchcmd, d, quiet=checkonly)
|
||||
|
||||
# Sanity check since wget can pretend it succeed when it didn't
|
||||
# Also, this used to happen if sourceforge sent us to the mirror page
|
||||
if not os.path.exists(ud.localpath) and not checkonly:
|
||||
raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)
|
||||
# Sanity check since wget can pretend it succeed when it didn't
|
||||
# Also, this used to happen if sourceforge sent us to the mirror page
|
||||
if not os.path.exists(ud.localpath) and not checkonly:
|
||||
raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)
|
||||
|
||||
localdata = data.createCopy(d)
|
||||
data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata)
|
||||
data.update_data(localdata)
|
||||
|
||||
fetch_uri(uri, ud, localdata)
|
||||
|
||||
return True
|
||||
|
||||
def checkstatus(self, uri, ud, d):
|
||||
|
||||
@@ -69,7 +69,7 @@ def supports(fn, d):
|
||||
return os.path.splitext(fn)[-1] in [".bb", ".bbclass", ".inc"]
|
||||
|
||||
def inherit(files, fn, lineno, d):
|
||||
__inherit_cache = d.getVar('__inherit_cache') or []
|
||||
__inherit_cache = data.getVar('__inherit_cache', d) or []
|
||||
files = d.expand(files).split()
|
||||
for file in files:
|
||||
if not os.path.isabs(file) and not file.endswith(".bbclass"):
|
||||
@@ -80,7 +80,7 @@ def inherit(files, fn, lineno, d):
|
||||
__inherit_cache.append( file )
|
||||
data.setVar('__inherit_cache', __inherit_cache, d)
|
||||
include(fn, file, lineno, d, "inherit")
|
||||
__inherit_cache = d.getVar('__inherit_cache') or []
|
||||
__inherit_cache = data.getVar('__inherit_cache', d) or []
|
||||
|
||||
def get_statements(filename, absolute_filename, base_name):
|
||||
global cached_statements
|
||||
@@ -126,13 +126,13 @@ def handle(fn, d, include):
|
||||
if ext == ".bbclass":
|
||||
__classname__ = root
|
||||
classes.append(__classname__)
|
||||
__inherit_cache = d.getVar('__inherit_cache') or []
|
||||
__inherit_cache = data.getVar('__inherit_cache', d) or []
|
||||
if not fn in __inherit_cache:
|
||||
__inherit_cache.append(fn)
|
||||
data.setVar('__inherit_cache', __inherit_cache, d)
|
||||
|
||||
if include != 0:
|
||||
oldfile = d.getVar('FILE')
|
||||
oldfile = data.getVar('FILE', d)
|
||||
else:
|
||||
oldfile = None
|
||||
|
||||
|
||||
@@ -781,7 +781,101 @@ class RunQueue:
|
||||
|
||||
self.rqexe = None
|
||||
|
||||
def check_stamp_task(self, task, taskname = None, recurse = False, cache = None):
|
||||
def check_stamps(self):
|
||||
unchecked = {}
|
||||
current = []
|
||||
notcurrent = []
|
||||
buildable = []
|
||||
|
||||
if self.stamppolicy == "perfile":
|
||||
fulldeptree = False
|
||||
else:
|
||||
fulldeptree = True
|
||||
stampwhitelist = []
|
||||
if self.stamppolicy == "whitelist":
|
||||
stampwhitelist = self.rqdata.stampfnwhitelist
|
||||
|
||||
for task in xrange(len(self.rqdata.runq_fnid)):
|
||||
unchecked[task] = ""
|
||||
if len(self.rqdata.runq_depends[task]) == 0:
|
||||
buildable.append(task)
|
||||
|
||||
def check_buildable(self, task, buildable):
|
||||
for revdep in self.rqdata.runq_revdeps[task]:
|
||||
alldeps = 1
|
||||
for dep in self.rqdata.runq_depends[revdep]:
|
||||
if dep in unchecked:
|
||||
alldeps = 0
|
||||
if alldeps == 1:
|
||||
if revdep in unchecked:
|
||||
buildable.append(revdep)
|
||||
|
||||
for task in xrange(len(self.rqdata.runq_fnid)):
|
||||
if task not in unchecked:
|
||||
continue
|
||||
fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
|
||||
taskname = self.rqdata.runq_task[task]
|
||||
stampfile = bb.build.stampfile(taskname, self.rqdata.dataCache, fn)
|
||||
# If the stamp is missing its not current
|
||||
if not os.access(stampfile, os.F_OK):
|
||||
del unchecked[task]
|
||||
notcurrent.append(task)
|
||||
check_buildable(self, task, buildable)
|
||||
continue
|
||||
# If its a 'nostamp' task, it's not current
|
||||
taskdep = self.rqdata.dataCache.task_deps[fn]
|
||||
if 'nostamp' in taskdep and task in taskdep['nostamp']:
|
||||
del unchecked[task]
|
||||
notcurrent.append(task)
|
||||
check_buildable(self, task, buildable)
|
||||
continue
|
||||
|
||||
while (len(buildable) > 0):
|
||||
nextbuildable = []
|
||||
for task in buildable:
|
||||
if task in unchecked:
|
||||
fn = self.taskData.fn_index[self.rqdata.runq_fnid[task]]
|
||||
taskname = self.rqdata.runq_task[task]
|
||||
stampfile = bb.build.stampfile(taskname, self.rqdata.dataCache, fn)
|
||||
iscurrent = True
|
||||
|
||||
t1 = os.stat(stampfile)[stat.ST_MTIME]
|
||||
for dep in self.rqdata.runq_depends[task]:
|
||||
if iscurrent:
|
||||
fn2 = self.taskData.fn_index[self.rqdata.runq_fnid[dep]]
|
||||
taskname2 = self.rqdata.runq_task[dep]
|
||||
stampfile2 = bb.build.stampfile(taskname2, self.rqdata.dataCache, fn2)
|
||||
if fn == fn2 or (fulldeptree and fn2 not in stampwhitelist):
|
||||
if dep in notcurrent:
|
||||
iscurrent = False
|
||||
else:
|
||||
t2 = os.stat(stampfile2)[stat.ST_MTIME]
|
||||
if t1 < t2:
|
||||
iscurrent = False
|
||||
del unchecked[task]
|
||||
if iscurrent:
|
||||
current.append(task)
|
||||
else:
|
||||
notcurrent.append(task)
|
||||
|
||||
check_buildable(self, task, nextbuildable)
|
||||
|
||||
buildable = nextbuildable
|
||||
|
||||
#for task in range(len(self.runq_fnid)):
|
||||
# fn = self.taskData.fn_index[self.runq_fnid[task]]
|
||||
# taskname = self.runq_task[task]
|
||||
# print "%s %s.%s" % (task, taskname, fn)
|
||||
|
||||
#print "Unchecked: %s" % unchecked
|
||||
#print "Current: %s" % current
|
||||
#print "Not current: %s" % notcurrent
|
||||
|
||||
if len(unchecked) > 0:
|
||||
bb.msg.fatal("RunQueue", "check_stamps fatal internal error")
|
||||
return current
|
||||
|
||||
def check_stamp_task(self, task, taskname = None, recurse = False):
|
||||
def get_timestamp(f):
|
||||
try:
|
||||
if not os.access(f, os.F_OK):
|
||||
@@ -817,16 +911,10 @@ class RunQueue:
|
||||
if taskname != "do_setscene" and taskname.endswith("_setscene"):
|
||||
return True
|
||||
|
||||
if cache is None:
|
||||
cache = {}
|
||||
|
||||
iscurrent = True
|
||||
t1 = get_timestamp(stampfile)
|
||||
for dep in self.rqdata.runq_depends[task]:
|
||||
if iscurrent:
|
||||
if dep in cache:
|
||||
iscurrent = cache[dep]
|
||||
continue
|
||||
fn2 = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[dep]]
|
||||
taskname2 = self.rqdata.runq_task[dep]
|
||||
stampfile2 = bb.build.stampfile(taskname2, self.rqdata.dataCache, fn2)
|
||||
@@ -843,9 +931,7 @@ class RunQueue:
|
||||
logger.debug(2, 'Stampfile %s < %s', stampfile, stampfile2)
|
||||
iscurrent = False
|
||||
if recurse and iscurrent:
|
||||
iscurrent = self.check_stamp_task(dep, recurse=True, cache=cache)
|
||||
cache[dep] = iscurrent
|
||||
cache[task] = iscurrent
|
||||
iscurrent = self.check_stamp_task(dep, recurse=True)
|
||||
return iscurrent
|
||||
|
||||
def execute_runqueue(self):
|
||||
@@ -955,36 +1041,23 @@ class RunQueueExecute:
|
||||
self.build_stamps = {}
|
||||
self.failed_fnids = []
|
||||
|
||||
self.stampcache = {}
|
||||
|
||||
def runqueue_process_waitpid(self):
|
||||
"""
|
||||
Return none is there are no processes awaiting result collection, otherwise
|
||||
collect the process exit codes and close the information pipe.
|
||||
"""
|
||||
pid, status = os.waitpid(-1, os.WNOHANG)
|
||||
if pid == 0 or os.WIFSTOPPED(status):
|
||||
result = os.waitpid(-1, os.WNOHANG)
|
||||
if result[0] == 0 and result[1] == 0:
|
||||
return None
|
||||
|
||||
if os.WIFEXITED(status):
|
||||
status = os.WEXITSTATUS(status)
|
||||
elif os.WIFSIGNALED(status):
|
||||
# Per shell conventions for $?, when a process exits due to
|
||||
# a signal, we return an exit code of 128 + SIGNUM
|
||||
status = 128 + os.WTERMSIG(status)
|
||||
|
||||
task = self.build_pids[pid]
|
||||
del self.build_pids[pid]
|
||||
|
||||
self.build_pipes[pid].close()
|
||||
del self.build_pipes[pid]
|
||||
|
||||
# self.build_stamps[pid] may not exist when use shared work directory.
|
||||
if pid in self.build_stamps:
|
||||
del self.build_stamps[pid]
|
||||
|
||||
if status != 0:
|
||||
self.task_fail(task, status)
|
||||
task = self.build_pids[result[0]]
|
||||
del self.build_pids[result[0]]
|
||||
self.build_pipes[result[0]].close()
|
||||
del self.build_pipes[result[0]]
|
||||
# self.build_stamps[result[0]] may not exist when use shared work directory.
|
||||
if result[0] in self.build_stamps.keys():
|
||||
del self.build_stamps[result[0]]
|
||||
if result[1] != 0:
|
||||
self.task_fail(task, result[1]>>8)
|
||||
else:
|
||||
self.task_complete(task)
|
||||
return True
|
||||
@@ -1091,6 +1164,8 @@ class RunQueueExecute:
|
||||
os.umask(umask)
|
||||
|
||||
self.cooker.configuration.data.setVar("BB_WORKERCONTEXT", "1")
|
||||
self.cooker.configuration.data.setVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY", self)
|
||||
self.cooker.configuration.data.setVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY2", fn)
|
||||
bb.parse.siggen.set_taskdata(self.rqdata.hashes, self.rqdata.hash_deps)
|
||||
ret = 0
|
||||
try:
|
||||
@@ -1298,7 +1373,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
|
||||
self.task_skip(task)
|
||||
return True
|
||||
|
||||
if self.rq.check_stamp_task(task, taskname, cache=self.stampcache):
|
||||
if self.rq.check_stamp_task(task, taskname):
|
||||
logger.debug(2, "Stamp current task %s (%s)", task,
|
||||
self.rqdata.get_user_idstring(task))
|
||||
self.task_skip(task)
|
||||
@@ -1482,7 +1557,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
|
||||
bb.build.make_stamp(taskname + "_setscene", self.rqdata.dataCache, fn)
|
||||
continue
|
||||
|
||||
if self.rq.check_stamp_task(realtask, taskname + "_setscene", cache=self.stampcache):
|
||||
if self.rq.check_stamp_task(realtask, taskname + "_setscene"):
|
||||
logger.debug(2, 'Setscene stamp current for task %s(%s)', task, self.rqdata.get_user_idstring(realtask))
|
||||
stamppresent.append(task)
|
||||
self.task_skip(task)
|
||||
@@ -1575,7 +1650,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
|
||||
fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[realtask]]
|
||||
|
||||
taskname = self.rqdata.runq_task[realtask] + "_setscene"
|
||||
if self.rq.check_stamp_task(realtask, self.rqdata.runq_task[realtask], recurse = True, cache=self.stampcache):
|
||||
if self.rq.check_stamp_task(realtask, self.rqdata.runq_task[realtask], recurse = True):
|
||||
logger.debug(2, 'Stamp for underlying task %s(%s) is current, so skipping setscene variant',
|
||||
task, self.rqdata.get_user_idstring(realtask))
|
||||
self.task_failoutright(task)
|
||||
@@ -1587,7 +1662,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
|
||||
self.task_failoutright(task)
|
||||
return True
|
||||
|
||||
if self.rq.check_stamp_task(realtask, taskname, cache=self.stampcache):
|
||||
if self.rq.check_stamp_task(realtask, taskname):
|
||||
logger.debug(2, 'Setscene stamp current task %s(%s), so skip it and its dependencies',
|
||||
task, self.rqdata.get_user_idstring(realtask))
|
||||
self.task_skip(task)
|
||||
@@ -1701,6 +1776,15 @@ class runQueueTaskCompleted(runQueueEvent):
|
||||
Event notifing a task completed
|
||||
"""
|
||||
|
||||
def check_stamp_fn(fn, taskname, d):
|
||||
rqexe = d.getVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY")
|
||||
fn = d.getVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY2")
|
||||
fnid = rqexe.rqdata.taskData.getfn_id(fn)
|
||||
taskid = rqexe.rqdata.get_task_id(fnid, taskname)
|
||||
if taskid is not None:
|
||||
return rqexe.rq.check_stamp_task(taskid)
|
||||
return None
|
||||
|
||||
class runQueuePipe():
|
||||
"""
|
||||
Abstraction for a pipe between a worker thread and the server
|
||||
|
||||
@@ -60,7 +60,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
|
||||
self.taskhash = {}
|
||||
self.taskdeps = {}
|
||||
self.runtaskdeps = {}
|
||||
self.file_checksum_values = {}
|
||||
self.gendeps = {}
|
||||
self.lookupcache = {}
|
||||
self.pkgnameextract = re.compile("(?P<fn>.*)\..*")
|
||||
@@ -108,10 +107,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
|
||||
data = data + dep
|
||||
if dep in lookupcache:
|
||||
var = lookupcache[dep]
|
||||
elif dep[-1] == ']':
|
||||
vf = dep[:-1].split('[')
|
||||
var = d.getVarFlag(vf[0], vf[1], False)
|
||||
lookupcache[dep] = var
|
||||
else:
|
||||
var = d.getVar(dep, False)
|
||||
lookupcache[dep] = var
|
||||
@@ -157,7 +152,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
|
||||
k = fn + "." + task
|
||||
data = dataCache.basetaskhash[k]
|
||||
self.runtaskdeps[k] = []
|
||||
self.file_checksum_values[k] = {}
|
||||
recipename = dataCache.pkg_fn[fn]
|
||||
for dep in sorted(deps, key=clean_basepath):
|
||||
depname = dataCache.pkg_fn[self.pkgnameextract.search(dep).group('fn')]
|
||||
@@ -167,12 +161,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
|
||||
bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?", dep)
|
||||
data = data + self.taskhash[dep]
|
||||
self.runtaskdeps[k].append(dep)
|
||||
|
||||
if task in dataCache.file_checksums[fn]:
|
||||
checksums = bb.fetch2.get_file_checksums(dataCache.file_checksums[fn][task], recipename)
|
||||
for (f,cs) in checksums:
|
||||
self.file_checksum_values[k][f] = cs
|
||||
data = data + cs
|
||||
h = hashlib.md5(data).hexdigest()
|
||||
self.taskhash[k] = h
|
||||
#d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task])
|
||||
@@ -209,15 +197,12 @@ class SignatureGeneratorBasic(SignatureGenerator):
|
||||
|
||||
if runtime and k in self.taskhash:
|
||||
data['runtaskdeps'] = self.runtaskdeps[k]
|
||||
data['file_checksum_values'] = self.file_checksum_values[k]
|
||||
data['runtaskhashes'] = {}
|
||||
for dep in data['runtaskdeps']:
|
||||
data['runtaskhashes'][dep] = self.taskhash[dep]
|
||||
|
||||
with open(sigfile, "wb") as f:
|
||||
p = pickle.Pickler(f, -1)
|
||||
p.dump(data)
|
||||
os.chmod(sigfile, 0664)
|
||||
p = pickle.Pickler(file(sigfile, "wb"), -1)
|
||||
p.dump(data)
|
||||
|
||||
def dump_sigs(self, dataCache):
|
||||
for fn in self.taskdeps:
|
||||
@@ -265,9 +250,9 @@ def clean_basepaths(a):
|
||||
return b
|
||||
|
||||
def compare_sigfiles(a, b):
|
||||
p1 = pickle.Unpickler(open(a, "rb"))
|
||||
p1 = pickle.Unpickler(file(a, "rb"))
|
||||
a_data = p1.load()
|
||||
p2 = pickle.Unpickler(open(b, "rb"))
|
||||
p2 = pickle.Unpickler(file(b, "rb"))
|
||||
b_data = p2.load()
|
||||
|
||||
def dict_diff(a, b, whitelist=set()):
|
||||
@@ -317,18 +302,6 @@ def compare_sigfiles(a, b):
|
||||
for dep in changed:
|
||||
print "Variable %s value changed from %s to %s" % (dep, a_data['varvals'][dep], b_data['varvals'][dep])
|
||||
|
||||
changed, added, removed = dict_diff(a_data['file_checksum_values'], b_data['file_checksum_values'])
|
||||
if changed:
|
||||
for f in changed:
|
||||
print "Checksum for file %s changed from %s to %s" % (f, a_data['file_checksum_values'][f], b_data['file_checksum_values'][f])
|
||||
if added:
|
||||
for f in added:
|
||||
print "Dependency on checksum of file %s was added" % (f)
|
||||
if removed:
|
||||
for f in removed:
|
||||
print "Dependency on checksum of file %s was removed" % (f)
|
||||
|
||||
|
||||
if 'runtaskhashes' in a_data and 'runtaskhashes' in b_data:
|
||||
a = clean_basepaths(a_data['runtaskhashes'])
|
||||
b = clean_basepaths(b_data['runtaskhashes'])
|
||||
@@ -358,7 +331,7 @@ def compare_sigfiles(a, b):
|
||||
print "Hash for dependent task %s changed from %s to %s" % (dep, a[dep], b[dep])
|
||||
|
||||
def dump_sigfile(a):
|
||||
p1 = pickle.Unpickler(open(a, "rb"))
|
||||
p1 = pickle.Unpickler(file(a, "rb"))
|
||||
a_data = p1.load()
|
||||
|
||||
print "basewhitelist: %s" % (a_data['basewhitelist'])
|
||||
@@ -378,9 +351,6 @@ def dump_sigfile(a):
|
||||
if 'runtaskdeps' in a_data:
|
||||
print "Tasks this task depends on: %s" % (a_data['runtaskdeps'])
|
||||
|
||||
if 'file_checksum_values' in a_data:
|
||||
print "This task depends on the checksums of files: %s" % (a_data['file_checksum_values'])
|
||||
|
||||
if 'runtaskhashes' in a_data:
|
||||
for dep in a_data['runtaskhashes']:
|
||||
print "Hash for dependent task %s is %s" % (dep, a_data['runtaskhashes'][dep])
|
||||
|
||||
@@ -1,369 +0,0 @@
|
||||
#
|
||||
# BitBake Test for codeparser.py
|
||||
#
|
||||
# Copyright (C) 2010 Chris Larson
|
||||
# Copyright (C) 2012 Richard Purdie
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
|
||||
import unittest
|
||||
import logging
|
||||
import bb
|
||||
|
||||
logger = logging.getLogger('BitBake.TestCodeParser')
|
||||
|
||||
import bb.data
|
||||
|
||||
class ReferenceTest(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.d = bb.data.init()
|
||||
|
||||
def setEmptyVars(self, varlist):
|
||||
for k in varlist:
|
||||
self.d.setVar(k, "")
|
||||
|
||||
def setValues(self, values):
|
||||
for k, v in values.items():
|
||||
self.d.setVar(k, v)
|
||||
|
||||
def assertReferences(self, refs):
|
||||
self.assertEqual(self.references, refs)
|
||||
|
||||
def assertExecs(self, execs):
|
||||
self.assertEqual(self.execs, execs)
|
||||
|
||||
class VariableReferenceTest(ReferenceTest):
|
||||
|
||||
def parseExpression(self, exp):
|
||||
parsedvar = self.d.expandWithRefs(exp, None)
|
||||
self.references = parsedvar.references
|
||||
|
||||
def test_simple_reference(self):
|
||||
self.setEmptyVars(["FOO"])
|
||||
self.parseExpression("${FOO}")
|
||||
self.assertReferences(set(["FOO"]))
|
||||
|
||||
def test_nested_reference(self):
|
||||
self.setEmptyVars(["BAR"])
|
||||
self.d.setVar("FOO", "BAR")
|
||||
self.parseExpression("${${FOO}}")
|
||||
self.assertReferences(set(["FOO", "BAR"]))
|
||||
|
||||
def test_python_reference(self):
|
||||
self.setEmptyVars(["BAR"])
|
||||
self.parseExpression("${@bb.data.getVar('BAR', d, True) + 'foo'}")
|
||||
self.assertReferences(set(["BAR"]))
|
||||
|
||||
class ShellReferenceTest(ReferenceTest):
|
||||
|
||||
def parseExpression(self, exp):
|
||||
parsedvar = self.d.expandWithRefs(exp, None)
|
||||
parser = bb.codeparser.ShellParser("ParserTest", logger)
|
||||
parser.parse_shell(parsedvar.value)
|
||||
|
||||
self.references = parsedvar.references
|
||||
self.execs = parser.execs
|
||||
|
||||
def test_quotes_inside_assign(self):
|
||||
self.parseExpression('foo=foo"bar"baz')
|
||||
self.assertReferences(set([]))
|
||||
|
||||
def test_quotes_inside_arg(self):
|
||||
self.parseExpression('sed s#"bar baz"#"alpha beta"#g')
|
||||
self.assertExecs(set(["sed"]))
|
||||
|
||||
def test_arg_continuation(self):
|
||||
self.parseExpression("sed -i -e s,foo,bar,g \\\n *.pc")
|
||||
self.assertExecs(set(["sed"]))
|
||||
|
||||
def test_dollar_in_quoted(self):
|
||||
self.parseExpression('sed -i -e "foo$" *.pc')
|
||||
self.assertExecs(set(["sed"]))
|
||||
|
||||
def test_quotes_inside_arg_continuation(self):
|
||||
self.setEmptyVars(["bindir", "D", "libdir"])
|
||||
self.parseExpression("""
|
||||
sed -i -e s#"moc_location=.*$"#"moc_location=${bindir}/moc4"# \\
|
||||
-e s#"uic_location=.*$"#"uic_location=${bindir}/uic4"# \\
|
||||
${D}${libdir}/pkgconfig/*.pc
|
||||
""")
|
||||
self.assertReferences(set(["bindir", "D", "libdir"]))
|
||||
|
||||
def test_assign_subshell_expansion(self):
|
||||
self.parseExpression("foo=$(echo bar)")
|
||||
self.assertExecs(set(["echo"]))
|
||||
|
||||
def test_shell_unexpanded(self):
|
||||
self.setEmptyVars(["QT_BASE_NAME"])
|
||||
self.parseExpression('echo "${QT_BASE_NAME}"')
|
||||
self.assertExecs(set(["echo"]))
|
||||
self.assertReferences(set(["QT_BASE_NAME"]))
|
||||
|
||||
def test_incomplete_varexp_single_quotes(self):
|
||||
self.parseExpression("sed -i -e 's:IP{:I${:g' $pc")
|
||||
self.assertExecs(set(["sed"]))
|
||||
|
||||
|
||||
def test_until(self):
|
||||
self.parseExpression("until false; do echo true; done")
|
||||
self.assertExecs(set(["false", "echo"]))
|
||||
self.assertReferences(set())
|
||||
|
||||
def test_case(self):
|
||||
self.parseExpression("""
|
||||
case $foo in
|
||||
*)
|
||||
bar
|
||||
;;
|
||||
esac
|
||||
""")
|
||||
self.assertExecs(set(["bar"]))
|
||||
self.assertReferences(set())
|
||||
|
||||
def test_assign_exec(self):
|
||||
self.parseExpression("a=b c='foo bar' alpha 1 2 3")
|
||||
self.assertExecs(set(["alpha"]))
|
||||
|
||||
def test_redirect_to_file(self):
|
||||
self.setEmptyVars(["foo"])
|
||||
self.parseExpression("echo foo >${foo}/bar")
|
||||
self.assertExecs(set(["echo"]))
|
||||
self.assertReferences(set(["foo"]))
|
||||
|
||||
def test_heredoc(self):
|
||||
self.setEmptyVars(["theta"])
|
||||
self.parseExpression("""
|
||||
cat <<END
|
||||
alpha
|
||||
beta
|
||||
${theta}
|
||||
END
|
||||
""")
|
||||
self.assertReferences(set(["theta"]))
|
||||
|
||||
def test_redirect_from_heredoc(self):
|
||||
v = ["B", "SHADOW_MAILDIR", "SHADOW_MAILFILE", "SHADOW_UTMPDIR", "SHADOW_LOGDIR", "bindir"]
|
||||
self.setEmptyVars(v)
|
||||
self.parseExpression("""
|
||||
cat <<END >${B}/cachedpaths
|
||||
shadow_cv_maildir=${SHADOW_MAILDIR}
|
||||
shadow_cv_mailfile=${SHADOW_MAILFILE}
|
||||
shadow_cv_utmpdir=${SHADOW_UTMPDIR}
|
||||
shadow_cv_logdir=${SHADOW_LOGDIR}
|
||||
shadow_cv_passwd_dir=${bindir}
|
||||
END
|
||||
""")
|
||||
self.assertReferences(set(v))
|
||||
self.assertExecs(set(["cat"]))
|
||||
|
||||
# def test_incomplete_command_expansion(self):
|
||||
# self.assertRaises(reftracker.ShellSyntaxError, reftracker.execs,
|
||||
# bbvalue.shparse("cp foo`", self.d), self.d)
|
||||
|
||||
# def test_rogue_dollarsign(self):
|
||||
# self.setValues({"D" : "/tmp"})
|
||||
# self.parseExpression("install -d ${D}$")
|
||||
# self.assertReferences(set(["D"]))
|
||||
# self.assertExecs(set(["install"]))
|
||||
|
||||
|
||||
class PythonReferenceTest(ReferenceTest):
|
||||
|
||||
def setUp(self):
|
||||
self.d = bb.data.init()
|
||||
if hasattr(bb.utils, "_context"):
|
||||
self.context = bb.utils._context
|
||||
else:
|
||||
import __builtin__
|
||||
self.context = __builtin__.__dict__
|
||||
|
||||
def parseExpression(self, exp):
|
||||
parsedvar = self.d.expandWithRefs(exp, None)
|
||||
parser = bb.codeparser.PythonParser("ParserTest", logger)
|
||||
parser.parse_python(parsedvar.value)
|
||||
|
||||
self.references = parsedvar.references | parser.references
|
||||
self.execs = parser.execs
|
||||
|
||||
@staticmethod
|
||||
def indent(value):
|
||||
"""Python Snippets have to be indented, python values don't have to
|
||||
be. These unit tests are testing snippets."""
|
||||
return " " + value
|
||||
|
||||
def test_getvar_reference(self):
|
||||
self.parseExpression("bb.data.getVar('foo', d, True)")
|
||||
self.assertReferences(set(["foo"]))
|
||||
self.assertExecs(set())
|
||||
|
||||
def test_getvar_computed_reference(self):
|
||||
self.parseExpression("bb.data.getVar('f' + 'o' + 'o', d, True)")
|
||||
self.assertReferences(set())
|
||||
self.assertExecs(set())
|
||||
|
||||
def test_getvar_exec_reference(self):
|
||||
self.parseExpression("eval('bb.data.getVar(\"foo\", d, True)')")
|
||||
self.assertReferences(set())
|
||||
self.assertExecs(set(["eval"]))
|
||||
|
||||
def test_var_reference(self):
|
||||
self.context["foo"] = lambda x: x
|
||||
self.setEmptyVars(["FOO"])
|
||||
self.parseExpression("foo('${FOO}')")
|
||||
self.assertReferences(set(["FOO"]))
|
||||
self.assertExecs(set(["foo"]))
|
||||
del self.context["foo"]
|
||||
|
||||
def test_var_exec(self):
|
||||
for etype in ("func", "task"):
|
||||
self.d.setVar("do_something", "echo 'hi mom! ${FOO}'")
|
||||
self.d.setVarFlag("do_something", etype, True)
|
||||
self.parseExpression("bb.build.exec_func('do_something', d)")
|
||||
self.assertReferences(set(["do_something"]))
|
||||
|
||||
def test_function_reference(self):
|
||||
self.context["testfunc"] = lambda msg: bb.msg.note(1, None, msg)
|
||||
self.d.setVar("FOO", "Hello, World!")
|
||||
self.parseExpression("testfunc('${FOO}')")
|
||||
self.assertReferences(set(["FOO"]))
|
||||
self.assertExecs(set(["testfunc"]))
|
||||
del self.context["testfunc"]
|
||||
|
||||
def test_qualified_function_reference(self):
|
||||
self.parseExpression("time.time()")
|
||||
self.assertExecs(set(["time.time"]))
|
||||
|
||||
def test_qualified_function_reference_2(self):
|
||||
self.parseExpression("os.path.dirname('/foo/bar')")
|
||||
self.assertExecs(set(["os.path.dirname"]))
|
||||
|
||||
def test_qualified_function_reference_nested(self):
|
||||
self.parseExpression("time.strftime('%Y%m%d',time.gmtime())")
|
||||
self.assertExecs(set(["time.strftime", "time.gmtime"]))
|
||||
|
||||
def test_function_reference_chained(self):
|
||||
self.context["testget"] = lambda: "\tstrip me "
|
||||
self.parseExpression("testget().strip()")
|
||||
self.assertExecs(set(["testget"]))
|
||||
del self.context["testget"]
|
||||
|
||||
|
||||
class DependencyReferenceTest(ReferenceTest):
|
||||
|
||||
pydata = """
|
||||
bb.data.getVar('somevar', d, True)
|
||||
def test(d):
|
||||
foo = 'bar %s' % 'foo'
|
||||
def test2(d):
|
||||
d.getVar(foo, True)
|
||||
d.getVar('bar', False)
|
||||
test2(d)
|
||||
|
||||
def a():
|
||||
\"\"\"some
|
||||
stuff
|
||||
\"\"\"
|
||||
return "heh"
|
||||
|
||||
test(d)
|
||||
|
||||
bb.data.expand(bb.data.getVar("something", False, d), d)
|
||||
bb.data.expand("${inexpand} somethingelse", d)
|
||||
bb.data.getVar(a(), d, False)
|
||||
"""
|
||||
|
||||
def test_python(self):
|
||||
self.d.setVar("FOO", self.pydata)
|
||||
self.setEmptyVars(["inexpand", "a", "test2", "test"])
|
||||
self.d.setVarFlags("FOO", {"func": True, "python": True})
|
||||
|
||||
deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)
|
||||
|
||||
self.assertEquals(deps, set(["somevar", "bar", "something", "inexpand", "test", "test2", "a"]))
|
||||
|
||||
|
||||
shelldata = """
|
||||
foo () {
|
||||
bar
|
||||
}
|
||||
{
|
||||
echo baz
|
||||
$(heh)
|
||||
eval `moo`
|
||||
}
|
||||
a=b
|
||||
c=d
|
||||
(
|
||||
true && false
|
||||
test -f foo
|
||||
testval=something
|
||||
$testval
|
||||
) || aiee
|
||||
! inverted
|
||||
echo ${somevar}
|
||||
|
||||
case foo in
|
||||
bar)
|
||||
echo bar
|
||||
;;
|
||||
baz)
|
||||
echo baz
|
||||
;;
|
||||
foo*)
|
||||
echo foo
|
||||
;;
|
||||
esac
|
||||
"""
|
||||
|
||||
def test_shell(self):
|
||||
execs = ["bar", "echo", "heh", "moo", "true", "aiee"]
|
||||
self.d.setVar("somevar", "heh")
|
||||
self.d.setVar("inverted", "echo inverted...")
|
||||
self.d.setVarFlag("inverted", "func", True)
|
||||
self.d.setVar("FOO", self.shelldata)
|
||||
self.d.setVarFlags("FOO", {"func": True})
|
||||
self.setEmptyVars(execs)
|
||||
|
||||
deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)
|
||||
|
||||
self.assertEquals(deps, set(["somevar", "inverted"] + execs))
|
||||
|
||||
|
||||
def test_vardeps(self):
|
||||
self.d.setVar("oe_libinstall", "echo test")
|
||||
self.d.setVar("FOO", "foo=oe_libinstall; eval $foo")
|
||||
self.d.setVarFlag("FOO", "vardeps", "oe_libinstall")
|
||||
|
||||
deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)
|
||||
|
||||
self.assertEquals(deps, set(["oe_libinstall"]))
|
||||
|
||||
def test_vardeps_expand(self):
|
||||
self.d.setVar("oe_libinstall", "echo test")
|
||||
self.d.setVar("FOO", "foo=oe_libinstall; eval $foo")
|
||||
self.d.setVarFlag("FOO", "vardeps", "${@'oe_libinstall'}")
|
||||
|
||||
deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)
|
||||
|
||||
self.assertEquals(deps, set(["oe_libinstall"]))
|
||||
|
||||
#Currently no wildcard support
|
||||
#def test_vardeps_wildcards(self):
|
||||
# self.d.setVar("oe_libinstall", "echo test")
|
||||
# self.d.setVar("FOO", "foo=oe_libinstall; eval $foo")
|
||||
# self.d.setVarFlag("FOO", "vardeps", "oe_*")
|
||||
# self.assertEquals(deps, set(["oe_libinstall"]))
|
||||
|
||||
|
||||
@@ -1,134 +0,0 @@
|
||||
#
|
||||
# BitBake Tests for Copy-on-Write (cow.py)
|
||||
#
|
||||
# Copyright 2006 Holger Freyther <freyther@handhelds.org>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
|
||||
import unittest
|
||||
import os
|
||||
|
||||
class COWTestCase(unittest.TestCase):
|
||||
"""
|
||||
Test case for the COW module from mithro
|
||||
"""
|
||||
|
||||
def testGetSet(self):
|
||||
"""
|
||||
Test and set
|
||||
"""
|
||||
from bb.COW import COWDictBase
|
||||
a = COWDictBase.copy()
|
||||
|
||||
self.assertEquals(False, a.has_key('a'))
|
||||
|
||||
a['a'] = 'a'
|
||||
a['b'] = 'b'
|
||||
self.assertEquals(True, a.has_key('a'))
|
||||
self.assertEquals(True, a.has_key('b'))
|
||||
self.assertEquals('a', a['a'] )
|
||||
self.assertEquals('b', a['b'] )
|
||||
|
||||
def testCopyCopy(self):
|
||||
"""
|
||||
Test the copy of copies
|
||||
"""
|
||||
|
||||
from bb.COW import COWDictBase
|
||||
|
||||
# create two COW dict 'instances'
|
||||
b = COWDictBase.copy()
|
||||
c = COWDictBase.copy()
|
||||
|
||||
# assign some keys to one instance, some keys to another
|
||||
b['a'] = 10
|
||||
b['c'] = 20
|
||||
c['a'] = 30
|
||||
|
||||
# test separation of the two instances
|
||||
self.assertEquals(False, c.has_key('c'))
|
||||
self.assertEquals(30, c['a'])
|
||||
self.assertEquals(10, b['a'])
|
||||
|
||||
# test copy
|
||||
b_2 = b.copy()
|
||||
c_2 = c.copy()
|
||||
|
||||
self.assertEquals(False, c_2.has_key('c'))
|
||||
self.assertEquals(10, b_2['a'])
|
||||
|
||||
b_2['d'] = 40
|
||||
self.assertEquals(False, c_2.has_key('d'))
|
||||
self.assertEquals(True, b_2.has_key('d'))
|
||||
self.assertEquals(40, b_2['d'])
|
||||
self.assertEquals(False, b.has_key('d'))
|
||||
self.assertEquals(False, c.has_key('d'))
|
||||
|
||||
c_2['d'] = 30
|
||||
self.assertEquals(True, c_2.has_key('d'))
|
||||
self.assertEquals(True, b_2.has_key('d'))
|
||||
self.assertEquals(30, c_2['d'])
|
||||
self.assertEquals(40, b_2['d'])
|
||||
self.assertEquals(False, b.has_key('d'))
|
||||
self.assertEquals(False, c.has_key('d'))
|
||||
|
||||
# test copy of the copy
|
||||
c_3 = c_2.copy()
|
||||
b_3 = b_2.copy()
|
||||
b_3_2 = b_2.copy()
|
||||
|
||||
c_3['e'] = 4711
|
||||
self.assertEquals(4711, c_3['e'])
|
||||
self.assertEquals(False, c_2.has_key('e'))
|
||||
self.assertEquals(False, b_3.has_key('e'))
|
||||
self.assertEquals(False, b_3_2.has_key('e'))
|
||||
self.assertEquals(False, b_2.has_key('e'))
|
||||
|
||||
b_3['e'] = 'viel'
|
||||
self.assertEquals('viel', b_3['e'])
|
||||
self.assertEquals(4711, c_3['e'])
|
||||
self.assertEquals(False, c_2.has_key('e'))
|
||||
self.assertEquals(True, b_3.has_key('e'))
|
||||
self.assertEquals(False, b_3_2.has_key('e'))
|
||||
self.assertEquals(False, b_2.has_key('e'))
|
||||
|
||||
def testCow(self):
|
||||
from bb.COW import COWDictBase
|
||||
c = COWDictBase.copy()
|
||||
c['123'] = 1027
|
||||
c['other'] = 4711
|
||||
c['d'] = { 'abc' : 10, 'bcd' : 20 }
|
||||
|
||||
copy = c.copy()
|
||||
|
||||
self.assertEquals(1027, c['123'])
|
||||
self.assertEquals(4711, c['other'])
|
||||
self.assertEquals({'abc':10, 'bcd':20}, c['d'])
|
||||
self.assertEquals(1027, copy['123'])
|
||||
self.assertEquals(4711, copy['other'])
|
||||
self.assertEquals({'abc':10, 'bcd':20}, copy['d'])
|
||||
|
||||
# cow it now
|
||||
copy['123'] = 1028
|
||||
copy['other'] = 4712
|
||||
copy['d']['abc'] = 20
|
||||
|
||||
|
||||
self.assertEquals(1027, c['123'])
|
||||
self.assertEquals(4711, c['other'])
|
||||
self.assertEquals({'abc':10, 'bcd':20}, c['d'])
|
||||
self.assertEquals(1028, copy['123'])
|
||||
self.assertEquals(4712, copy['other'])
|
||||
self.assertEquals({'abc':20, 'bcd':20}, copy['d'])
|
||||
@@ -1,252 +0,0 @@
|
||||
#
|
||||
# BitBake Tests for the Data Store (data.py/data_smart.py)
|
||||
#
|
||||
# Copyright (C) 2010 Chris Larson
|
||||
# Copyright (C) 2012 Richard Purdie
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
|
||||
import unittest
|
||||
import bb
|
||||
import bb.data
|
||||
|
||||
class DataExpansions(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.d = bb.data.init()
|
||||
self.d["foo"] = "value of foo"
|
||||
self.d["bar"] = "value of bar"
|
||||
self.d["value of foo"] = "value of 'value of foo'"
|
||||
|
||||
def test_one_var(self):
|
||||
val = self.d.expand("${foo}")
|
||||
self.assertEqual(str(val), "value of foo")
|
||||
|
||||
def test_indirect_one_var(self):
|
||||
val = self.d.expand("${${foo}}")
|
||||
self.assertEqual(str(val), "value of 'value of foo'")
|
||||
|
||||
def test_indirect_and_another(self):
|
||||
val = self.d.expand("${${foo}} ${bar}")
|
||||
self.assertEqual(str(val), "value of 'value of foo' value of bar")
|
||||
|
||||
def test_python_snippet(self):
|
||||
val = self.d.expand("${@5*12}")
|
||||
self.assertEqual(str(val), "60")
|
||||
|
||||
def test_expand_in_python_snippet(self):
|
||||
val = self.d.expand("${@'boo ' + '${foo}'}")
|
||||
self.assertEqual(str(val), "boo value of foo")
|
||||
|
||||
def test_python_snippet_getvar(self):
|
||||
val = self.d.expand("${@d.getVar('foo', True) + ' ${bar}'}")
|
||||
self.assertEqual(str(val), "value of foo value of bar")
|
||||
|
||||
def test_python_snippet_syntax_error(self):
|
||||
self.d.setVar("FOO", "${@foo = 5}")
|
||||
self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)
|
||||
|
||||
def test_python_snippet_runtime_error(self):
|
||||
self.d.setVar("FOO", "${@int('test')}")
|
||||
self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)
|
||||
|
||||
def test_python_snippet_error_path(self):
|
||||
self.d.setVar("FOO", "foo value ${BAR}")
|
||||
self.d.setVar("BAR", "bar value ${@int('test')}")
|
||||
self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)
|
||||
|
||||
def test_value_containing_value(self):
|
||||
val = self.d.expand("${@d.getVar('foo', True) + ' ${bar}'}")
|
||||
self.assertEqual(str(val), "value of foo value of bar")
|
||||
|
||||
def test_reference_undefined_var(self):
|
||||
val = self.d.expand("${undefinedvar} meh")
|
||||
self.assertEqual(str(val), "${undefinedvar} meh")
|
||||
|
||||
def test_double_reference(self):
|
||||
self.d.setVar("BAR", "bar value")
|
||||
self.d.setVar("FOO", "${BAR} foo ${BAR}")
|
||||
val = self.d.getVar("FOO", True)
|
||||
self.assertEqual(str(val), "bar value foo bar value")
|
||||
|
||||
def test_direct_recursion(self):
|
||||
self.d.setVar("FOO", "${FOO}")
|
||||
self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)
|
||||
|
||||
def test_indirect_recursion(self):
|
||||
self.d.setVar("FOO", "${BAR}")
|
||||
self.d.setVar("BAR", "${BAZ}")
|
||||
self.d.setVar("BAZ", "${FOO}")
|
||||
self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)
|
||||
|
||||
def test_recursion_exception(self):
|
||||
self.d.setVar("FOO", "${BAR}")
|
||||
self.d.setVar("BAR", "${${@'FOO'}}")
|
||||
self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)
|
||||
|
||||
def test_incomplete_varexp_single_quotes(self):
|
||||
self.d.setVar("FOO", "sed -i -e 's:IP{:I${:g' $pc")
|
||||
val = self.d.getVar("FOO", True)
|
||||
self.assertEqual(str(val), "sed -i -e 's:IP{:I${:g' $pc")
|
||||
|
||||
def test_nonstring(self):
|
||||
self.d.setVar("TEST", 5)
|
||||
val = self.d.getVar("TEST", True)
|
||||
self.assertEqual(str(val), "5")
|
||||
|
||||
def test_rename(self):
|
||||
self.d.renameVar("foo", "newfoo")
|
||||
self.assertEqual(self.d.getVar("newfoo"), "value of foo")
|
||||
self.assertEqual(self.d.getVar("foo"), None)
|
||||
|
||||
def test_deletion(self):
|
||||
self.d.delVar("foo")
|
||||
self.assertEqual(self.d.getVar("foo"), None)
|
||||
|
||||
def test_keys(self):
|
||||
keys = self.d.keys()
|
||||
self.assertEqual(keys, ['value of foo', 'foo', 'bar'])
|
||||
|
||||
class TestNestedExpansions(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.d = bb.data.init()
|
||||
self.d["foo"] = "foo"
|
||||
self.d["bar"] = "bar"
|
||||
self.d["value of foobar"] = "187"
|
||||
|
||||
def test_refs(self):
|
||||
val = self.d.expand("${value of ${foo}${bar}}")
|
||||
self.assertEqual(str(val), "187")
|
||||
|
||||
#def test_python_refs(self):
|
||||
# val = self.d.expand("${@${@3}**2 + ${@4}**2}")
|
||||
# self.assertEqual(str(val), "25")
|
||||
|
||||
def test_ref_in_python_ref(self):
|
||||
val = self.d.expand("${@'${foo}' + 'bar'}")
|
||||
self.assertEqual(str(val), "foobar")
|
||||
|
||||
def test_python_ref_in_ref(self):
|
||||
val = self.d.expand("${${@'f'+'o'+'o'}}")
|
||||
self.assertEqual(str(val), "foo")
|
||||
|
||||
def test_deep_nesting(self):
|
||||
depth = 100
|
||||
val = self.d.expand("${" * depth + "foo" + "}" * depth)
|
||||
self.assertEqual(str(val), "foo")
|
||||
|
||||
#def test_deep_python_nesting(self):
|
||||
# depth = 50
|
||||
# val = self.d.expand("${@" * depth + "1" + "+1}" * depth)
|
||||
# self.assertEqual(str(val), str(depth + 1))
|
||||
|
||||
def test_mixed(self):
|
||||
val = self.d.expand("${value of ${@('${foo}'+'bar')[0:3]}${${@'BAR'.lower()}}}")
|
||||
self.assertEqual(str(val), "187")
|
||||
|
||||
def test_runtime(self):
|
||||
val = self.d.expand("${${@'value of' + ' f'+'o'+'o'+'b'+'a'+'r'}}")
|
||||
self.assertEqual(str(val), "187")
|
||||
|
||||
class TestMemoize(unittest.TestCase):
|
||||
def test_memoized(self):
|
||||
d = bb.data.init()
|
||||
d.setVar("FOO", "bar")
|
||||
self.assertTrue(d.getVar("FOO") is d.getVar("FOO"))
|
||||
|
||||
def test_not_memoized(self):
|
||||
d1 = bb.data.init()
|
||||
d2 = bb.data.init()
|
||||
d1.setVar("FOO", "bar")
|
||||
d2.setVar("FOO", "bar2")
|
||||
self.assertTrue(d1.getVar("FOO") is not d2.getVar("FOO"))
|
||||
|
||||
def test_changed_after_memoized(self):
|
||||
d = bb.data.init()
|
||||
d.setVar("foo", "value of foo")
|
||||
self.assertEqual(str(d.getVar("foo")), "value of foo")
|
||||
d.setVar("foo", "second value of foo")
|
||||
self.assertEqual(str(d.getVar("foo")), "second value of foo")
|
||||
|
||||
def test_same_value(self):
|
||||
d = bb.data.init()
|
||||
d.setVar("foo", "value of")
|
||||
d.setVar("bar", "value of")
|
||||
self.assertEqual(d.getVar("foo"),
|
||||
d.getVar("bar"))
|
||||
|
||||
class TestConcat(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.d = bb.data.init()
|
||||
self.d.setVar("FOO", "foo")
|
||||
self.d.setVar("VAL", "val")
|
||||
self.d.setVar("BAR", "bar")
|
||||
|
||||
def test_prepend(self):
|
||||
self.d.setVar("TEST", "${VAL}")
|
||||
self.d.prependVar("TEST", "${FOO}:")
|
||||
self.assertEqual(self.d.getVar("TEST", True), "foo:val")
|
||||
|
||||
def test_append(self):
|
||||
self.d.setVar("TEST", "${VAL}")
|
||||
self.d.appendVar("TEST", ":${BAR}")
|
||||
self.assertEqual(self.d.getVar("TEST", True), "val:bar")
|
||||
|
||||
def test_multiple_append(self):
|
||||
self.d.setVar("TEST", "${VAL}")
|
||||
self.d.prependVar("TEST", "${FOO}:")
|
||||
self.d.appendVar("TEST", ":val2")
|
||||
self.d.appendVar("TEST", ":${BAR}")
|
||||
self.assertEqual(self.d.getVar("TEST", True), "foo:val:val2:bar")
|
||||
|
||||
class TestOverrides(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.d = bb.data.init()
|
||||
self.d.setVar("OVERRIDES", "foo:bar:local")
|
||||
self.d.setVar("TEST", "testvalue")
|
||||
|
||||
def test_no_override(self):
|
||||
bb.data.update_data(self.d)
|
||||
self.assertEqual(self.d.getVar("TEST", True), "testvalue")
|
||||
|
||||
def test_one_override(self):
|
||||
self.d.setVar("TEST_bar", "testvalue2")
|
||||
bb.data.update_data(self.d)
|
||||
self.assertEqual(self.d.getVar("TEST", True), "testvalue2")
|
||||
|
||||
def test_multiple_override(self):
|
||||
self.d.setVar("TEST_bar", "testvalue2")
|
||||
self.d.setVar("TEST_local", "testvalue3")
|
||||
self.d.setVar("TEST_foo", "testvalue4")
|
||||
bb.data.update_data(self.d)
|
||||
self.assertEqual(self.d.getVar("TEST", True), "testvalue3")
|
||||
|
||||
|
||||
class TestFlags(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.d = bb.data.init()
|
||||
self.d.setVar("foo", "value of foo")
|
||||
self.d.setVarFlag("foo", "flag1", "value of flag1")
|
||||
self.d.setVarFlag("foo", "flag2", "value of flag2")
|
||||
|
||||
def test_setflag(self):
|
||||
self.assertEqual(self.d.getVarFlag("foo", "flag1"), "value of flag1")
|
||||
self.assertEqual(self.d.getVarFlag("foo", "flag2"), "value of flag2")
|
||||
|
||||
def test_delflag(self):
|
||||
self.d.delVarFlag("foo", "flag2")
|
||||
self.assertEqual(self.d.getVarFlag("foo", "flag1"), "value of flag1")
|
||||
self.assertEqual(self.d.getVarFlag("foo", "flag2"), None)
|
||||
|
||||
|
||||
@@ -1,105 +0,0 @@
|
||||
#
|
||||
# BitBake Tests for the Fetcher (fetch2/)
|
||||
#
|
||||
# Copyright (C) 2012 Richard Purdie
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
|
||||
import unittest
|
||||
import tempfile
|
||||
import subprocess
|
||||
import os
|
||||
import bb
|
||||
|
||||
|
||||
class FetcherTest(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.d = bb.data.init()
|
||||
self.tempdir = tempfile.mkdtemp()
|
||||
self.dldir = os.path.join(self.tempdir, "download")
|
||||
os.mkdir(self.dldir)
|
||||
self.d.setVar("DL_DIR", self.dldir)
|
||||
self.unpackdir = os.path.join(self.tempdir, "unpacked")
|
||||
os.mkdir(self.unpackdir)
|
||||
persistdir = os.path.join(self.tempdir, "persistdata")
|
||||
self.d.setVar("PERSISTENT_DIR", persistdir)
|
||||
|
||||
def tearDown(self):
|
||||
bb.utils.prunedir(self.tempdir)
|
||||
|
||||
def test_fetch(self):
|
||||
fetcher = bb.fetch.Fetch(["http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d)
|
||||
fetcher.download()
|
||||
self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
|
||||
self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.1.tar.gz"), 57892)
|
||||
self.d.setVar("BB_NO_NETWORK", "1")
|
||||
fetcher = bb.fetch.Fetch(["http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d)
|
||||
fetcher.download()
|
||||
fetcher.unpack(self.unpackdir)
|
||||
self.assertEqual(len(os.listdir(self.unpackdir + "/bitbake-1.0/")), 9)
|
||||
self.assertEqual(len(os.listdir(self.unpackdir + "/bitbake-1.1/")), 9)
|
||||
|
||||
def test_fetch_mirror(self):
|
||||
self.d.setVar("MIRRORS", "http://.*/.* http://downloads.yoctoproject.org/releases/bitbake")
|
||||
fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
|
||||
fetcher.download()
|
||||
self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
|
||||
|
||||
def test_fetch_premirror(self):
|
||||
self.d.setVar("PREMIRRORS", "http://.*/.* http://downloads.yoctoproject.org/releases/bitbake")
|
||||
fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
|
||||
fetcher.download()
|
||||
self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
|
||||
|
||||
def test_gitfetch(self):
|
||||
def checkrevision(self, fetcher):
|
||||
fetcher.unpack(self.unpackdir)
|
||||
revision = subprocess.check_output("git rev-parse HEAD", shell=True, cwd=self.unpackdir + "/git").strip()
|
||||
self.assertEqual(revision, "270a05b0b4ba0959fe0624d2a4885d7b70426da5")
|
||||
|
||||
self.d.setVar("BB_GENERATE_MIRROR_TARBALLS", "1")
|
||||
self.d.setVar("SRCREV", "270a05b0b4ba0959fe0624d2a4885d7b70426da5")
|
||||
fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake"], self.d)
|
||||
fetcher.download()
|
||||
checkrevision(self, fetcher)
|
||||
# Wipe out the dldir clone and the unpacked source, turn off the network and check mirror tarball works
|
||||
bb.utils.prunedir(self.dldir + "/git2/")
|
||||
bb.utils.prunedir(self.unpackdir)
|
||||
self.d.setVar("BB_NO_NETWORK", "1")
|
||||
fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake"], self.d)
|
||||
fetcher.download()
|
||||
checkrevision(self, fetcher)
|
||||
|
||||
class URLHandle(unittest.TestCase):
|
||||
|
||||
datatable = {
|
||||
"http://www.google.com/index.html" : ('http', 'www.google.com', '/index.html', '', '', {}),
|
||||
"cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'}),
|
||||
"cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', {'tag': 'V0-99-81', 'module': 'familiar/dist/ipkg'})
|
||||
}
|
||||
|
||||
def test_decodeurl(self):
|
||||
for k, v in self.datatable.items():
|
||||
result = bb.fetch.decodeurl(k)
|
||||
self.assertEqual(result, v)
|
||||
|
||||
def test_encodeurl(self):
|
||||
for k, v in self.datatable.items():
|
||||
result = bb.fetch.encodeurl(v)
|
||||
self.assertEqual(result, k)
|
||||
|
||||
|
||||
|
||||
@@ -1,36 +0,0 @@
|
||||
#
|
||||
# BitBake Tests for utils.py
|
||||
#
|
||||
# Copyright (C) 2012 Richard Purdie
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
|
||||
import unittest
|
||||
import bb
|
||||
|
||||
class VerCmpString(unittest.TestCase):
|
||||
|
||||
def test_vercmpstring(self):
|
||||
result = bb.utils.vercmp_string('1', '2')
|
||||
self.assertTrue(result < 0)
|
||||
result = bb.utils.vercmp_string('2', '1')
|
||||
self.assertTrue(result > 0)
|
||||
result = bb.utils.vercmp_string('1', '1.0')
|
||||
self.assertTrue(result < 0)
|
||||
result = bb.utils.vercmp_string('1', '1.1')
|
||||
self.assertTrue(result < 0)
|
||||
result = bb.utils.vercmp_string('1.1', '1_p2')
|
||||
self.assertTrue(result < 0)
|
||||
|
||||
@@ -23,13 +23,11 @@
|
||||
import gtk
|
||||
import pango
|
||||
import gobject
|
||||
import bb.process
|
||||
from bb.ui.crumbs.progressbar import HobProgressBar
|
||||
from bb.ui.crumbs.hobwidget import hic, HobNotebook, HobAltButton, HobWarpCellRendererText, HobButton
|
||||
from bb.ui.crumbs.hobwidget import hic, HobNotebook, HobAltButton, HobWarpCellRendererText
|
||||
from bb.ui.crumbs.runningbuild import RunningBuildTreeView
|
||||
from bb.ui.crumbs.runningbuild import BuildFailureTreeView
|
||||
from bb.ui.crumbs.hobpages import HobPage
|
||||
from bb.ui.crumbs.hobcolor import HobColors
|
||||
|
||||
class BuildConfigurationTreeView(gtk.TreeView):
|
||||
def __init__ (self):
|
||||
@@ -98,10 +96,11 @@ class BuildConfigurationTreeView(gtk.TreeView):
|
||||
for path in src_config_info.layers:
|
||||
import os, os.path
|
||||
if os.path.exists(path):
|
||||
branch = bb.process.run('cd %s; git branch | grep "^* " | tr -d "* "' % path)[0]
|
||||
if branch:
|
||||
branch = branch.strip('\n')
|
||||
f = os.popen('cd %s; git branch 2>&1 | grep "^* " | tr -d "* "' % path)
|
||||
if f:
|
||||
branch = f.readline().lstrip('\n').rstrip('\n')
|
||||
vars.append(self.set_vars("Branch:", branch))
|
||||
f.close()
|
||||
break
|
||||
|
||||
self.set_config_model(vars)
|
||||
@@ -145,7 +144,7 @@ class BuildDetailsPage (HobPage):
|
||||
self.scrolled_view_config = gtk.ScrolledWindow ()
|
||||
self.scrolled_view_config.set_policy(gtk.POLICY_NEVER, gtk.POLICY_ALWAYS)
|
||||
self.scrolled_view_config.add(self.config_tv)
|
||||
self.notebook.append_page(self.scrolled_view_config, "Build configuration")
|
||||
self.notebook.append_page(self.scrolled_view_config, gtk.Label("Build configuration"))
|
||||
|
||||
self.failure_tv = BuildFailureTreeView()
|
||||
self.failure_model = self.builder.handler.build.model.failure_model()
|
||||
@@ -153,14 +152,14 @@ class BuildDetailsPage (HobPage):
|
||||
self.scrolled_view_failure = gtk.ScrolledWindow ()
|
||||
self.scrolled_view_failure.set_policy(gtk.POLICY_NEVER, gtk.POLICY_ALWAYS)
|
||||
self.scrolled_view_failure.add(self.failure_tv)
|
||||
self.notebook.append_page(self.scrolled_view_failure, "Issues")
|
||||
self.notebook.append_page(self.scrolled_view_failure, gtk.Label("Issues"))
|
||||
|
||||
self.build_tv = RunningBuildTreeView(readonly=True, hob=True)
|
||||
self.build_tv.set_model(self.builder.handler.build.model)
|
||||
self.scrolled_view_build = gtk.ScrolledWindow ()
|
||||
self.scrolled_view_build.set_policy(gtk.POLICY_NEVER, gtk.POLICY_ALWAYS)
|
||||
self.scrolled_view_build.add(self.build_tv)
|
||||
self.notebook.append_page(self.scrolled_view_build, "Log")
|
||||
self.notebook.append_page(self.scrolled_view_build, gtk.Label("Log"))
|
||||
|
||||
self.builder.handler.build.model.connect_after("row-changed", self.scroll_to_present_row, self.scrolled_view_build.get_vadjustment(), self.build_tv)
|
||||
|
||||
@@ -199,87 +198,6 @@ class BuildDetailsPage (HobPage):
|
||||
for child in children:
|
||||
self.remove(child)
|
||||
|
||||
def update_failures_sum_display(self):
|
||||
num = 0
|
||||
it = self.failure_model.get_iter_first()
|
||||
while it:
|
||||
color = self.failure_model.get_value(it, self.builder.handler.build.model.COL_COLOR)
|
||||
if color == HobColors.ERROR:
|
||||
num += 1
|
||||
it = self.failure_model.iter_next(it)
|
||||
|
||||
return num
|
||||
|
||||
def add_build_fail_top_bar(self, actions):
|
||||
mainly_action = "Edit %s" % actions
|
||||
if 'image' in actions:
|
||||
next_action = ""
|
||||
else:
|
||||
next_action = "Create new image"
|
||||
|
||||
#set to issue page
|
||||
self.notebook.set_page("Issues")
|
||||
|
||||
color = HobColors.ERROR
|
||||
build_fail_top = gtk.EventBox()
|
||||
build_fail_top.set_size_request(-1, 260)
|
||||
build_fail_top.modify_bg(gtk.STATE_NORMAL, gtk.gdk.color_parse(color))
|
||||
|
||||
build_fail_tab = gtk.Table(7, 40, True)
|
||||
build_fail_top.add(build_fail_tab)
|
||||
|
||||
icon = gtk.Image()
|
||||
icon_pix_buffer = gtk.gdk.pixbuf_new_from_file(hic.ICON_INDI_ERROR_FILE)
|
||||
icon.set_from_pixbuf(icon_pix_buffer)
|
||||
build_fail_tab.attach(icon, 1, 4, 0, 3)
|
||||
|
||||
label = gtk.Label()
|
||||
label.set_alignment(0.0, 0.5)
|
||||
label.set_markup("<span size='x-large'>%s</span>" % self.title)
|
||||
build_fail_tab.attach(label, 4, 20, 0, 3)
|
||||
|
||||
label = gtk.Label()
|
||||
label.set_alignment(0.0, 0.5)
|
||||
num_of_fails = self.update_failures_sum_display()
|
||||
current_fail, recipe_task_status = self.task_status.get_text().split('\n')
|
||||
label.set_markup(" %d tasks failed, %s, %s" % (num_of_fails, current_fail, recipe_task_status))
|
||||
build_fail_tab.attach(label, 4, 40, 2, 4)
|
||||
|
||||
# create button 'Edit packages'
|
||||
action_button = HobButton(mainly_action)
|
||||
action_button.set_size_request(-1, 49)
|
||||
action_button.connect('clicked', self.failure_main_action_button_clicked_cb, mainly_action)
|
||||
build_fail_tab.attach(action_button, 4, 16, 4, 6)
|
||||
|
||||
if next_action:
|
||||
next_button = HobAltButton(next_action)
|
||||
next_button.set_alignment(0.0, 0.5)
|
||||
next_button.connect('clicked', self.failure_next_action_button_clicked_cb, next_action)
|
||||
build_fail_tab.attach(next_button, 17, 24, 4, 5)
|
||||
|
||||
file_bug_button = HobAltButton('File a bug')
|
||||
file_bug_button.set_alignment(0.0, 0.5)
|
||||
file_bug_button.connect('clicked', self.failure_file_bug_activate_link_cb)
|
||||
build_fail_tab.attach(file_bug_button, 17, 24, 4 + abs(next_action != ""), 6)
|
||||
|
||||
return build_fail_top
|
||||
|
||||
def show_fail_page(self, title, action_names):
|
||||
self._remove_all_widget()
|
||||
self.title = "Hob cannot build your %s" % title
|
||||
|
||||
self.build_fail_bar = self.add_build_fail_top_bar(action_names)
|
||||
self.pack_start(self.build_fail_bar)
|
||||
self.pack_start(self.group_align, expand=True, fill=True)
|
||||
|
||||
self.box_group_area.pack_start(self.vbox, expand=True, fill=True)
|
||||
|
||||
self.vbox.pack_start(self.notebook, expand=True, fill=True)
|
||||
|
||||
self.box_group_area.pack_end(self.button_box, expand=False, fill=False)
|
||||
self.show_all()
|
||||
self.back_button.hide()
|
||||
|
||||
def show_page(self, step):
|
||||
self._remove_all_widget()
|
||||
if step == self.builder.PACKAGE_GENERATING or step == self.builder.FAST_IMAGE_GENERATING:
|
||||
@@ -333,18 +251,3 @@ class BuildDetailsPage (HobPage):
|
||||
|
||||
def show_configurations(self, configurations, params):
|
||||
self.config_tv.show(configurations, params)
|
||||
|
||||
def failure_main_action_button_clicked_cb(self, button, action):
|
||||
if "Edit recipes" in action:
|
||||
self.builder.show_recipes()
|
||||
elif "Edit packages" in action:
|
||||
self.builder.show_packages()
|
||||
elif "Edit image configuration" in action:
|
||||
self.builder.show_configuration()
|
||||
|
||||
def failure_next_action_button_clicked_cb(self, button, action):
|
||||
if "Create new image" in action:
|
||||
self.builder.initiate_new_build_async()
|
||||
|
||||
def failure_file_bug_activate_link_cb(self, button):
|
||||
button.child.emit('activate-link', "http://bugzilla.yoctoproject.org")
|
||||
|
||||
@@ -26,7 +26,6 @@ import copy
|
||||
import os
|
||||
import subprocess
|
||||
import shlex
|
||||
import re
|
||||
from bb.ui.crumbs.template import TemplateMgr
|
||||
from bb.ui.crumbs.imageconfigurationpage import ImageConfigurationPage
|
||||
from bb.ui.crumbs.recipeselectionpage import RecipeSelectionPage
|
||||
@@ -40,49 +39,9 @@ from bb.ui.crumbs.hig import CrumbsMessageDialog, ImageSelectionDialog, \
|
||||
from bb.ui.crumbs.persistenttooltip import PersistentTooltip
|
||||
import bb.ui.crumbs.utils
|
||||
|
||||
hobVer = 20120530
|
||||
|
||||
class Configuration:
|
||||
'''Represents the data structure of configuration.'''
|
||||
|
||||
@classmethod
|
||||
def parse_proxy_string(cls, proxy):
|
||||
pattern = "^\s*((http|https|ftp|git|cvs)://)?((\S+):(\S+)@)?(\S+):(\d+)/?"
|
||||
match = re.search(pattern, proxy)
|
||||
if match:
|
||||
return match.group(2), match.group(4), match.group(5), match.group(6), match.group(7)
|
||||
else:
|
||||
return None, None, None, "", ""
|
||||
|
||||
@classmethod
|
||||
def make_host_string(cls, prot, user, passwd, host, default_prot=""):
|
||||
if host == None or host == "":
|
||||
return ""
|
||||
|
||||
passwd = passwd or ""
|
||||
|
||||
if user != None and user != "":
|
||||
if prot == None or prot == "":
|
||||
prot = default_prot
|
||||
return prot + "://" + user + ":" + passwd + "@" + host
|
||||
else:
|
||||
if prot == None or prot == "":
|
||||
return host
|
||||
else:
|
||||
return prot + "://" + host
|
||||
|
||||
@classmethod
|
||||
def make_port_string(cls, port):
|
||||
port = port or ""
|
||||
return port
|
||||
|
||||
@classmethod
|
||||
def make_proxy_string(cls, prot, user, passwd, host, port, default_prot=""):
|
||||
if host == None or host == "" or port == None or port == "":
|
||||
return ""
|
||||
|
||||
return Configuration.make_host_string(prot, user, passwd, host, default_prot) + ":" + Configuration.make_port_string(port)
|
||||
|
||||
def __init__(self):
|
||||
self.curr_mach = ""
|
||||
# settings
|
||||
@@ -108,43 +67,15 @@ class Configuration:
|
||||
self.default_task = "build"
|
||||
|
||||
# proxy settings
|
||||
self.enable_proxy = None
|
||||
self.same_proxy = False
|
||||
self.proxies = {
|
||||
"http" : [None, None, None, "", ""], # protocol : [prot, user, passwd, host, port]
|
||||
"https" : [None, None, None, "", ""],
|
||||
"ftp" : [None, None, None, "", ""],
|
||||
"git" : [None, None, None, "", ""],
|
||||
"cvs" : [None, None, None, "", ""],
|
||||
}
|
||||
self.all_proxy = self.http_proxy = self.ftp_proxy = self.https_proxy = ""
|
||||
self.git_proxy_host = self.git_proxy_port = ""
|
||||
self.cvs_proxy_host = self.cvs_proxy_port = ""
|
||||
|
||||
def clear_selection(self):
|
||||
self.selected_image = None
|
||||
self.selected_recipes = []
|
||||
self.selected_packages = []
|
||||
|
||||
def split_proxy(self, protocol, proxy):
|
||||
entry = []
|
||||
prot, user, passwd, host, port = Configuration.parse_proxy_string(proxy)
|
||||
entry.append(prot)
|
||||
entry.append(user)
|
||||
entry.append(passwd)
|
||||
entry.append(host)
|
||||
entry.append(port)
|
||||
self.proxies[protocol] = entry
|
||||
|
||||
def combine_proxy(self, protocol):
|
||||
entry = self.proxies[protocol]
|
||||
return Configuration.make_proxy_string(entry[0], entry[1], entry[2], entry[3], entry[4], protocol)
|
||||
|
||||
def combine_host_only(self, protocol):
|
||||
entry = self.proxies[protocol]
|
||||
return Configuration.make_host_string(entry[0], entry[1], entry[2], entry[3], protocol)
|
||||
|
||||
def combine_port_only(self, protocol):
|
||||
entry = self.proxies[protocol]
|
||||
return Configuration.make_port_string(entry[4])
|
||||
|
||||
def update(self, params):
|
||||
# settings
|
||||
self.curr_distro = params["distro"]
|
||||
@@ -168,14 +99,14 @@ class Configuration:
|
||||
self.default_task = params["default_task"]
|
||||
|
||||
# proxy settings
|
||||
self.enable_proxy = params["http_proxy"] != "" or params["https_proxy"] != "" or params["ftp_proxy"] != "" \
|
||||
or params["git_proxy_host"] != "" or params["git_proxy_port"] != "" \
|
||||
or params["cvs_proxy_host"] != "" or params["cvs_proxy_port"] != ""
|
||||
self.split_proxy("http", params["http_proxy"])
|
||||
self.split_proxy("https", params["https_proxy"])
|
||||
self.split_proxy("ftp", params["ftp_proxy"])
|
||||
self.split_proxy("git", params["git_proxy_host"] + ":" + params["git_proxy_port"])
|
||||
self.split_proxy("cvs", params["cvs_proxy_host"] + ":" + params["cvs_proxy_port"])
|
||||
self.all_proxy = params["all_proxy"]
|
||||
self.http_proxy = params["http_proxy"]
|
||||
self.ftp_proxy = params["ftp_proxy"]
|
||||
self.https_proxy = params["https_proxy"]
|
||||
self.git_proxy_host = params["git_proxy_host"]
|
||||
self.git_proxy_port = params["git_proxy_port"]
|
||||
self.cvs_proxy_host = params["cvs_proxy_host"]
|
||||
self.cvs_proxy_port = params["cvs_proxy_port"]
|
||||
|
||||
def load(self, template):
|
||||
self.curr_mach = template.getVar("MACHINE")
|
||||
@@ -215,16 +146,16 @@ class Configuration:
|
||||
self.selected_recipes = template.getVar("DEPENDS").split()
|
||||
self.selected_packages = template.getVar("IMAGE_INSTALL").split()
|
||||
# proxy
|
||||
self.enable_proxy = eval(template.getVar("enable_proxy"))
|
||||
self.same_proxy = eval(template.getVar("use_same_proxy"))
|
||||
self.split_proxy("http", template.getVar("http_proxy"))
|
||||
self.split_proxy("https", template.getVar("https_proxy"))
|
||||
self.split_proxy("ftp", template.getVar("ftp_proxy"))
|
||||
self.split_proxy("git", template.getVar("GIT_PROXY_HOST") + ":" + template.getVar("GIT_PROXY_PORT"))
|
||||
self.split_proxy("cvs", template.getVar("CVS_PROXY_HOST") + ":" + template.getVar("CVS_PROXY_PORT"))
|
||||
self.all_proxy = template.getVar("all_proxy")
|
||||
self.http_proxy = template.getVar("http_proxy")
|
||||
self.ftp_proxy = template.getVar("ftp_proxy")
|
||||
self.https_proxy = template.getVar("https_proxy")
|
||||
self.git_proxy_host = template.getVar("GIT_PROXY_HOST")
|
||||
self.git_proxy_port = template.getVar("GIT_PROXY_PORT")
|
||||
self.cvs_proxy_host = template.getVar("CVS_PROXY_HOST")
|
||||
self.cvs_proxy_port = template.getVar("CVS_PROXY_PORT")
|
||||
|
||||
def save(self, template, defaults=False):
|
||||
template.setVar("VERSION", "%s" % hobVer)
|
||||
# bblayers.conf
|
||||
template.setVar("BBLAYERS", " ".join(self.layers))
|
||||
# local.conf
|
||||
@@ -252,15 +183,14 @@ class Configuration:
|
||||
template.setVar("DEPENDS", self.selected_recipes)
|
||||
template.setVar("IMAGE_INSTALL", self.user_selected_packages)
|
||||
# proxy
|
||||
template.setVar("enable_proxy", self.enable_proxy)
|
||||
template.setVar("use_same_proxy", self.same_proxy)
|
||||
template.setVar("http_proxy", self.combine_proxy("http"))
|
||||
template.setVar("https_proxy", self.combine_proxy("https"))
|
||||
template.setVar("ftp_proxy", self.combine_proxy("ftp"))
|
||||
template.setVar("GIT_PROXY_HOST", self.combine_host_only("git"))
|
||||
template.setVar("GIT_PROXY_PORT", self.combine_port_only("git"))
|
||||
template.setVar("CVS_PROXY_HOST", self.combine_host_only("cvs"))
|
||||
template.setVar("CVS_PROXY_PORT", self.combine_port_only("cvs"))
|
||||
template.setVar("all_proxy", self.all_proxy)
|
||||
template.setVar("http_proxy", self.http_proxy)
|
||||
template.setVar("ftp_proxy", self.ftp_proxy)
|
||||
template.setVar("https_proxy", self.https_proxy)
|
||||
template.setVar("GIT_PROXY_HOST", self.git_proxy_host)
|
||||
template.setVar("GIT_PROXY_PORT", self.git_proxy_port)
|
||||
template.setVar("CVS_PROXY_HOST", self.cvs_proxy_host)
|
||||
template.setVar("CVS_PROXY_PORT", self.cvs_proxy_port)
|
||||
|
||||
class Parameters:
|
||||
'''Represents other variables like available machines, etc.'''
|
||||
@@ -282,8 +212,7 @@ class Parameters:
|
||||
self.all_sdk_machines = []
|
||||
self.all_layers = []
|
||||
self.image_names = []
|
||||
self.image_white_pattern = ""
|
||||
self.image_black_pattern = ""
|
||||
self.enable_proxy = False
|
||||
|
||||
# for build log to show
|
||||
self.bb_version = ""
|
||||
@@ -301,9 +230,6 @@ class Parameters:
|
||||
self.runnable_machine_patterns = params["runnable_machine_patterns"].split()
|
||||
self.deployable_image_types = params["deployable_image_types"].split()
|
||||
self.tmpdir = params["tmpdir"]
|
||||
self.image_white_pattern = params["image_white_pattern"]
|
||||
self.image_black_pattern = params["image_black_pattern"]
|
||||
self.kernel_image_type = params["kernel_image_type"]
|
||||
# for build log to show
|
||||
self.bb_version = params["bb_version"]
|
||||
self.target_arch = params["target_arch"]
|
||||
@@ -375,15 +301,6 @@ class Builder(gtk.Window):
|
||||
END_NOOP : None,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def interpret_markup(cls, msg):
|
||||
msg = msg.replace('&', '&')
|
||||
msg = msg.replace('<', '<')
|
||||
msg = msg.replace('>', '>')
|
||||
msg = msg.replace('"', '"')
|
||||
msg = msg.replace("'", "´")
|
||||
return msg
|
||||
|
||||
def __init__(self, hobHandler, recipe_model, package_model):
|
||||
super(Builder, self).__init__()
|
||||
|
||||
@@ -481,7 +398,7 @@ class Builder(gtk.Window):
|
||||
|
||||
def initiate_new_build_async(self):
|
||||
self.switch_page(self.MACHINE_SELECTION)
|
||||
if self.load_template(TemplateMgr.convert_to_template_pathfilename("default", ".hob/")) == False:
|
||||
if self.load_template(TemplateMgr.convert_to_template_pathfilename("default", ".hob/")) == None:
|
||||
self.handler.init_cooker()
|
||||
self.handler.set_extra_inherit("image_types")
|
||||
self.handler.generate_configuration()
|
||||
@@ -524,7 +441,7 @@ class Builder(gtk.Window):
|
||||
toolchain_packages = []
|
||||
if self.configuration.toolchain_build:
|
||||
toolchain_packages = self.package_model.get_selected_packages_toolchain()
|
||||
if self.configuration.selected_image == self.recipe_model.__custom_image__:
|
||||
if self.configuration.selected_image == self.recipe_model.__dummy_image__:
|
||||
packages = self.package_model.get_selected_packages()
|
||||
image = self.hob_image
|
||||
else:
|
||||
@@ -550,16 +467,9 @@ class Builder(gtk.Window):
|
||||
|
||||
def load_template(self, path):
|
||||
if not os.path.isfile(path):
|
||||
return False
|
||||
return None
|
||||
|
||||
self.template = TemplateMgr()
|
||||
# check compatibility
|
||||
tempVer = self.template.getVersion(path)
|
||||
if not tempVer or int(tempVer) < hobVer:
|
||||
self.template.destroy()
|
||||
self.template = None
|
||||
return False
|
||||
|
||||
try:
|
||||
self.template.load(path)
|
||||
self.configuration.load(self.template)
|
||||
@@ -671,18 +581,13 @@ class Builder(gtk.Window):
|
||||
self.handler.set_extra_inherit("packageinfo")
|
||||
self.handler.set_extra_inherit("image_types")
|
||||
# set proxies
|
||||
if self.configuration.enable_proxy == True:
|
||||
self.handler.set_http_proxy(self.configuration.combine_proxy("http"))
|
||||
self.handler.set_https_proxy(self.configuration.combine_proxy("https"))
|
||||
self.handler.set_ftp_proxy(self.configuration.combine_proxy("ftp"))
|
||||
self.handler.set_git_proxy(self.configuration.combine_host_only("git"), self.configuration.combine_port_only("git"))
|
||||
self.handler.set_cvs_proxy(self.configuration.combine_host_only("cvs"), self.configuration.combine_port_only("cvs"))
|
||||
elif self.configuration.enable_proxy == False:
|
||||
self.handler.set_http_proxy("")
|
||||
self.handler.set_https_proxy("")
|
||||
self.handler.set_ftp_proxy("")
|
||||
self.handler.set_git_proxy("", "")
|
||||
self.handler.set_cvs_proxy("", "")
|
||||
if self.parameters.enable_proxy:
|
||||
self.handler.set_http_proxy(self.configuration.http_proxy)
|
||||
self.handler.set_https_proxy(self.configuration.https_proxy)
|
||||
self.handler.set_ftp_proxy(self.configuration.ftp_proxy)
|
||||
self.handler.set_all_proxy(self.configuration.all_proxy)
|
||||
self.handler.set_git_proxy(self.configuration.git_proxy_host, self.configuration.git_proxy_port)
|
||||
self.handler.set_cvs_proxy(self.configuration.cvs_proxy_host, self.configuration.cvs_proxy_port)
|
||||
|
||||
def update_recipe_model(self, selected_image, selected_recipes):
|
||||
self.recipe_model.set_selected_image(selected_image)
|
||||
@@ -739,7 +644,7 @@ class Builder(gtk.Window):
|
||||
|
||||
def show_error_dialog(self, msg):
|
||||
lbl = "<b>Error</b>\n"
|
||||
lbl = lbl + "%s\n\n" % Builder.interpret_markup(msg)
|
||||
lbl = lbl + "%s\n\n" % msg
|
||||
dialog = CrumbsMessageDialog(self, lbl, gtk.STOCK_DIALOG_ERROR)
|
||||
button = dialog.add_button("Close", gtk.RESPONSE_OK)
|
||||
HobButton.style_button(button)
|
||||
@@ -790,7 +695,7 @@ class Builder(gtk.Window):
|
||||
selected_packages = self.configuration.selected_packages[:]
|
||||
|
||||
self.image_configuration_page.update_image_combo(self.recipe_model, selected_image)
|
||||
self.image_configuration_page.update_image_desc()
|
||||
self.image_configuration_page.update_image_desc(selected_image)
|
||||
self.update_recipe_model(selected_image, selected_recipes)
|
||||
self.update_package_model(selected_packages)
|
||||
|
||||
@@ -860,7 +765,7 @@ class Builder(gtk.Window):
|
||||
fraction = 1.0
|
||||
self.parameters.image_names = []
|
||||
selected_image = self.recipe_model.get_selected_image()
|
||||
if selected_image == self.recipe_model.__custom_image__:
|
||||
if selected_image == self.recipe_model.__dummy_image__:
|
||||
linkname = 'hob-image-' + self.configuration.curr_mach
|
||||
else:
|
||||
linkname = selected_image + '-' + self.configuration.curr_mach
|
||||
@@ -886,20 +791,12 @@ class Builder(gtk.Window):
|
||||
message = "Build stopped: "
|
||||
fraction = self.build_details_page.progress_bar.get_fraction()
|
||||
else:
|
||||
fail_to_next_edit = ""
|
||||
if self.current_step == self.FAST_IMAGE_GENERATING:
|
||||
fail_to_next_edit = "image configuration"
|
||||
fraction = 0.9
|
||||
elif self.current_step == self.IMAGE_GENERATING:
|
||||
if self.previous_step == self.FAST_IMAGE_GENERATING:
|
||||
fail_to_next_edit = "image configuration"
|
||||
else:
|
||||
fail_to_next_edit = "packages"
|
||||
fraction = 1.0
|
||||
elif self.current_step == self.PACKAGE_GENERATING:
|
||||
fail_to_next_edit = "recipes"
|
||||
fraction = 1.0
|
||||
self.build_details_page.show_fail_page(fail_to_next_edit.split(' ')[0], fail_to_next_edit)
|
||||
status = "fail"
|
||||
message = "Build failed: "
|
||||
self.build_details_page.update_progress_bar(message, fraction, status)
|
||||
@@ -919,7 +816,7 @@ class Builder(gtk.Window):
|
||||
self.build_failed()
|
||||
|
||||
def handler_no_provider_cb(self, running_build, msg):
|
||||
dialog = CrumbsMessageDialog(self, Builder.interpret_markup(msg), gtk.STOCK_DIALOG_INFO)
|
||||
dialog = CrumbsMessageDialog(self, msg, gtk.STOCK_DIALOG_INFO)
|
||||
button = dialog.add_button("Close", gtk.RESPONSE_OK)
|
||||
HobButton.style_button(button)
|
||||
dialog.run()
|
||||
@@ -1006,7 +903,7 @@ class Builder(gtk.Window):
|
||||
selected_packages = self.package_model.get_selected_packages() or []
|
||||
|
||||
# If no base image and no selected packages don't build anything
|
||||
if not (selected_packages or selected_image != self.recipe_model.__custom_image__):
|
||||
if not (selected_packages or selected_image != self.recipe_model.__dummy_image__):
|
||||
lbl = "<b>No selections made</b>\nYou have not made any selections"
|
||||
lbl = lbl + " so there isn't anything to bake at this time."
|
||||
dialog = CrumbsMessageDialog(self, lbl, gtk.STOCK_DIALOG_INFO)
|
||||
@@ -1112,6 +1009,7 @@ class Builder(gtk.Window):
|
||||
all_distros = self.parameters.all_distros,
|
||||
all_sdk_machines = self.parameters.all_sdk_machines,
|
||||
max_threads = self.parameters.max_threads,
|
||||
enable_proxy = self.parameters.enable_proxy,
|
||||
parent = self,
|
||||
flags = gtk.DIALOG_MODAL
|
||||
| gtk.DIALOG_DESTROY_WITH_PARENT
|
||||
@@ -1123,6 +1021,7 @@ class Builder(gtk.Window):
|
||||
response = dialog.run()
|
||||
settings_changed = False
|
||||
if response == gtk.RESPONSE_YES:
|
||||
self.parameters.enable_proxy = dialog.enable_proxy
|
||||
self.configuration = dialog.configuration
|
||||
self.save_defaults() # remember settings
|
||||
settings_changed = dialog.settings_changed
|
||||
@@ -1161,7 +1060,16 @@ class Builder(gtk.Window):
|
||||
response = dialog.run()
|
||||
dialog.destroy()
|
||||
|
||||
def show_load_kernel_dialog(self):
|
||||
def runqemu_image(self, image_name):
|
||||
if not image_name:
|
||||
lbl = "<b>Please select an image to launch in QEMU.</b>"
|
||||
dialog = CrumbsMessageDialog(self, lbl, gtk.STOCK_DIALOG_INFO)
|
||||
button = dialog.add_button("Close", gtk.RESPONSE_OK)
|
||||
HobButton.style_button(button)
|
||||
dialog.run()
|
||||
dialog.destroy()
|
||||
return
|
||||
|
||||
dialog = gtk.FileChooserDialog("Load Kernel Files", self,
|
||||
gtk.FILE_CHOOSER_ACTION_SAVE)
|
||||
button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
|
||||
@@ -1176,50 +1084,35 @@ class Builder(gtk.Window):
|
||||
dialog.set_current_folder(self.parameters.image_addr)
|
||||
|
||||
response = dialog.run()
|
||||
kernel_path = ""
|
||||
if response == gtk.RESPONSE_YES:
|
||||
kernel_path = dialog.get_filename()
|
||||
|
||||
image_path = os.path.join(self.parameters.image_addr, image_name)
|
||||
dialog.destroy()
|
||||
|
||||
return kernel_path
|
||||
|
||||
def runqemu_image(self, image_name, kernel_name):
|
||||
if not image_name or not kernel_name:
|
||||
lbl = "<b>Please select an %s to launch in QEMU.</b>" % ("kernel" if image_name else "image")
|
||||
dialog = CrumbsMessageDialog(self, lbl, gtk.STOCK_DIALOG_INFO)
|
||||
button = dialog.add_button("Close", gtk.RESPONSE_OK)
|
||||
HobButton.style_button(button)
|
||||
dialog.run()
|
||||
dialog.destroy()
|
||||
return
|
||||
|
||||
kernel_path = os.path.join(self.parameters.image_addr, kernel_name)
|
||||
image_path = os.path.join(self.parameters.image_addr, image_name)
|
||||
|
||||
source_env_path = os.path.join(self.parameters.core_base, "oe-init-build-env")
|
||||
tmp_path = self.parameters.tmpdir
|
||||
cmdline = bb.ui.crumbs.utils.which_terminal()
|
||||
if os.path.exists(image_path) and os.path.exists(kernel_path) \
|
||||
and os.path.exists(source_env_path) and os.path.exists(tmp_path) \
|
||||
and cmdline:
|
||||
cmdline += "\' bash -c \"export OE_TMPDIR=" + tmp_path + "; "
|
||||
cmdline += "source " + source_env_path + " " + os.getcwd() + "; "
|
||||
cmdline += "runqemu " + kernel_path + " " + image_path + "\"\'"
|
||||
subprocess.Popen(shlex.split(cmdline))
|
||||
else:
|
||||
lbl = "<b>Path error</b>\nOne of your paths is wrong,"
|
||||
lbl = lbl + " please make sure the following paths exist:\n"
|
||||
lbl = lbl + "image path:" + image_path + "\n"
|
||||
lbl = lbl + "kernel path:" + kernel_path + "\n"
|
||||
lbl = lbl + "source environment path:" + source_env_path + "\n"
|
||||
lbl = lbl + "tmp path: " + tmp_path + "."
|
||||
lbl = lbl + "You may be missing either xterm or vte for terminal services."
|
||||
dialog = CrumbsMessageDialog(self, lbl, gtk.STOCK_DIALOG_ERROR)
|
||||
button = dialog.add_button("Close", gtk.RESPONSE_OK)
|
||||
HobButton.style_button(button)
|
||||
dialog.run()
|
||||
dialog.destroy()
|
||||
if response == gtk.RESPONSE_YES:
|
||||
source_env_path = os.path.join(self.parameters.core_base, "oe-init-build-env")
|
||||
tmp_path = self.parameters.tmpdir
|
||||
cmdline = bb.ui.crumbs.utils.which_terminal()
|
||||
if os.path.exists(image_path) and os.path.exists(kernel_path) \
|
||||
and os.path.exists(source_env_path) and os.path.exists(tmp_path) \
|
||||
and cmdline:
|
||||
cmdline += "\' bash -c \"export OE_TMPDIR=" + tmp_path + "; "
|
||||
cmdline += "source " + source_env_path + " " + os.getcwd() + "; "
|
||||
cmdline += "runqemu " + kernel_path + " " + image_path + "\"\'"
|
||||
subprocess.Popen(shlex.split(cmdline))
|
||||
else:
|
||||
lbl = "<b>Path error</b>\nOne of your paths is wrong,"
|
||||
lbl = lbl + " please make sure the following paths exist:\n"
|
||||
lbl = lbl + "image path:" + image_path + "\n"
|
||||
lbl = lbl + "kernel path:" + kernel_path + "\n"
|
||||
lbl = lbl + "source environment path:" + source_env_path + "\n"
|
||||
lbl = lbl + "tmp path: " + tmp_path + "."
|
||||
lbl = lbl + "You may be missing either xterm or vte for terminal services."
|
||||
dialog = CrumbsMessageDialog(self, lbl, gtk.STOCK_DIALOG_ERROR)
|
||||
button = dialog.add_button("Close", gtk.RESPONSE_OK)
|
||||
HobButton.style_button(button)
|
||||
dialog.run()
|
||||
dialog.destroy()
|
||||
|
||||
def show_packages(self, ask=True):
|
||||
_, selected_recipes = self.recipe_model.get_selected_recipes()
|
||||
|
||||
@@ -20,18 +20,17 @@
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import glob
|
||||
import gtk
|
||||
import gobject
|
||||
import hashlib
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import shlex
|
||||
from bb.ui.crumbs.hobcolor import HobColors
|
||||
from bb.ui.crumbs.hobwidget import hcc, hic, HobViewTable, HobInfoButton, HobButton, HobAltButton, HobIconChecker
|
||||
from bb.ui.crumbs.progressbar import HobProgressBar
|
||||
import bb.ui.crumbs.utils
|
||||
import bb.process
|
||||
|
||||
"""
|
||||
The following are convenience classes for implementing GNOME HIG compliant
|
||||
@@ -64,7 +63,7 @@ class CrumbsMessageDialog(CrumbsDialog):
|
||||
"""
|
||||
def __init__(self, parent=None, label="", icon=gtk.STOCK_INFO):
|
||||
super(CrumbsMessageDialog, self).__init__("", parent, gtk.DIALOG_DESTROY_WITH_PARENT)
|
||||
|
||||
|
||||
self.set_border_width(6)
|
||||
self.vbox.set_property("spacing", 12)
|
||||
self.action_area.set_property("spacing", 12)
|
||||
@@ -138,8 +137,6 @@ class AdvancedSettingDialog (CrumbsDialog):
|
||||
def entry_widget_select_path_cb(self, action, parent, entry):
|
||||
dialog = gtk.FileChooserDialog("", parent,
|
||||
gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER)
|
||||
text = entry.get_text()
|
||||
dialog.set_current_folder(text if len(text) > 0 else os.getcwd())
|
||||
button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
|
||||
HobAltButton.style_button(button)
|
||||
button = dialog.add_button("Open", gtk.RESPONSE_YES)
|
||||
@@ -175,45 +172,6 @@ class AdvancedSettingDialog (CrumbsDialog):
|
||||
hbox.show_all()
|
||||
return hbox, entry
|
||||
|
||||
def details_cb(self, button, parent, protocol):
|
||||
dialog = ProxyDetailsDialog(title = protocol.upper() + " Proxy Details",
|
||||
user = self.configuration.proxies[protocol][1],
|
||||
passwd = self.configuration.proxies[protocol][2],
|
||||
parent = parent,
|
||||
flags = gtk.DIALOG_MODAL
|
||||
| gtk.DIALOG_DESTROY_WITH_PARENT
|
||||
| gtk.DIALOG_NO_SEPARATOR)
|
||||
dialog.add_button(gtk.STOCK_CLOSE, gtk.RESPONSE_OK)
|
||||
response = dialog.run()
|
||||
if response == gtk.RESPONSE_OK:
|
||||
self.configuration.proxies[protocol][1] = dialog.user
|
||||
self.configuration.proxies[protocol][2] = dialog.passwd
|
||||
self.refresh_proxy_components()
|
||||
dialog.destroy()
|
||||
|
||||
def gen_proxy_entry_widget(self, protocol, parent, need_button=True):
|
||||
hbox = gtk.HBox(False, 12)
|
||||
|
||||
label = gtk.Label(protocol.upper() + " proxy")
|
||||
hbox.pack_start(label, expand=True, fill=False, padding=24)
|
||||
|
||||
proxy_entry = gtk.Entry()
|
||||
proxy_entry.set_size_request(300, -1)
|
||||
hbox.pack_start(proxy_entry, expand=False, fill=False)
|
||||
|
||||
hbox.pack_start(gtk.Label(":"), expand=False, fill=False)
|
||||
|
||||
port_entry = gtk.Entry()
|
||||
port_entry.set_size_request(60, -1)
|
||||
hbox.pack_start(port_entry, expand=False, fill=False)
|
||||
|
||||
details_button = HobAltButton("Details")
|
||||
details_button.connect("clicked", self.details_cb, parent, protocol)
|
||||
hbox.pack_start(details_button, expand=False, fill=False)
|
||||
|
||||
hbox.show_all()
|
||||
return hbox, proxy_entry, port_entry, details_button
|
||||
|
||||
def rootfs_combo_changed_cb(self, rootfs_combo, all_package_format, check_hbox):
|
||||
combo_item = self.rootfs_combo.get_active_text()
|
||||
for child in check_hbox.get_children():
|
||||
@@ -351,7 +309,7 @@ class AdvancedSettingDialog (CrumbsDialog):
|
||||
|
||||
def __init__(self, title, configuration, all_image_types,
|
||||
all_package_formats, all_distros, all_sdk_machines,
|
||||
max_threads, parent, flags, buttons=None):
|
||||
max_threads, enable_proxy, parent, flags, buttons=None):
|
||||
super(AdvancedSettingDialog, self).__init__(title, parent, flags, buttons)
|
||||
|
||||
# class members from other objects
|
||||
@@ -362,6 +320,7 @@ class AdvancedSettingDialog (CrumbsDialog):
|
||||
self.all_distros = all_distros
|
||||
self.all_sdk_machines = all_sdk_machines
|
||||
self.max_threads = max_threads
|
||||
self.enable_proxy = enable_proxy
|
||||
|
||||
# class members for internal use
|
||||
self.distro_combo = None
|
||||
@@ -397,10 +356,15 @@ class AdvancedSettingDialog (CrumbsDialog):
|
||||
data += ("SDK_MACHINE: " + self._get_sorted_value(self.configuration.curr_sdk_machine))
|
||||
data += ("TOOLCHAIN_BUILD: " + self._get_sorted_value(self.configuration.toolchain_build))
|
||||
data += ("IMAGE_FSTYPES: " + self._get_sorted_value(self.configuration.image_fstypes))
|
||||
data += ("ENABLE_PROXY: " + self._get_sorted_value(self.configuration.enable_proxy))
|
||||
if self.configuration.enable_proxy:
|
||||
for protocol in self.configuration.proxies.keys():
|
||||
data += (protocol + ": " + self._get_sorted_value(self.configuration.combine_proxy(protocol)))
|
||||
if self.enable_proxy:
|
||||
data += ("ALL_PROXY: " + self._get_sorted_value(self.configuration.all_proxy))
|
||||
data += ("HTTP_PROXY: " + self._get_sorted_value(self.configuration.http_proxy))
|
||||
data += ("HTTPS_PROXY: " + self._get_sorted_value(self.configuration.https_proxy))
|
||||
data += ("FTP_PROXY: " + self._get_sorted_value(self.configuration.ftp_proxy))
|
||||
data += ("GIT_PROXY_HOST: " + self._get_sorted_value(self.configuration.git_proxy_host))
|
||||
data += ("GIT_PROXY_PORT: " + self._get_sorted_value(self.configuration.git_proxy_port))
|
||||
data += ("CVS_PROXY_HOST: " + self._get_sorted_value(self.configuration.cvs_proxy_host))
|
||||
data += ("CVS_PROXY_PORT: " + self._get_sorted_value(self.configuration.cvs_proxy_port))
|
||||
for key in self.configuration.extra_setting.keys():
|
||||
data += (key + ": " + self._get_sorted_value(self.configuration.extra_setting[key]))
|
||||
return hashlib.md5(data).hexdigest()
|
||||
@@ -565,56 +529,60 @@ class AdvancedSettingDialog (CrumbsDialog):
|
||||
|
||||
sub_vbox = gtk.VBox(False, 6)
|
||||
advanced_vbox.pack_start(sub_vbox, expand=False, fill=False)
|
||||
label = self.gen_label_widget("<span weight=\"bold\">Set the proxies that will be used during fetching source code</span>")
|
||||
tooltip = "Set the proxies that will be used during fetching source code or set none for direct the Internet connection"
|
||||
info = HobInfoButton(tooltip, self)
|
||||
hbox = gtk.HBox(False, 12)
|
||||
hbox.pack_start(label, expand=True, fill=True)
|
||||
hbox.pack_start(info, expand=False, fill=False)
|
||||
sub_vbox.pack_start(hbox, expand=False, fill=False)
|
||||
|
||||
self.direct_checkbox = gtk.RadioButton(None, "Direct internet connection")
|
||||
self.direct_checkbox.set_tooltip_text("Check this box to connect the Internet directly without any proxy")
|
||||
self.direct_checkbox.set_active(not self.configuration.enable_proxy)
|
||||
sub_vbox.pack_start(self.direct_checkbox, expand=False, fill=False)
|
||||
|
||||
self.proxy_checkbox = gtk.RadioButton(self.direct_checkbox, "Manual proxy configuration")
|
||||
self.proxy_checkbox = gtk.CheckButton("Enable proxy")
|
||||
self.proxy_checkbox.set_tooltip_text("Check this box to setup the proxy you specified")
|
||||
self.proxy_checkbox.set_active(self.configuration.enable_proxy)
|
||||
self.proxy_checkbox.set_active(self.enable_proxy)
|
||||
self.proxy_checkbox.connect("toggled", self.proxy_checkbox_toggled_cb)
|
||||
sub_vbox.pack_start(self.proxy_checkbox, expand=False, fill=False)
|
||||
|
||||
self.same_checkbox = gtk.CheckButton("Use the same proxy for all protocols")
|
||||
self.same_checkbox.set_tooltip_text("Use the same proxy as the first proxy i.e. http proxy for all protocols")
|
||||
self.same_checkbox.set_active(self.configuration.same_proxy)
|
||||
hbox = gtk.HBox(False, 12)
|
||||
hbox.pack_start(self.same_checkbox, expand=False, fill=False, padding=24)
|
||||
sub_vbox.pack_start(hbox, expand=False, fill=False)
|
||||
|
||||
proxy_widget, self.http_proxy, self.http_proxy_port, self.http_proxy_details = self.gen_proxy_entry_widget(
|
||||
"http", self, True)
|
||||
label = self.gen_label_widget("<span weight=\"bold\">Set all proxy:</span>")
|
||||
tooltip = "Set the all proxy that will be used if the proxy for a URL isn't specified."
|
||||
proxy_widget, self.all_proxy_text = self.gen_entry_widget(self.configuration.all_proxy, self, tooltip, False)
|
||||
self.all_proxy_text.set_editable(self.enable_proxy)
|
||||
self.all_proxy_text.set_sensitive(self.enable_proxy)
|
||||
sub_vbox.pack_start(label, expand=False, fill=False)
|
||||
sub_vbox.pack_start(proxy_widget, expand=False, fill=False)
|
||||
|
||||
proxy_widget, self.https_proxy, self.https_proxy_port, self.https_proxy_details = self.gen_proxy_entry_widget(
|
||||
"https", self, True)
|
||||
label = self.gen_label_widget("<span weight=\"bold\">Set http proxy:</span>")
|
||||
tooltip = "Set the http proxy that will be used in do_fetch() source code"
|
||||
proxy_widget, self.http_proxy_text = self.gen_entry_widget(self.configuration.http_proxy, self, tooltip, False)
|
||||
self.http_proxy_text.set_editable(self.enable_proxy)
|
||||
self.http_proxy_text.set_sensitive(self.enable_proxy)
|
||||
sub_vbox.pack_start(label, expand=False, fill=False)
|
||||
sub_vbox.pack_start(proxy_widget, expand=False, fill=False)
|
||||
|
||||
proxy_widget, self.ftp_proxy, self.ftp_proxy_port, self.ftp_proxy_details = self.gen_proxy_entry_widget(
|
||||
"ftp", self, True)
|
||||
label = self.gen_label_widget("<span weight=\"bold\">Set https proxy:</span>")
|
||||
tooltip = "Set the https proxy that will be used in do_fetch() source code"
|
||||
proxy_widget, self.https_proxy_text = self.gen_entry_widget(self.configuration.https_proxy, self, tooltip, False)
|
||||
self.https_proxy_text.set_editable(self.enable_proxy)
|
||||
self.https_proxy_text.set_sensitive(self.enable_proxy)
|
||||
sub_vbox.pack_start(label, expand=False, fill=False)
|
||||
sub_vbox.pack_start(proxy_widget, expand=False, fill=False)
|
||||
|
||||
proxy_widget, self.git_proxy, self.git_proxy_port, self.git_proxy_details = self.gen_proxy_entry_widget(
|
||||
"git", self, True)
|
||||
label = self.gen_label_widget("<span weight=\"bold\">Set ftp proxy:</span>")
|
||||
tooltip = "Set the ftp proxy that will be used in do_fetch() source code"
|
||||
proxy_widget, self.ftp_proxy_text = self.gen_entry_widget(self.configuration.ftp_proxy, self, tooltip, False)
|
||||
self.ftp_proxy_text.set_editable(self.enable_proxy)
|
||||
self.ftp_proxy_text.set_sensitive(self.enable_proxy)
|
||||
sub_vbox.pack_start(label, expand=False, fill=False)
|
||||
sub_vbox.pack_start(proxy_widget, expand=False, fill=False)
|
||||
|
||||
proxy_widget, self.cvs_proxy, self.cvs_proxy_port, self.cvs_proxy_details = self.gen_proxy_entry_widget(
|
||||
"cvs", self, True)
|
||||
label = self.gen_label_widget("<span weight=\"bold\">Set git proxy:</span>")
|
||||
tooltip = "Set the git proxy that will be used in do_fetch() source code"
|
||||
proxy_widget, self.git_proxy_text = self.gen_entry_widget(self.configuration.git_proxy_host + ':' + self.configuration.git_proxy_port, self, tooltip, False)
|
||||
self.git_proxy_text.set_editable(self.enable_proxy)
|
||||
self.git_proxy_text.set_sensitive(self.enable_proxy)
|
||||
sub_vbox.pack_start(label, expand=False, fill=False)
|
||||
sub_vbox.pack_start(proxy_widget, expand=False, fill=False)
|
||||
|
||||
self.direct_checkbox.connect("toggled", self.proxy_checkbox_toggled_cb)
|
||||
self.proxy_checkbox.connect("toggled", self.proxy_checkbox_toggled_cb)
|
||||
self.same_checkbox.connect("toggled", self.same_checkbox_toggled_cb)
|
||||
label = self.gen_label_widget("<span weight=\"bold\">Set cvs proxy:</span>")
|
||||
tooltip = "Set the cvs proxy that will be used in do_fetch() source code"
|
||||
proxy_widget, self.cvs_proxy_text = self.gen_entry_widget(self.configuration.cvs_proxy_host + ':' + self.configuration.cvs_proxy_port, self, tooltip, False)
|
||||
self.cvs_proxy_text.set_editable(self.enable_proxy)
|
||||
self.cvs_proxy_text.set_sensitive(self.enable_proxy)
|
||||
sub_vbox.pack_start(label, expand=False, fill=False)
|
||||
sub_vbox.pack_start(proxy_widget, expand=False, fill=False)
|
||||
|
||||
self.refresh_proxy_components()
|
||||
return advanced_vbox
|
||||
|
||||
def create_others_page(self):
|
||||
@@ -631,59 +599,20 @@ class AdvancedSettingDialog (CrumbsDialog):
|
||||
|
||||
return advanced_vbox
|
||||
|
||||
def refresh_proxy_components(self):
|
||||
self.same_checkbox.set_sensitive(self.configuration.enable_proxy)
|
||||
|
||||
self.http_proxy.set_text(self.configuration.combine_host_only("http"))
|
||||
self.http_proxy.set_editable(self.configuration.enable_proxy)
|
||||
self.http_proxy.set_sensitive(self.configuration.enable_proxy)
|
||||
self.http_proxy_port.set_text(self.configuration.combine_port_only("http"))
|
||||
self.http_proxy_port.set_editable(self.configuration.enable_proxy)
|
||||
self.http_proxy_port.set_sensitive(self.configuration.enable_proxy)
|
||||
self.http_proxy_details.set_sensitive(self.configuration.enable_proxy)
|
||||
|
||||
self.https_proxy.set_text(self.configuration.combine_host_only("https"))
|
||||
self.https_proxy.set_editable(self.configuration.enable_proxy and (not self.configuration.same_proxy))
|
||||
self.https_proxy.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
|
||||
self.https_proxy_port.set_text(self.configuration.combine_port_only("https"))
|
||||
self.https_proxy_port.set_editable(self.configuration.enable_proxy and (not self.configuration.same_proxy))
|
||||
self.https_proxy_port.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
|
||||
self.https_proxy_details.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
|
||||
|
||||
self.ftp_proxy.set_text(self.configuration.combine_host_only("ftp"))
|
||||
self.ftp_proxy.set_editable(self.configuration.enable_proxy and (not self.configuration.same_proxy))
|
||||
self.ftp_proxy.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
|
||||
self.ftp_proxy_port.set_text(self.configuration.combine_port_only("ftp"))
|
||||
self.ftp_proxy_port.set_editable(self.configuration.enable_proxy and (not self.configuration.same_proxy))
|
||||
self.ftp_proxy_port.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
|
||||
self.ftp_proxy_details.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
|
||||
|
||||
self.git_proxy.set_text(self.configuration.combine_host_only("git"))
|
||||
self.git_proxy.set_editable(self.configuration.enable_proxy and (not self.configuration.same_proxy))
|
||||
self.git_proxy.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
|
||||
self.git_proxy_port.set_text(self.configuration.combine_port_only("git"))
|
||||
self.git_proxy_port.set_editable(self.configuration.enable_proxy and (not self.configuration.same_proxy))
|
||||
self.git_proxy_port.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
|
||||
self.git_proxy_details.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
|
||||
|
||||
self.cvs_proxy.set_text(self.configuration.combine_host_only("cvs"))
|
||||
self.cvs_proxy.set_editable(self.configuration.enable_proxy and (not self.configuration.same_proxy))
|
||||
self.cvs_proxy.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
|
||||
self.cvs_proxy_port.set_text(self.configuration.combine_port_only("cvs"))
|
||||
self.cvs_proxy_port.set_editable(self.configuration.enable_proxy and (not self.configuration.same_proxy))
|
||||
self.cvs_proxy_port.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
|
||||
self.cvs_proxy_details.set_sensitive(self.configuration.enable_proxy and (not self.configuration.same_proxy))
|
||||
|
||||
def proxy_checkbox_toggled_cb(self, button):
|
||||
self.configuration.enable_proxy = self.proxy_checkbox.get_active()
|
||||
if not self.configuration.enable_proxy:
|
||||
self.configuration.same_proxy = False
|
||||
self.same_checkbox.set_active(self.configuration.same_proxy)
|
||||
self.refresh_proxy_components()
|
||||
|
||||
def same_checkbox_toggled_cb(self, button):
|
||||
self.configuration.same_proxy = self.same_checkbox.get_active()
|
||||
self.refresh_proxy_components()
|
||||
self.enable_proxy = self.proxy_checkbox.get_active()
|
||||
self.all_proxy_text.set_editable(self.enable_proxy)
|
||||
self.all_proxy_text.set_sensitive(self.enable_proxy)
|
||||
self.http_proxy_text.set_editable(self.enable_proxy)
|
||||
self.http_proxy_text.set_sensitive(self.enable_proxy)
|
||||
self.https_proxy_text.set_editable(self.enable_proxy)
|
||||
self.https_proxy_text.set_sensitive(self.enable_proxy)
|
||||
self.ftp_proxy_text.set_editable(self.enable_proxy)
|
||||
self.ftp_proxy_text.set_sensitive(self.enable_proxy)
|
||||
self.git_proxy_text.set_editable(self.enable_proxy)
|
||||
self.git_proxy_text.set_sensitive(self.enable_proxy)
|
||||
self.cvs_proxy_text.set_editable(self.enable_proxy)
|
||||
self.cvs_proxy_text.set_sensitive(self.enable_proxy)
|
||||
|
||||
def response_cb(self, dialog, response_id):
|
||||
package_format = []
|
||||
@@ -727,17 +656,12 @@ class AdvancedSettingDialog (CrumbsDialog):
|
||||
self.configuration.extra_setting[key] = value
|
||||
it = self.setting_store.iter_next(it)
|
||||
|
||||
self.configuration.split_proxy("http", self.http_proxy.get_text() + ":" + self.http_proxy_port.get_text())
|
||||
if self.configuration.same_proxy:
|
||||
self.configuration.split_proxy("https", self.http_proxy.get_text() + ":" + self.http_proxy_port.get_text())
|
||||
self.configuration.split_proxy("ftp", self.http_proxy.get_text() + ":" + self.http_proxy_port.get_text())
|
||||
self.configuration.split_proxy("git", self.http_proxy.get_text() + ":" + self.http_proxy_port.get_text())
|
||||
self.configuration.split_proxy("cvs", self.http_proxy.get_text() + ":" + self.http_proxy_port.get_text())
|
||||
else:
|
||||
self.configuration.split_proxy("https", self.https_proxy.get_text() + ":" + self.https_proxy_port.get_text())
|
||||
self.configuration.split_proxy("ftp", self.ftp_proxy.get_text() + ":" + self.ftp_proxy_port.get_text())
|
||||
self.configuration.split_proxy("git", self.git_proxy.get_text() + ":" + self.git_proxy_port.get_text())
|
||||
self.configuration.split_proxy("cvs", self.cvs_proxy.get_text() + ":" + self.cvs_proxy_port.get_text())
|
||||
self.configuration.all_proxy = self.all_proxy_text.get_text()
|
||||
self.configuration.http_proxy = self.http_proxy_text.get_text()
|
||||
self.configuration.https_proxy = self.https_proxy_text.get_text()
|
||||
self.configuration.ftp_proxy = self.ftp_proxy_text.get_text()
|
||||
self.configuration.git_proxy_host, self.configuration.git_proxy_port = self.git_proxy_text.get_text().split(':')
|
||||
self.configuration.cvs_proxy_host, self.configuration.cvs_proxy_port = self.cvs_proxy_text.get_text().split(':')
|
||||
|
||||
md5 = self.config_md5()
|
||||
self.settings_changed = (self.md5 != md5)
|
||||
@@ -749,28 +673,21 @@ class DeployImageDialog (CrumbsDialog):
|
||||
|
||||
__dummy_usb__ = "--select a usb drive--"
|
||||
|
||||
def __init__(self, title, image_path, parent, flags, buttons=None, standalone=False):
|
||||
def __init__(self, title, image_path, parent, flags, buttons=None):
|
||||
super(DeployImageDialog, self).__init__(title, parent, flags, buttons)
|
||||
|
||||
self.image_path = image_path
|
||||
self.standalone = standalone
|
||||
|
||||
self.create_visual_elements()
|
||||
self.connect("response", self.response_cb)
|
||||
|
||||
def create_visual_elements(self):
|
||||
self.set_size_request(600, 400)
|
||||
label = gtk.Label()
|
||||
label.set_alignment(0.0, 0.5)
|
||||
markup = "<span font_desc='12'>The image to be written into usb drive:</span>"
|
||||
label.set_markup(markup)
|
||||
self.vbox.pack_start(label, expand=False, fill=False, padding=2)
|
||||
|
||||
table = gtk.Table(2, 10, False)
|
||||
table.set_col_spacings(5)
|
||||
table.set_row_spacings(5)
|
||||
self.vbox.pack_start(table, expand=True, fill=True)
|
||||
|
||||
scroll = gtk.ScrolledWindow()
|
||||
scroll.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC)
|
||||
scroll.set_shadow_type(gtk.SHADOW_IN)
|
||||
@@ -778,26 +695,11 @@ class DeployImageDialog (CrumbsDialog):
|
||||
tv.set_editable(False)
|
||||
tv.set_wrap_mode(gtk.WRAP_WORD)
|
||||
tv.set_cursor_visible(False)
|
||||
self.buf = gtk.TextBuffer()
|
||||
self.buf.set_text(self.image_path)
|
||||
tv.set_buffer(self.buf)
|
||||
buf = gtk.TextBuffer()
|
||||
buf.set_text(self.image_path)
|
||||
tv.set_buffer(buf)
|
||||
scroll.add(tv)
|
||||
table.attach(scroll, 0, 10, 0, 1)
|
||||
|
||||
if self.standalone:
|
||||
gobject.signal_new("select_image_clicked", self, gobject.SIGNAL_RUN_FIRST,
|
||||
gobject.TYPE_NONE, ())
|
||||
icon = gtk.Image()
|
||||
pix_buffer = gtk.gdk.pixbuf_new_from_file(hic.ICON_IMAGES_DISPLAY_FILE)
|
||||
icon.set_from_pixbuf(pix_buffer)
|
||||
button = gtk.Button("Select Image")
|
||||
button.set_image(icon)
|
||||
button.set_size_request(140, 50)
|
||||
table.attach(button, 9, 10, 1, 2, gtk.FILL, 0, 0, 0)
|
||||
button.connect("clicked", self.select_image_button_clicked_cb)
|
||||
|
||||
separator = gtk.HSeparator()
|
||||
self.vbox.pack_start(separator, expand=False, fill=False, padding=10)
|
||||
self.vbox.pack_start(scroll, expand=True, fill=True)
|
||||
|
||||
self.usb_desc = gtk.Label()
|
||||
self.usb_desc.set_alignment(0.0, 0.5)
|
||||
@@ -812,7 +714,7 @@ class DeployImageDialog (CrumbsDialog):
|
||||
for usb in self.find_all_usb_devices():
|
||||
self.usb_combo.append_text("/dev/" + usb)
|
||||
self.usb_combo.set_active(0)
|
||||
self.vbox.pack_start(self.usb_combo, expand=False, fill=False)
|
||||
self.vbox.pack_start(self.usb_combo, expand=True, fill=True)
|
||||
self.vbox.pack_start(self.usb_desc, expand=False, fill=False, padding=2)
|
||||
|
||||
self.progress_bar = HobProgressBar()
|
||||
@@ -823,19 +725,12 @@ class DeployImageDialog (CrumbsDialog):
|
||||
self.vbox.show_all()
|
||||
self.progress_bar.hide()
|
||||
|
||||
def set_image_text_buffer(self, image_path):
|
||||
self.buf.set_text(image_path)
|
||||
|
||||
def set_image_path(self, image_path):
|
||||
self.image_path = image_path
|
||||
|
||||
def popen_read(self, cmd):
|
||||
tmpout, errors = bb.process.run("%s" % cmd)
|
||||
return tmpout.strip()
|
||||
return os.popen("%s 2>/dev/null" % cmd).read().strip()
|
||||
|
||||
def find_all_usb_devices(self):
|
||||
usb_devs = [ os.readlink(u)
|
||||
for u in glob.glob('/dev/disk/by-id/usb*')
|
||||
for u in self.popen_read('ls /dev/disk/by-id/usb*').split()
|
||||
if not re.search(r'part\d+', u) ]
|
||||
return [ '%s' % u[u.rfind('/')+1:] for u in usb_devs ]
|
||||
|
||||
@@ -844,9 +739,6 @@ class DeployImageDialog (CrumbsDialog):
|
||||
(self.popen_read('cat /sys/class/block/%s/device/vendor' % dev),
|
||||
self.popen_read('cat /sys/class/block/%s/device/model' % dev))
|
||||
|
||||
def select_image_button_clicked_cb(self, button):
|
||||
self.emit('select_image_clicked')
|
||||
|
||||
def usb_combo_changed_cb(self, usb_combo):
|
||||
combo_item = self.usb_combo.get_active_text()
|
||||
if not combo_item or combo_item == self.__dummy_usb__:
|
||||
@@ -858,38 +750,12 @@ class DeployImageDialog (CrumbsDialog):
|
||||
|
||||
def response_cb(self, dialog, response_id):
|
||||
if response_id == gtk.RESPONSE_YES:
|
||||
lbl = ''
|
||||
combo_item = self.usb_combo.get_active_text()
|
||||
if combo_item and combo_item != self.__dummy_usb__ and self.image_path:
|
||||
if combo_item and combo_item != self.__dummy_usb__:
|
||||
cmdline = bb.ui.crumbs.utils.which_terminal()
|
||||
if cmdline:
|
||||
tmpname = os.tmpnam()
|
||||
cmdline += "\"sudo dd if=" + self.image_path + \
|
||||
" of=" + combo_item + "; echo $? > " + tmpname + "\""
|
||||
deploy_process = bb.process.Popen(shlex.split(cmdline))
|
||||
deploy_process.wait()
|
||||
|
||||
# if file tmpname not exists, that means there is something wrong with xterm
|
||||
# user can get the error message from xterm so no more warning need.
|
||||
if os.path.exists(tmpname):
|
||||
tmpfile = open(tmpname)
|
||||
if int(tmpfile.readline().strip()) == 0:
|
||||
lbl = "<b>Deploy image successfully</b>"
|
||||
else:
|
||||
lbl = "<b>Deploy image failed</b>\nPlease try again."
|
||||
tmpfile.close()
|
||||
os.remove(tmpname)
|
||||
else:
|
||||
if not self.image_path:
|
||||
lbl = "<b>No selection made</b>\nYou have not selected an image to deploy"
|
||||
else:
|
||||
lbl = "<b>No selection made</b>\nYou have not selected USB device"
|
||||
if len(lbl):
|
||||
crumbs_dialog = CrumbsMessageDialog(self, lbl, gtk.STOCK_DIALOG_INFO)
|
||||
button = crumbs_dialog.add_button("Close", gtk.RESPONSE_OK)
|
||||
HobButton.style_button(button)
|
||||
crumbs_dialog.run()
|
||||
crumbs_dialog.destroy()
|
||||
cmdline += "\"sudo dd if=" + self.image_path + " of=" + combo_item + "\""
|
||||
subprocess.Popen(args=shlex.split(cmdline))
|
||||
|
||||
def update_progress_bar(self, title, fraction, status=None):
|
||||
self.progress_bar.update(fraction)
|
||||
@@ -1093,7 +959,7 @@ class LayerSelectionDialog (CrumbsDialog):
|
||||
# create visual elements on the dialog
|
||||
self.create_visual_elements()
|
||||
self.connect("response", self.response_cb)
|
||||
|
||||
|
||||
def create_visual_elements(self):
|
||||
layer_widget, self.layer_store = self.gen_layer_widget(self.layers, self.all_layers, self, None)
|
||||
layer_widget.set_size_request(450, 250)
|
||||
@@ -1207,13 +1073,12 @@ class ImageSelectionDialog (CrumbsDialog):
|
||||
self.image_table = HobViewTable(self.__columns__)
|
||||
self.image_table.set_size_request(-1, 300)
|
||||
self.image_table.connect("toggled", self.toggled_cb)
|
||||
self.image_table.connect_group_selection(self.table_selected_cb)
|
||||
self.image_table.connect("row-activated", self.row_actived_cb)
|
||||
self.vbox.pack_start(self.image_table, expand=True, fill=True)
|
||||
|
||||
self.show_all()
|
||||
|
||||
def change_image_cb(self, model, path, columnid):
|
||||
def toggled_cb(self, table, cell, path, columnid, tree):
|
||||
model = tree.get_model()
|
||||
if not model:
|
||||
return
|
||||
iter = model.get_iter_first()
|
||||
@@ -1224,24 +1089,9 @@ class ImageSelectionDialog (CrumbsDialog):
|
||||
|
||||
model[path][columnid] = True
|
||||
|
||||
def toggled_cb(self, table, cell, path, columnid, tree):
|
||||
model = tree.get_model()
|
||||
self.change_image_cb(model, path, columnid)
|
||||
|
||||
def table_selected_cb(self, selection):
|
||||
model, paths = selection.get_selected_rows()
|
||||
if paths:
|
||||
self.change_image_cb(model, paths[0], 1)
|
||||
|
||||
def row_actived_cb(self, tab, model, path):
|
||||
self.change_image_cb(model, path, 1)
|
||||
self.emit('response', gtk.RESPONSE_YES)
|
||||
|
||||
def select_path_cb(self, action, parent, entry):
|
||||
dialog = gtk.FileChooserDialog("", parent,
|
||||
gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER)
|
||||
text = entry.get_text()
|
||||
dialog.set_current_folder(text if len(text) > 0 else os.getcwd())
|
||||
button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
|
||||
HobAltButton.style_button(button)
|
||||
button = dialog.add_button("Open", gtk.RESPONSE_YES)
|
||||
@@ -1268,7 +1118,7 @@ class ImageSelectionDialog (CrumbsDialog):
|
||||
if f.endswith('.' + real_image_type):
|
||||
imageset.add(f.rsplit('.' + real_image_type)[0].rsplit('.rootfs')[0])
|
||||
self.image_list.append(f)
|
||||
|
||||
|
||||
for image in imageset:
|
||||
self.image_store.set(self.image_store.append(), 0, image, 1, False)
|
||||
|
||||
@@ -1284,65 +1134,5 @@ class ImageSelectionDialog (CrumbsDialog):
|
||||
for f in self.image_list:
|
||||
if f.startswith(self.image_store[path][0] + '.'):
|
||||
self.image_names.append(f)
|
||||
break
|
||||
break
|
||||
iter = self.image_store.iter_next(iter)
|
||||
|
||||
class ProxyDetailsDialog (CrumbsDialog):
|
||||
|
||||
def __init__(self, title, user, passwd, parent, flags, buttons=None):
|
||||
super(ProxyDetailsDialog, self).__init__(title, parent, flags, buttons)
|
||||
self.connect("response", self.response_cb)
|
||||
|
||||
self.auth = not (user == None or passwd == None or user == "")
|
||||
self.user = user or ""
|
||||
self.passwd = passwd or ""
|
||||
|
||||
# create visual elements on the dialog
|
||||
self.create_visual_elements()
|
||||
|
||||
def create_visual_elements(self):
|
||||
self.auth_checkbox = gtk.CheckButton("Use authentication")
|
||||
self.auth_checkbox.set_tooltip_text("Check this box to set the username and the password")
|
||||
self.auth_checkbox.set_active(self.auth)
|
||||
self.auth_checkbox.connect("toggled", self.auth_checkbox_toggled_cb)
|
||||
self.vbox.pack_start(self.auth_checkbox, expand=False, fill=False)
|
||||
|
||||
hbox = gtk.HBox(False, 6)
|
||||
self.user_label = gtk.Label("Username:")
|
||||
self.user_text = gtk.Entry()
|
||||
self.user_text.set_text(self.user)
|
||||
hbox.pack_start(self.user_label, expand=False, fill=False)
|
||||
hbox.pack_end(self.user_text, expand=False, fill=False)
|
||||
self.vbox.pack_start(hbox, expand=False, fill=False)
|
||||
|
||||
hbox = gtk.HBox(False, 6)
|
||||
self.passwd_label = gtk.Label("Password:")
|
||||
self.passwd_text = gtk.Entry()
|
||||
self.passwd_text.set_text(self.passwd)
|
||||
hbox.pack_start(self.passwd_label, expand=False, fill=False)
|
||||
hbox.pack_end(self.passwd_text, expand=False, fill=False)
|
||||
self.vbox.pack_start(hbox, expand=False, fill=False)
|
||||
|
||||
self.refresh_auth_components()
|
||||
self.show_all()
|
||||
|
||||
def refresh_auth_components(self):
|
||||
self.user_label.set_sensitive(self.auth)
|
||||
self.user_text.set_editable(self.auth)
|
||||
self.user_text.set_sensitive(self.auth)
|
||||
self.passwd_label.set_sensitive(self.auth)
|
||||
self.passwd_text.set_editable(self.auth)
|
||||
self.passwd_text.set_sensitive(self.auth)
|
||||
|
||||
def auth_checkbox_toggled_cb(self, button):
|
||||
self.auth = self.auth_checkbox.get_active()
|
||||
self.refresh_auth_components()
|
||||
|
||||
def response_cb(self, dialog, response_id):
|
||||
if response_id == gtk.RESPONSE_OK:
|
||||
if self.auth:
|
||||
self.user = self.user_text.get_text()
|
||||
self.passwd = self.passwd_text.get_text()
|
||||
else:
|
||||
self.user = None
|
||||
self.passwd = None
|
||||
|
||||
@@ -318,6 +318,9 @@ class HobHandler(gobject.GObject):
|
||||
def set_ftp_proxy(self, ftp_proxy):
|
||||
self.runCommand(["setVariable", "ftp_proxy", ftp_proxy])
|
||||
|
||||
def set_all_proxy(self, all_proxy):
|
||||
self.runCommand(["setVariable", "all_proxy", all_proxy])
|
||||
|
||||
def set_git_proxy(self, host, port):
|
||||
self.runCommand(["setVariable", "GIT_PROXY_HOST", host])
|
||||
self.runCommand(["setVariable", "GIT_PROXY_PORT", port])
|
||||
@@ -347,7 +350,7 @@ class HobHandler(gobject.GObject):
|
||||
self.commands_async.append(self.SUB_PARSE_CONFIG)
|
||||
self.commands_async.append(self.SUB_GNERATE_TGTS)
|
||||
self.run_next_command(self.GENERATE_RECIPES)
|
||||
|
||||
|
||||
def generate_packages(self, tgts, default_task="build"):
|
||||
targets = []
|
||||
targets.extend(tgts)
|
||||
@@ -492,7 +495,6 @@ class HobHandler(gobject.GObject):
|
||||
params["runnable_image_types"] = self._remove_redundant(self.runCommand(["getVariable", "RUNNABLE_IMAGE_TYPES"]) or "")
|
||||
params["runnable_machine_patterns"] = self._remove_redundant(self.runCommand(["getVariable", "RUNNABLE_MACHINE_PATTERNS"]) or "")
|
||||
params["deployable_image_types"] = self._remove_redundant(self.runCommand(["getVariable", "DEPLOYABLE_IMAGE_TYPES"]) or "")
|
||||
params["kernel_image_type"] = self.runCommand(["getVariable", "KERNEL_IMAGETYPE"]) or ""
|
||||
params["tmpdir"] = self.runCommand(["getVariable", "TMPDIR"]) or ""
|
||||
params["distro_version"] = self.runCommand(["getVariable", "DISTRO_VERSION"]) or ""
|
||||
params["target_os"] = self.runCommand(["getVariable", "TARGET_OS"]) or ""
|
||||
@@ -508,10 +510,9 @@ class HobHandler(gobject.GObject):
|
||||
params["http_proxy"] = self.runCommand(["getVariable", "http_proxy"]) or ""
|
||||
params["ftp_proxy"] = self.runCommand(["getVariable", "ftp_proxy"]) or ""
|
||||
params["https_proxy"] = self.runCommand(["getVariable", "https_proxy"]) or ""
|
||||
params["all_proxy"] = self.runCommand(["getVariable", "all_proxy"]) or ""
|
||||
|
||||
params["cvs_proxy_host"] = self.runCommand(["getVariable", "CVS_PROXY_HOST"]) or ""
|
||||
params["cvs_proxy_port"] = self.runCommand(["getVariable", "CVS_PROXY_PORT"]) or ""
|
||||
|
||||
params["image_white_pattern"] = self.runCommand(["getVariable", "BBUI_IMAGE_WHITE_PATTERN"]) or ""
|
||||
params["image_black_pattern"] = self.runCommand(["getVariable", "BBUI_IMAGE_BLACK_PATTERN"]) or ""
|
||||
return params
|
||||
|
||||
@@ -34,7 +34,7 @@ class PackageListModel(gtk.TreeStore):
|
||||
providing convenience functions to access gtk.TreeModel subclasses which
|
||||
provide filtered views of the data.
|
||||
"""
|
||||
(COL_NAME, COL_VER, COL_REV, COL_RNM, COL_SEC, COL_SUM, COL_RDEP, COL_RPROV, COL_SIZE, COL_BINB, COL_INC, COL_FADE_INC, COL_FONT) = range(13)
|
||||
(COL_NAME, COL_VER, COL_REV, COL_RNM, COL_SEC, COL_SUM, COL_RDEP, COL_RPROV, COL_SIZE, COL_BINB, COL_INC, COL_FADE_INC) = range(12)
|
||||
|
||||
__gsignals__ = {
|
||||
"package-selection-changed" : (gobject.SIGNAL_RUN_LAST,
|
||||
@@ -65,8 +65,7 @@ class PackageListModel(gtk.TreeStore):
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_STRING,
|
||||
gobject.TYPE_BOOLEAN,
|
||||
gobject.TYPE_BOOLEAN,
|
||||
gobject.TYPE_STRING)
|
||||
gobject.TYPE_BOOLEAN)
|
||||
|
||||
|
||||
"""
|
||||
@@ -190,7 +189,7 @@ class PackageListModel(gtk.TreeStore):
|
||||
self.COL_SEC, section, self.COL_SUM, summary,
|
||||
self.COL_RDEP, rdep + ' ' + rrec,
|
||||
self.COL_RPROV, rprov, self.COL_SIZE, size,
|
||||
self.COL_BINB, "", self.COL_INC, False, self.COL_FONT, '10')
|
||||
self.COL_BINB, "", self.COL_INC, False)
|
||||
|
||||
"""
|
||||
Check whether the item at item_path is included or not
|
||||
@@ -456,7 +455,7 @@ class RecipeListModel(gtk.ListStore):
|
||||
"""
|
||||
(COL_NAME, COL_DESC, COL_LIC, COL_GROUP, COL_DEPS, COL_BINB, COL_TYPE, COL_INC, COL_IMG, COL_INSTALL, COL_PN, COL_FADE_INC) = range(12)
|
||||
|
||||
__custom_image__ = "Create your own image"
|
||||
__dummy_image__ = "Create your own image"
|
||||
|
||||
__gsignals__ = {
|
||||
"recipe-selection-changed" : (gobject.SIGNAL_RUN_LAST,
|
||||
@@ -565,14 +564,14 @@ class RecipeListModel(gtk.ListStore):
|
||||
self.clear()
|
||||
|
||||
# dummy image for prompt
|
||||
self.set(self.append(), self.COL_NAME, self.__custom_image__,
|
||||
self.set(self.append(), self.COL_NAME, self.__dummy_image__,
|
||||
self.COL_DESC, "Use the 'View recipes' and 'View packages' " \
|
||||
"options to select what you want to include " \
|
||||
"in your image.",
|
||||
self.COL_LIC, "", self.COL_GROUP, "",
|
||||
self.COL_DEPS, "", self.COL_BINB, "",
|
||||
self.COL_TYPE, "image", self.COL_INC, False,
|
||||
self.COL_IMG, False, self.COL_INSTALL, "", self.COL_PN, self.__custom_image__)
|
||||
self.COL_IMG, False, self.COL_INSTALL, "", self.COL_PN, self.__dummy_image__)
|
||||
|
||||
for item in event_model["pn"]:
|
||||
name = item
|
||||
|
||||
@@ -23,7 +23,6 @@ import os
|
||||
import os.path
|
||||
import sys
|
||||
import pango, pangocairo
|
||||
import cairo
|
||||
import math
|
||||
|
||||
from bb.ui.crumbs.hobcolor import HobColors
|
||||
@@ -120,7 +119,6 @@ class HobViewTable (gtk.VBox):
|
||||
self.table_tree.set_headers_clickable(True)
|
||||
self.table_tree.set_enable_search(True)
|
||||
self.table_tree.set_rules_hint(True)
|
||||
self.table_tree.set_enable_tree_lines(True)
|
||||
self.table_tree.get_selection().set_mode(gtk.SELECTION_SINGLE)
|
||||
self.toggle_columns = []
|
||||
self.table_tree.connect("row-activated", self.row_activated_cb)
|
||||
@@ -142,8 +140,6 @@ class HobViewTable (gtk.VBox):
|
||||
cell = gtk.CellRendererText()
|
||||
col.pack_start(cell, True)
|
||||
col.set_attributes(cell, text=column['col_id'])
|
||||
if 'col_t_id' in column.keys():
|
||||
col.add_attribute(cell, 'font', column['col_t_id'])
|
||||
elif column['col_style'] == 'check toggle':
|
||||
cell = HobCellRendererToggle()
|
||||
cell.set_property('activatable', True)
|
||||
@@ -153,8 +149,6 @@ class HobViewTable (gtk.VBox):
|
||||
col.pack_end(cell, True)
|
||||
col.set_attributes(cell, active=column['col_id'])
|
||||
self.toggle_columns.append(column['col_name'])
|
||||
if 'col_group' in column.keys():
|
||||
col.set_cell_data_func(cell, self.set_group_number_cb)
|
||||
elif column['col_style'] == 'radio toggle':
|
||||
cell = gtk.CellRendererToggle()
|
||||
cell.set_property('activatable', True)
|
||||
@@ -168,8 +162,6 @@ class HobViewTable (gtk.VBox):
|
||||
cell = gtk.CellRendererText()
|
||||
col.pack_start(cell, True)
|
||||
col.set_cell_data_func(cell, self.display_binb_cb, column['col_id'])
|
||||
if 'col_t_id' in column.keys():
|
||||
col.add_attribute(cell, 'font', column['col_t_id'])
|
||||
|
||||
scroll = gtk.ScrolledWindow()
|
||||
scroll.set_policy(gtk.POLICY_NEVER, gtk.POLICY_ALWAYS)
|
||||
@@ -212,15 +204,6 @@ class HobViewTable (gtk.VBox):
|
||||
def stop_cell_fadeinout_cb(self, ctrl, cell, tree):
|
||||
self.emit("cell-fadeinout-stopped", ctrl, cell, tree)
|
||||
|
||||
def set_group_number_cb(self, col, cell, model, iter):
|
||||
if model and (model.iter_parent(iter) == None):
|
||||
cell.cell_attr["number_of_children"] = model.iter_n_children(iter)
|
||||
else:
|
||||
cell.cell_attr["number_of_children"] = 0
|
||||
|
||||
def connect_group_selection(self, cb_func):
|
||||
self.table_tree.get_selection().connect("changed", cb_func)
|
||||
|
||||
"""
|
||||
A method to calculate a softened value for the colour of widget when in the
|
||||
provided state.
|
||||
@@ -397,95 +380,363 @@ class HobInfoButton(gtk.EventBox):
|
||||
def mouse_out_cb(self, widget, event):
|
||||
self.image.set_from_file(hic.ICON_INFO_DISPLAY_FILE)
|
||||
|
||||
class HobIndicator(gtk.DrawingArea):
|
||||
def __init__(self, count):
|
||||
gtk.DrawingArea.__init__(self)
|
||||
# Set no window for transparent background
|
||||
self.set_has_window(False)
|
||||
self.set_size_request(38,38)
|
||||
# We need to pass through button clicks
|
||||
self.add_events(gtk.gdk.BUTTON_PRESS_MASK | gtk.gdk.BUTTON_RELEASE_MASK)
|
||||
class HobTabBar(gtk.DrawingArea):
|
||||
__gsignals__ = {
|
||||
"blank-area-changed" : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
(gobject.TYPE_INT,
|
||||
gobject.TYPE_INT,
|
||||
gobject.TYPE_INT,
|
||||
gobject.TYPE_INT,)),
|
||||
|
||||
self.connect('expose-event', self.expose)
|
||||
"tab-switched" : (gobject.SIGNAL_RUN_LAST,
|
||||
gobject.TYPE_NONE,
|
||||
(gobject.TYPE_INT,)),
|
||||
}
|
||||
|
||||
self.count = count
|
||||
self.color = HobColors.GRAY
|
||||
|
||||
def expose(self, widget, event):
|
||||
if self.count and self.count > 0:
|
||||
ctx = widget.window.cairo_create()
|
||||
|
||||
x, y, w, h = self.allocation
|
||||
|
||||
ctx.set_operator(cairo.OPERATOR_OVER)
|
||||
ctx.set_source_color(gtk.gdk.color_parse(self.color))
|
||||
ctx.translate(w/2, h/2)
|
||||
ctx.arc(x, y, min(w,h)/2 - 2, 0, 2*math.pi)
|
||||
ctx.fill_preserve()
|
||||
|
||||
layout = self.create_pango_layout(str(self.count))
|
||||
textw, texth = layout.get_pixel_size()
|
||||
x = (w/2)-(textw/2) + x
|
||||
y = (h/2) - (texth/2) + y
|
||||
ctx.move_to(x, y)
|
||||
self.window.draw_layout(self.style.light_gc[gtk.STATE_NORMAL], int(x), int(y), layout)
|
||||
|
||||
def set_count(self, count):
|
||||
self.count = count
|
||||
|
||||
def set_active(self, active):
|
||||
if active:
|
||||
self.color = HobColors.DEEP_RED
|
||||
else:
|
||||
self.color = HobColors.GRAY
|
||||
|
||||
class HobTabLabel(gtk.HBox):
|
||||
def __init__(self, text, count=0):
|
||||
gtk.HBox.__init__(self, False, 0)
|
||||
self.indicator = HobIndicator(count)
|
||||
self.indicator.show()
|
||||
self.pack_end(self.indicator, False, False)
|
||||
self.lbl = gtk.Label(text)
|
||||
self.lbl.set_alignment(0.0, 0.5)
|
||||
self.lbl.show()
|
||||
self.pack_end(self.lbl, True, True, 6)
|
||||
|
||||
def set_count(self, count):
|
||||
self.indicator.set_count(count)
|
||||
|
||||
def set_active(self, active=True):
|
||||
self.indicator.set_active(active)
|
||||
|
||||
class HobNotebook(gtk.Notebook):
|
||||
def __init__(self):
|
||||
gtk.Notebook.__init__(self)
|
||||
self.set_property('homogeneous', True)
|
||||
gtk.DrawingArea.__init__(self)
|
||||
self.children = []
|
||||
|
||||
self.pages = []
|
||||
self.tab_width = 140
|
||||
self.tab_height = 52
|
||||
self.tab_x = 10
|
||||
self.tab_y = 0
|
||||
|
||||
self.width = 500
|
||||
self.height = 53
|
||||
self.tab_w_ratio = 140 * 1.0/500
|
||||
self.tab_h_ratio = 52 * 1.0/53
|
||||
self.set_size_request(self.width, self.height)
|
||||
|
||||
self.current_child = None
|
||||
self.font = self.get_style().font_desc
|
||||
self.font.set_size(pango.SCALE * 13)
|
||||
self.update_children_text_layout_and_bg_color()
|
||||
|
||||
self.blank_rectangle = None
|
||||
self.tab_pressed = False
|
||||
|
||||
self.set_property('can-focus', True)
|
||||
self.set_events(gtk.gdk.EXPOSURE_MASK | gtk.gdk.POINTER_MOTION_MASK |
|
||||
gtk.gdk.BUTTON1_MOTION_MASK | gtk.gdk.BUTTON_PRESS_MASK |
|
||||
gtk.gdk.BUTTON_RELEASE_MASK)
|
||||
|
||||
self.connect("expose-event", self.on_draw)
|
||||
self.connect("button-press-event", self.button_pressed_cb)
|
||||
self.connect("button-release-event", self.button_released_cb)
|
||||
self.connect("query-tooltip", self.query_tooltip_cb)
|
||||
self.show_all()
|
||||
|
||||
def button_released_cb(self, widget, event):
|
||||
self.tab_pressed = False
|
||||
self.queue_draw()
|
||||
|
||||
def button_pressed_cb(self, widget, event):
|
||||
if event.type == gtk.gdk._2BUTTON_PRESS:
|
||||
return
|
||||
|
||||
result = False
|
||||
if self.is_focus() or event.type == gtk.gdk.BUTTON_PRESS:
|
||||
x, y = event.get_coords()
|
||||
# check which tab be clicked
|
||||
for child in self.children:
|
||||
if (child["x"] < x) and (x < child["x"] + self.tab_width) \
|
||||
and (child["y"] < y) and (y < child["y"] + self.tab_height):
|
||||
self.current_child = child
|
||||
result = True
|
||||
self.grab_focus()
|
||||
break
|
||||
|
||||
# check the blank area is focus in or not
|
||||
if (self.blank_rectangle) and (self.blank_rectangle.x > 0) and (self.blank_rectangle.y > 0):
|
||||
if (self.blank_rectangle.x < x) and (x < self.blank_rectangle.x + self.blank_rectangle.width) \
|
||||
and (self.blank_rectangle.y < y) and (y < self.blank_rectangle.y + self.blank_rectangle.height):
|
||||
self.grab_focus()
|
||||
|
||||
if result == True:
|
||||
page = self.current_child["toggled_page"]
|
||||
self.emit("tab-switched", page)
|
||||
self.tab_pressed = True
|
||||
self.queue_draw()
|
||||
|
||||
def update_children_size(self):
|
||||
# calculate the size of tabs
|
||||
self.tab_width = int(self.width * self.tab_w_ratio)
|
||||
self.tab_height = int(self.height * self.tab_h_ratio)
|
||||
for i, child in enumerate(self.children):
|
||||
child["x"] = self.tab_x + i * self.tab_width
|
||||
child["y"] = self.tab_y
|
||||
|
||||
if self.blank_rectangle:
|
||||
self.resize_blank_rectangle()
|
||||
|
||||
def resize_blank_rectangle(self):
|
||||
width = self.width - self.tab_width * len(self.children) - self.tab_x
|
||||
x = self.tab_x + self.tab_width * len(self.children)
|
||||
hpadding = vpadding = 5
|
||||
self.blank_rectangle = self.set_blank_size(x + hpadding, self.tab_y + vpadding,
|
||||
width - 2 * hpadding, self.tab_height - 2 * vpadding)
|
||||
|
||||
def update_children_text_layout_and_bg_color(self):
|
||||
style = self.get_style().copy()
|
||||
color = style.base[gtk.STATE_NORMAL]
|
||||
for child in self.children:
|
||||
pangolayout = self.create_pango_layout(child["title"])
|
||||
pangolayout.set_font_description(self.font)
|
||||
child["title_layout"] = pangolayout
|
||||
child["r"] = color.red
|
||||
child["g"] = color.green
|
||||
child["b"] = color.blue
|
||||
|
||||
def append_tab_child(self, title, page, tooltip=""):
|
||||
num = len(self.children) + 1
|
||||
self.tab_width = self.tab_width * len(self.children) / num
|
||||
|
||||
i = 0
|
||||
for i, child in enumerate(self.children):
|
||||
child["x"] = self.tab_x + i * self.tab_width
|
||||
i += 1
|
||||
|
||||
x = self.tab_x + i * self.tab_width
|
||||
y = self.tab_y
|
||||
pangolayout = self.create_pango_layout(title)
|
||||
pangolayout.set_font_description(self.font)
|
||||
color = self.style.base[gtk.STATE_NORMAL]
|
||||
new_one = {
|
||||
"x" : x,
|
||||
"y" : y,
|
||||
"r" : color.red,
|
||||
"g" : color.green,
|
||||
"b" : color.blue,
|
||||
"title_layout" : pangolayout,
|
||||
"toggled_page" : page,
|
||||
"title" : title,
|
||||
"indicator_show" : False,
|
||||
"indicator_number" : 0,
|
||||
"tooltip_markup" : tooltip,
|
||||
}
|
||||
self.children.append(new_one)
|
||||
if tooltip and (not self.props.has_tooltip):
|
||||
self.props.has_tooltip = True
|
||||
# set the default current child
|
||||
if not self.current_child:
|
||||
self.current_child = new_one
|
||||
|
||||
def on_draw(self, widget, event):
|
||||
cr = widget.window.cairo_create()
|
||||
|
||||
self.width = self.allocation.width
|
||||
self.height = self.allocation.height
|
||||
|
||||
self.update_children_size()
|
||||
|
||||
self.draw_background(cr)
|
||||
self.draw_toggled_tab(cr)
|
||||
|
||||
for child in self.children:
|
||||
if child["indicator_show"] == True:
|
||||
self.draw_indicator(cr, child)
|
||||
|
||||
self.draw_tab_text(cr)
|
||||
|
||||
def draw_background(self, cr):
|
||||
style = self.get_style()
|
||||
|
||||
if self.is_focus():
|
||||
cr.set_source_color(style.base[gtk.STATE_SELECTED])
|
||||
else:
|
||||
cr.set_source_color(style.base[gtk.STATE_NORMAL])
|
||||
|
||||
y = 6
|
||||
h = self.height - 6 - 1
|
||||
gap = 1
|
||||
|
||||
w = self.children[0]["x"]
|
||||
cr.set_source_color(gtk.gdk.color_parse(HobColors.GRAY))
|
||||
cr.rectangle(0, y, w - gap, h) # start rectangle
|
||||
cr.fill()
|
||||
|
||||
cr.set_source_color(style.base[gtk.STATE_NORMAL])
|
||||
cr.rectangle(w - gap, y, w, h) #first gap
|
||||
cr.fill()
|
||||
|
||||
w = self.tab_width
|
||||
for child in self.children:
|
||||
x = child["x"]
|
||||
cr.set_source_color(gtk.gdk.color_parse(HobColors.GRAY))
|
||||
cr.rectangle(x, y, w - gap, h) # tab rectangle
|
||||
cr.fill()
|
||||
cr.set_source_color(style.base[gtk.STATE_NORMAL])
|
||||
cr.rectangle(x + w - gap, y, w, h) # gap
|
||||
cr.fill()
|
||||
|
||||
cr.set_source_color(gtk.gdk.color_parse(HobColors.GRAY))
|
||||
cr.rectangle(x + w, y, self.width - x - w, h) # last rectangle
|
||||
cr.fill()
|
||||
|
||||
def draw_tab_text(self, cr):
|
||||
style = self.get_style()
|
||||
|
||||
for child in self.children:
|
||||
pangolayout = child["title_layout"]
|
||||
if pangolayout:
|
||||
fontw, fonth = pangolayout.get_pixel_size()
|
||||
# center pos
|
||||
off_x = (self.tab_width - fontw) / 2
|
||||
off_y = (self.tab_height - fonth) / 2
|
||||
x = child["x"] + off_x
|
||||
y = child["y"] + off_y
|
||||
if not child == self.current_child:
|
||||
self.window.draw_layout(self.style.fg_gc[gtk.STATE_NORMAL], int(x), int(y), pangolayout, gtk.gdk.Color(HobColors.WHITE))
|
||||
else:
|
||||
self.window.draw_layout(self.style.fg_gc[gtk.STATE_NORMAL], int(x), int(y), pangolayout)
|
||||
|
||||
def draw_toggled_tab(self, cr):
|
||||
if not self.current_child:
|
||||
return
|
||||
x = self.current_child["x"]
|
||||
y = self.current_child["y"]
|
||||
width = self.tab_width
|
||||
height = self.tab_height
|
||||
style = self.get_style()
|
||||
color = style.base[gtk.STATE_NORMAL]
|
||||
|
||||
r = height / 10
|
||||
if self.tab_pressed == True:
|
||||
for xoff, yoff, c1, c2 in [(1, 0, HobColors.SLIGHT_DARK, HobColors.DARK), (2, 0, HobColors.GRAY, HobColors.LIGHT_GRAY)]:
|
||||
cr.set_source_color(gtk.gdk.color_parse(c1))
|
||||
cr.move_to(x + xoff, y + height + yoff)
|
||||
cr.line_to(x + xoff, r + yoff)
|
||||
cr.arc(x + r + xoff, y + r + yoff, r, math.pi, 1.5*math.pi)
|
||||
cr.move_to(x + r + xoff, y + yoff)
|
||||
cr.line_to(x + width - r + xoff, y + yoff)
|
||||
cr.arc(x + width - r + xoff, y + r + yoff, r, 1.5*math.pi, 2*math.pi)
|
||||
cr.stroke()
|
||||
cr.set_source_color(gtk.gdk.color_parse(c2))
|
||||
cr.move_to(x + width + xoff, r + yoff)
|
||||
cr.line_to(x + width + xoff, y + height + yoff)
|
||||
cr.line_to(x + xoff, y + height + yoff)
|
||||
cr.stroke()
|
||||
x = x + 2
|
||||
y = y + 2
|
||||
cr.set_source_rgba(color.red, color.green, color.blue, 1)
|
||||
cr.move_to(x + r, y)
|
||||
cr.line_to(x + width - r , y)
|
||||
cr.arc(x + width - r, y + r, r, 1.5*math.pi, 2*math.pi)
|
||||
cr.move_to(x + width, r)
|
||||
cr.line_to(x + width, y + height)
|
||||
cr.line_to(x, y + height)
|
||||
cr.line_to(x, r)
|
||||
cr.arc(x + r, y + r, r, math.pi, 1.5*math.pi)
|
||||
cr.fill()
|
||||
|
||||
def draw_indicator(self, cr, child):
|
||||
text = ("%d" % child["indicator_number"])
|
||||
layout = self.create_pango_layout(text)
|
||||
layout.set_font_description(self.font)
|
||||
textw, texth = layout.get_pixel_size()
|
||||
# draw the back round area
|
||||
tab_x = child["x"]
|
||||
tab_y = child["y"]
|
||||
dest_w = int(32 * self.tab_w_ratio)
|
||||
dest_h = int(32 * self.tab_h_ratio)
|
||||
if dest_h < self.tab_height:
|
||||
dest_w = dest_h
|
||||
# x position is offset(tab_width*3/4 - icon_width/2) + start_pos(tab_x)
|
||||
x = tab_x + self.tab_width * 3/4 - dest_w/2
|
||||
y = tab_y + self.tab_height/2 - dest_h/2
|
||||
|
||||
r = min(dest_w, dest_h)/2
|
||||
if not child == self.current_child:
|
||||
color = cr.set_source_color(gtk.gdk.color_parse(HobColors.DEEP_RED))
|
||||
else:
|
||||
color = cr.set_source_color(gtk.gdk.color_parse(HobColors.GRAY))
|
||||
# check round back area can contain the text or not
|
||||
back_round_can_contain_width = float(2 * r * 0.707)
|
||||
if float(textw) > back_round_can_contain_width:
|
||||
xoff = (textw - int(back_round_can_contain_width)) / 2
|
||||
cr.move_to(x + r - xoff, y + r + r)
|
||||
cr.arc((x + r - xoff), (y + r), r, 0.5*math.pi, 1.5*math.pi)
|
||||
cr.fill() # left half round
|
||||
cr.rectangle((x + r - xoff), y, 2 * xoff, 2 * r)
|
||||
cr.fill() # center rectangle
|
||||
cr.arc((x + r + xoff), (y + r), r, 1.5*math.pi, 0.5*math.pi)
|
||||
cr.fill() # right half round
|
||||
else:
|
||||
cr.arc((x + r), (y + r), r, 0, 2*math.pi)
|
||||
cr.fill()
|
||||
# draw the number text
|
||||
x = x + (dest_w/2)-(textw/2)
|
||||
y = y + (dest_h/2) - (texth/2)
|
||||
cr.move_to(x, y)
|
||||
self.window.draw_layout(self.style.fg_gc[gtk.STATE_NORMAL], int(x), int(y), layout, gtk.gdk.Color(HobColors.WHITE))
|
||||
|
||||
def show_indicator_icon(self, child, number):
|
||||
child["indicator_show"] = True
|
||||
child["indicator_number"] = number
|
||||
self.queue_draw()
|
||||
|
||||
def hide_indicator_icon(self, child):
|
||||
child["indicator_show"] = False
|
||||
self.queue_draw()
|
||||
|
||||
def set_blank_size(self, x, y, w, h):
|
||||
if not self.blank_rectangle or self.blank_rectangle.x != x or self.blank_rectangle.width != w:
|
||||
self.emit("blank-area-changed", x, y, w, h)
|
||||
|
||||
return gtk.gdk.Rectangle(x, y, w, h)
|
||||
|
||||
def query_tooltip_cb(self, widget, x, y, keyboardtip, tooltip):
|
||||
if keyboardtip or (not tooltip):
|
||||
return False
|
||||
# check which tab be clicked
|
||||
for child in self.children:
|
||||
if (child["x"] < x) and (x < child["x"] + self.tab_width) \
|
||||
and (child["y"] < y) and (y < child["y"] + self.tab_height):
|
||||
tooltip.set_markup(child["tooltip_markup"])
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
class HobNotebook(gtk.VBox):
|
||||
|
||||
def __init__(self):
|
||||
gtk.VBox.__init__(self, False, 0)
|
||||
|
||||
self.notebook = gtk.Notebook()
|
||||
self.notebook.set_property('homogeneous', True)
|
||||
self.notebook.set_property('show-tabs', False)
|
||||
|
||||
self.tabbar = HobTabBar()
|
||||
self.tabbar.connect("tab-switched", self.tab_switched_cb)
|
||||
self.notebook.connect("page-added", self.page_added_cb)
|
||||
self.notebook.connect("page-removed", self.page_removed_cb)
|
||||
|
||||
self.search = None
|
||||
self.search_name = ""
|
||||
|
||||
self.connect("switch-page", self.page_changed_cb)
|
||||
self.tb = gtk.Table(1, 100, False)
|
||||
self.hbox= gtk.HBox(False, 0)
|
||||
self.hbox.pack_start(self.tabbar, True, True)
|
||||
self.tb.attach(self.hbox, 0, 100, 0, 1)
|
||||
|
||||
self.pack_start(self.tb, False, False)
|
||||
self.pack_start(self.notebook)
|
||||
|
||||
self.show_all()
|
||||
|
||||
def page_changed_cb(self, nb, page, page_num):
|
||||
for p, lbl in enumerate(self.pages):
|
||||
if p == page_num:
|
||||
lbl.set_active()
|
||||
else:
|
||||
lbl.set_active(False)
|
||||
|
||||
def append_page(self, child, tab_label, tab_tooltip=None):
|
||||
label = HobTabLabel(tab_label)
|
||||
if tab_tooltip:
|
||||
label.set_tooltip_text(tab_tooltip)
|
||||
label.set_active(False)
|
||||
self.pages.append(label)
|
||||
gtk.Notebook.append_page(self, child, label)
|
||||
def append_page(self, child, tab_label):
|
||||
self.notebook.set_current_page(self.notebook.append_page(child, tab_label))
|
||||
|
||||
def set_entry(self, name="Search:"):
|
||||
for child in self.tb.get_children():
|
||||
if child:
|
||||
self.tb.remove(child)
|
||||
|
||||
hbox_entry = gtk.HBox(False, 0)
|
||||
hbox_entry.show()
|
||||
|
||||
self.search = gtk.Entry()
|
||||
self.search_name = name
|
||||
style = self.search.get_style()
|
||||
@@ -496,20 +747,59 @@ class HobNotebook(gtk.Notebook):
|
||||
self.search.set_icon_from_stock(gtk.ENTRY_ICON_SECONDARY, gtk.STOCK_CLEAR)
|
||||
self.search.connect("icon-release", self.set_search_entry_clear_cb)
|
||||
self.search.show()
|
||||
self.align = gtk.Alignment(xalign=1.0, yalign=0.7)
|
||||
self.align.add(self.search)
|
||||
self.align.show()
|
||||
hbox_entry.pack_end(self.align, False, False)
|
||||
self.tabbar.resize_blank_rectangle()
|
||||
|
||||
self.tb.attach(hbox_entry, 75, 100, 0, 1, xpadding=5)
|
||||
self.tb.attach(self.hbox, 0, 100, 0, 1)
|
||||
|
||||
self.tabbar.connect("blank-area-changed", self.blank_area_resize_cb)
|
||||
self.search.connect("focus-in-event", self.set_search_entry_editable_cb)
|
||||
self.search.connect("focus-out-event", self.set_search_entry_reset_cb)
|
||||
self.set_action_widget(self.search, gtk.PACK_END)
|
||||
|
||||
self.tb.show()
|
||||
|
||||
def show_indicator_icon(self, title, number):
|
||||
for child in self.pages:
|
||||
if child.lbl.get_label() == title:
|
||||
child.set_count(number)
|
||||
for child in self.tabbar.children:
|
||||
if child["toggled_page"] == -1:
|
||||
continue
|
||||
if child["title"] == title:
|
||||
self.tabbar.show_indicator_icon(child, number)
|
||||
|
||||
def hide_indicator_icon(self, title):
|
||||
for child in self.pages:
|
||||
if child.lbl.get_label() == title:
|
||||
child.set_count(0)
|
||||
for child in self.tabbar.children:
|
||||
if child["toggled_page"] == -1:
|
||||
continue
|
||||
if child["title"] == title:
|
||||
self.tabbar.hide_indicator_icon(child)
|
||||
|
||||
def tab_switched_cb(self, widget, page):
|
||||
self.notebook.set_current_page(page)
|
||||
|
||||
def page_added_cb(self, notebook, notebook_child, page):
|
||||
if not notebook:
|
||||
return
|
||||
title = notebook.get_tab_label_text(notebook_child)
|
||||
label = notebook.get_tab_label(notebook_child)
|
||||
tooltip_markup = label.get_tooltip_markup()
|
||||
if not title:
|
||||
return
|
||||
for child in self.tabbar.children:
|
||||
if child["title"] == title:
|
||||
child["toggled_page"] = page
|
||||
return
|
||||
self.tabbar.append_tab_child(title, page, tooltip_markup)
|
||||
|
||||
def page_removed_cb(self, notebook, notebook_child, page, title=""):
|
||||
for child in self.tabbar.children:
|
||||
if child["title"] == title:
|
||||
child["toggled_page"] = -1
|
||||
|
||||
def blank_area_resize_cb(self, widget, request_x, request_y, request_width, request_height):
|
||||
self.search.set_size_request(request_width, request_height)
|
||||
|
||||
def set_search_entry_editable_cb(self, search, event):
|
||||
search.set_editable(True)
|
||||
@@ -529,8 +819,7 @@ class HobNotebook(gtk.Notebook):
|
||||
self.reset_entry(search)
|
||||
|
||||
def set_search_entry_clear_cb(self, search, icon_pos, event):
|
||||
if search.get_editable() == True:
|
||||
search.set_text("")
|
||||
self.reset_entry(search)
|
||||
|
||||
class HobWarpCellRendererText(gtk.CellRendererText):
|
||||
def __init__(self, col_number):
|
||||
@@ -795,17 +1084,11 @@ class HobCellRendererToggle(gtk.CellRendererToggle):
|
||||
gtk.CellRendererToggle.__init__(self)
|
||||
self.ctrl = HobCellRendererController(is_draw_row=True)
|
||||
self.ctrl.running_mode = self.ctrl.MODE_ONE_SHORT
|
||||
self.cell_attr = {"fadeout": False, "number_of_children": 0}
|
||||
self.cell_attr = {"fadeout": False}
|
||||
|
||||
def do_render(self, window, widget, background_area, cell_area, expose_area, flags):
|
||||
if (not self.ctrl) or (not widget):
|
||||
return
|
||||
|
||||
if flags & gtk.CELL_RENDERER_SELECTED:
|
||||
state = gtk.STATE_SELECTED
|
||||
else:
|
||||
state = gtk.STATE_NORMAL
|
||||
|
||||
if self.ctrl.is_active():
|
||||
path = widget.get_path_at_pos(cell_area.x + cell_area.width/2, cell_area.y + cell_area.height/2)
|
||||
# sometimes the parameters of cell_area will be a negative number,such as pull up down the scroll bar
|
||||
@@ -814,23 +1097,14 @@ class HobCellRendererToggle(gtk.CellRendererToggle):
|
||||
path = path[0]
|
||||
if path in self.ctrl.running_cell_areas:
|
||||
cr = window.cairo_create()
|
||||
color = widget.get_style().base[state]
|
||||
color = gtk.gdk.Color(HobColors.WHITE)
|
||||
|
||||
row_x, _, row_width, _ = widget.get_visible_rect()
|
||||
border_y = self.get_property("ypad")
|
||||
self.ctrl.on_draw_fadeinout_cb(cr, color, row_x, cell_area.y - border_y, row_width, \
|
||||
cell_area.height + border_y * 2, self.cell_attr["fadeout"])
|
||||
# draw number of a group
|
||||
if self.cell_attr["number_of_children"]:
|
||||
text = "%d pkg" % self.cell_attr["number_of_children"]
|
||||
pangolayout = widget.create_pango_layout(text)
|
||||
textw, texth = pangolayout.get_pixel_size()
|
||||
x = cell_area.x + (cell_area.width/2) - (textw/2)
|
||||
y = cell_area.y + (cell_area.height/2) - (texth/2)
|
||||
|
||||
widget.style.paint_layout(window, state, True, cell_area, widget, "checkbox", x, y, pangolayout)
|
||||
else:
|
||||
return gtk.CellRendererToggle.do_render(self, window, widget, background_area, cell_area, expose_area, flags)
|
||||
return gtk.CellRendererToggle.do_render(self, window, widget, background_area, cell_area, expose_area, flags)
|
||||
|
||||
'''delay: normally delay time is 1000ms
|
||||
cell_list: whilch cells need to be render
|
||||
|
||||
@@ -22,7 +22,6 @@
|
||||
|
||||
import gtk
|
||||
import glib
|
||||
import re
|
||||
from bb.ui.crumbs.progressbar import HobProgressBar
|
||||
from bb.ui.crumbs.hobcolor import HobColors
|
||||
from bb.ui.crumbs.hobwidget import hic, HobImageButton, HobInfoButton, HobAltButton, HobButton
|
||||
@@ -34,9 +33,6 @@ from bb.ui.crumbs.hobpages import HobPage
|
||||
#
|
||||
class ImageConfigurationPage (HobPage):
|
||||
|
||||
__dummy_machine__ = "--select a machine--"
|
||||
__dummy_image__ = "--select a base image--"
|
||||
|
||||
def __init__(self, builder):
|
||||
super(ImageConfigurationPage, self).__init__(builder, "Image configuration")
|
||||
|
||||
@@ -151,6 +147,7 @@ class ImageConfigurationPage (HobPage):
|
||||
self.machine_title_desc.set_markup(mark)
|
||||
|
||||
self.machine_combo = gtk.combo_box_new_text()
|
||||
self.machine_combo.set_wrap_width(1)
|
||||
self.machine_combo.connect("changed", self.machine_combo_changed_cb)
|
||||
|
||||
icon_file = hic.ICON_LAYERS_DISPLAY_FILE
|
||||
@@ -199,12 +196,11 @@ class ImageConfigurationPage (HobPage):
|
||||
self.image_title_desc.set_markup(mark)
|
||||
|
||||
self.image_combo = gtk.combo_box_new_text()
|
||||
self.image_combo.set_wrap_width(1)
|
||||
self.image_combo_id = self.image_combo.connect("changed", self.image_combo_changed_cb)
|
||||
|
||||
self.image_desc = gtk.Label()
|
||||
self.image_desc.set_alignment(0.0, 0.5)
|
||||
self.image_desc.set_size_request(360, -1)
|
||||
self.image_desc.set_justify(gtk.JUSTIFY_LEFT)
|
||||
self.image_desc.set_line_wrap(True)
|
||||
|
||||
# button to view recipes
|
||||
@@ -263,15 +259,9 @@ class ImageConfigurationPage (HobPage):
|
||||
|
||||
def machine_combo_changed_cb(self, machine_combo):
|
||||
combo_item = machine_combo.get_active_text()
|
||||
if not combo_item or combo_item == self.__dummy_machine__:
|
||||
if not combo_item:
|
||||
return
|
||||
|
||||
# remove __dummy_machine__ item from the store list after first user selection
|
||||
# because it is no longer valid
|
||||
combo_store = machine_combo.get_model()
|
||||
if len(combo_store) and (combo_store[0][0] == self.__dummy_machine__):
|
||||
machine_combo.remove_text(0)
|
||||
|
||||
self.builder.configuration.curr_mach = combo_item
|
||||
if self.machine_combo_changed_by_manual:
|
||||
self.builder.configuration.clear_selection()
|
||||
@@ -282,13 +272,13 @@ class ImageConfigurationPage (HobPage):
|
||||
self.builder.populate_recipe_package_info_async()
|
||||
|
||||
def update_machine_combo(self):
|
||||
all_machines = [self.__dummy_machine__] + self.builder.parameters.all_machines
|
||||
all_machines = self.builder.parameters.all_machines
|
||||
|
||||
model = self.machine_combo.get_model()
|
||||
model.clear()
|
||||
for machine in all_machines:
|
||||
self.machine_combo.append_text(machine)
|
||||
self.machine_combo.set_active(0)
|
||||
self.machine_combo.set_active(-1)
|
||||
|
||||
def switch_machine_combo(self):
|
||||
self.machine_combo_changed_by_manual = False
|
||||
@@ -299,15 +289,10 @@ class ImageConfigurationPage (HobPage):
|
||||
self.machine_combo.set_active(active)
|
||||
return
|
||||
active += 1
|
||||
self.machine_combo.set_active(-1)
|
||||
|
||||
if model[0][0] != self.__dummy_machine__:
|
||||
self.machine_combo.insert_text(0, self.__dummy_machine__)
|
||||
|
||||
self.machine_combo.set_active(0)
|
||||
|
||||
def update_image_desc(self):
|
||||
def update_image_desc(self, selected_image):
|
||||
desc = ""
|
||||
selected_image = self.image_combo.get_active_text()
|
||||
if selected_image and selected_image in self.builder.recipe_model.pn_path.keys():
|
||||
image_path = self.builder.recipe_model.pn_path[selected_image]
|
||||
image_iter = self.builder.recipe_model.get_iter(image_path)
|
||||
@@ -324,15 +309,9 @@ class ImageConfigurationPage (HobPage):
|
||||
def image_combo_changed_cb(self, combo):
|
||||
self.builder.window_sensitive(False)
|
||||
selected_image = self.image_combo.get_active_text()
|
||||
if not selected_image or (selected_image == self.__dummy_image__):
|
||||
if not selected_image:
|
||||
return
|
||||
|
||||
# remove __dummy_image__ item from the store list after first user selection
|
||||
# because it is no longer valid
|
||||
combo_store = combo.get_model()
|
||||
if len(combo_store) and (combo_store[0][0] == self.__dummy_image__):
|
||||
combo.remove_text(0)
|
||||
|
||||
self.builder.customized = False
|
||||
|
||||
selected_recipes = []
|
||||
@@ -340,7 +319,7 @@ class ImageConfigurationPage (HobPage):
|
||||
image_path = self.builder.recipe_model.pn_path[selected_image]
|
||||
image_iter = self.builder.recipe_model.get_iter(image_path)
|
||||
selected_packages = self.builder.recipe_model.get_value(image_iter, self.builder.recipe_model.COL_INSTALL).split()
|
||||
self.update_image_desc()
|
||||
self.update_image_desc(selected_image)
|
||||
|
||||
self.builder.recipe_model.reset()
|
||||
self.builder.package_model.reset()
|
||||
@@ -363,61 +342,32 @@ class ImageConfigurationPage (HobPage):
|
||||
# populate image combo
|
||||
filter = {RecipeListModel.COL_TYPE : ['image']}
|
||||
image_model = recipe_model.tree_model(filter)
|
||||
active = 0
|
||||
cnt = 1
|
||||
|
||||
white_pattern = []
|
||||
if self.builder.parameters.image_white_pattern:
|
||||
for i in self.builder.parameters.image_white_pattern.split():
|
||||
white_pattern.append(re.compile(i))
|
||||
|
||||
black_pattern = []
|
||||
if self.builder.parameters.image_black_pattern:
|
||||
for i in self.builder.parameters.image_black_pattern.split():
|
||||
black_pattern.append(re.compile(i))
|
||||
active = -1
|
||||
cnt = 0
|
||||
|
||||
it = image_model.get_iter_first()
|
||||
self._image_combo_disconnect_signal()
|
||||
model = self.image_combo.get_model()
|
||||
model.clear()
|
||||
# Set a indicator text to combo store when first open
|
||||
self.image_combo.append_text(self.__dummy_image__)
|
||||
# append and set active
|
||||
while it:
|
||||
path = image_model.get_path(it)
|
||||
it = image_model.iter_next(it)
|
||||
image_name = image_model[path][recipe_model.COL_NAME]
|
||||
if image_name == self.builder.recipe_model.__custom_image__:
|
||||
if image_name == self.builder.recipe_model.__dummy_image__:
|
||||
continue
|
||||
|
||||
if black_pattern:
|
||||
allow = True
|
||||
for pattern in black_pattern:
|
||||
if pattern.search(image_name):
|
||||
allow = False
|
||||
break
|
||||
elif white_pattern:
|
||||
allow = False
|
||||
for pattern in white_pattern:
|
||||
if pattern.search(image_name):
|
||||
allow = True
|
||||
break
|
||||
else:
|
||||
allow = True
|
||||
|
||||
if allow:
|
||||
self.image_combo.append_text(image_name)
|
||||
if image_name == selected_image:
|
||||
active = cnt
|
||||
cnt = cnt + 1
|
||||
|
||||
self.image_combo.append_text(self.builder.recipe_model.__custom_image__)
|
||||
if selected_image == self.builder.recipe_model.__custom_image__:
|
||||
self.image_combo.append_text(image_name)
|
||||
if image_name == selected_image:
|
||||
active = cnt
|
||||
cnt = cnt + 1
|
||||
self.image_combo.append_text(self.builder.recipe_model.__dummy_image__)
|
||||
if selected_image == self.builder.recipe_model.__dummy_image__:
|
||||
active = cnt
|
||||
|
||||
self.image_combo.set_active(-1)
|
||||
self.image_combo.set_active(active)
|
||||
|
||||
if active != 0:
|
||||
if active != -1:
|
||||
self.show_baseimg_selected()
|
||||
|
||||
self._image_combo_connect_signal()
|
||||
|
||||
@@ -25,8 +25,7 @@ import gtk
|
||||
from bb.ui.crumbs.hobcolor import HobColors
|
||||
from bb.ui.crumbs.hobwidget import hic, HobViewTable, HobAltButton, HobButton
|
||||
from bb.ui.crumbs.hobpages import HobPage
|
||||
import subprocess
|
||||
from bb.ui.crumbs.hig import CrumbsDialog
|
||||
|
||||
#
|
||||
# ImageDetailsPage
|
||||
#
|
||||
@@ -38,19 +37,19 @@ class ImageDetailsPage (HobPage):
|
||||
'col_style': 'text',
|
||||
'col_min' : 500,
|
||||
'col_max' : 500
|
||||
},{
|
||||
}, {
|
||||
'col_name' : 'Image size',
|
||||
'col_id' : 1,
|
||||
'col_style': 'text',
|
||||
'col_min' : 100,
|
||||
'col_max' : 100
|
||||
},{
|
||||
}, {
|
||||
'col_name' : 'Select',
|
||||
'col_id' : 2,
|
||||
'col_style': 'radio toggle',
|
||||
'col_min' : 100,
|
||||
'col_max' : 100
|
||||
}]
|
||||
}]
|
||||
|
||||
class DetailBox (gtk.EventBox):
|
||||
def __init__(self, widget = None, varlist = None, vallist = None, icon = None, button = None, color = HobColors.LIGHT_GRAY):
|
||||
@@ -65,31 +64,27 @@ class ImageDetailsPage (HobPage):
|
||||
self.hbox.set_border_width(15)
|
||||
self.add(self.hbox)
|
||||
|
||||
total_rows = 0
|
||||
if widget:
|
||||
total_rows = 10
|
||||
if varlist and vallist:
|
||||
row = 1
|
||||
elif varlist and vallist:
|
||||
# pack the icon and the text on the left
|
||||
total_rows += len(varlist)
|
||||
self.table = gtk.Table(total_rows, 20, True)
|
||||
self.table.set_row_spacings(6)
|
||||
row = len(varlist)
|
||||
self.table = gtk.Table(row, 20, True)
|
||||
self.table.set_size_request(100, -1)
|
||||
self.hbox.pack_start(self.table, expand=True, fill=True, padding=15)
|
||||
|
||||
colid = 0
|
||||
rowid = 0
|
||||
self.line_widgets = {}
|
||||
if icon:
|
||||
self.table.attach(icon, colid, colid + 2, 0, 1)
|
||||
colid = colid + 2
|
||||
if widget:
|
||||
self.table.attach(widget, colid, 20, 0, 10)
|
||||
rowid = 10
|
||||
if varlist and vallist:
|
||||
for row in range(rowid, total_rows):
|
||||
index = row - rowid
|
||||
self.line_widgets[varlist[index]] = self.text2label(varlist[index], vallist[index])
|
||||
self.table.attach(self.line_widgets[varlist[index]], colid, 20, row, row + 1)
|
||||
self.table.attach(widget, colid, 20, 0, 1)
|
||||
elif varlist and vallist:
|
||||
for line in range(0, row):
|
||||
self.line_widgets[varlist[line]] = self.text2label(varlist[line], vallist[line])
|
||||
self.table.attach(self.line_widgets[varlist[line]], colid, 20, line, line + 1)
|
||||
|
||||
# pack the button on the right
|
||||
if button:
|
||||
self.hbox.pack_end(button, expand=False, fill=False)
|
||||
@@ -101,23 +96,9 @@ class ImageDetailsPage (HobPage):
|
||||
return
|
||||
self.line_widgets[variable].set_markup(self.format_line(variable, value))
|
||||
|
||||
def wrap_line(self, inputs):
|
||||
# wrap the long text of inputs
|
||||
wrap_width_chars = 75
|
||||
outputs = ""
|
||||
tmps = inputs
|
||||
less_chars = len(inputs)
|
||||
while (less_chars - wrap_width_chars) > 0:
|
||||
less_chars -= wrap_width_chars
|
||||
outputs += tmps[:wrap_width_chars] + "\n "
|
||||
tmps = inputs[less_chars:]
|
||||
outputs += tmps
|
||||
return outputs
|
||||
|
||||
def format_line(self, variable, value):
|
||||
wraped_value = self.wrap_line(value)
|
||||
markup = "<span weight=\'bold\'>%s</span>" % variable
|
||||
markup += "<span weight=\'normal\' foreground=\'#1c1c1c\' font_desc=\'14px\'>%s</span>" % wraped_value
|
||||
markup += "<span weight=\'normal\' foreground=\'#1c1c1c\' font_desc=\'14px\'>%s</span>" % value
|
||||
return markup
|
||||
|
||||
def text2label(self, variable, value):
|
||||
@@ -131,7 +112,7 @@ class ImageDetailsPage (HobPage):
|
||||
def __init__(self, builder):
|
||||
super(ImageDetailsPage, self).__init__(builder, "Image details")
|
||||
|
||||
self.image_store = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_BOOLEAN, gobject.TYPE_STRING)
|
||||
self.image_store = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_BOOLEAN)
|
||||
self.button_ids = {}
|
||||
self.details_bottom_buttons = gtk.HBox(False, 6)
|
||||
self.create_visual_elements()
|
||||
@@ -176,10 +157,10 @@ class ImageDetailsPage (HobPage):
|
||||
self.details_bottom_buttons.remove(child)
|
||||
|
||||
def show_page(self, step):
|
||||
self.build_succeeded = (step == self.builder.IMAGE_GENERATED)
|
||||
build_succeeded = (step == self.builder.IMAGE_GENERATED)
|
||||
image_addr = self.builder.parameters.image_addr
|
||||
image_names = self.builder.parameters.image_names
|
||||
if self.build_succeeded:
|
||||
if build_succeeded:
|
||||
machine = self.builder.configuration.curr_mach
|
||||
base_image = self.builder.recipe_model.get_selected_image()
|
||||
layers = self.builder.configuration.layers
|
||||
@@ -191,13 +172,12 @@ class ImageDetailsPage (HobPage):
|
||||
for button_id, button in self.button_ids.items():
|
||||
button.disconnect(button_id)
|
||||
self._remove_all_widget()
|
||||
|
||||
# repack
|
||||
self.pack_start(self.details_top_buttons, expand=False, fill=False)
|
||||
self.pack_start(self.group_align, expand=True, fill=True)
|
||||
|
||||
self.build_result = None
|
||||
if self.build_succeeded:
|
||||
if build_succeeded:
|
||||
# building is the previous step
|
||||
icon = gtk.Image()
|
||||
pixmap_path = hic.ICON_INDI_CONFIRM_FILE
|
||||
@@ -210,80 +190,45 @@ class ImageDetailsPage (HobPage):
|
||||
self.box_group_area.pack_start(self.build_result, expand=False, fill=False)
|
||||
|
||||
# create the buttons at the bottom first because the buttons are used in apply_button_per_image()
|
||||
if self.build_succeeded:
|
||||
if build_succeeded:
|
||||
self.buttonlist = ["Build new image", "Save as template", "Run image", "Deploy image"]
|
||||
else: # get to this page from "My images"
|
||||
self.buttonlist = ["Build new image", "Run image", "Deploy image"]
|
||||
|
||||
# Name
|
||||
self.image_store.clear()
|
||||
self.toggled_image = ""
|
||||
default_toggled = False
|
||||
default_image_size = 0
|
||||
self.num_toggled = 0
|
||||
i = 0
|
||||
for image_name in image_names:
|
||||
image_size = HobPage._size_to_string(os.stat(os.path.join(image_addr, image_name)).st_size)
|
||||
|
||||
image_attr = ("runnable" if (self.test_type_runnable(image_name) and self.test_mach_runnable(image_name)) else \
|
||||
("deploy" if self.test_deployable(image_name) else ""))
|
||||
is_toggled = (image_attr != "")
|
||||
|
||||
if not self.toggled_image:
|
||||
if not default_toggled:
|
||||
default_toggled = (self.test_type_runnable(image_name) and self.test_mach_runnable(image_name)) \
|
||||
or self.test_deployable(image_name)
|
||||
if i == (len(image_names) - 1):
|
||||
is_toggled = True
|
||||
self.image_store.set(self.image_store.append(), 0, image_name, 1, image_size, 2, is_toggled, 3, image_attr)
|
||||
if is_toggled:
|
||||
default_toggled = True
|
||||
self.image_store.set(self.image_store.append(), 0, image_name, 1, image_size, 2, default_toggled)
|
||||
if default_toggled:
|
||||
default_image_size = image_size
|
||||
self.toggled_image = image_name
|
||||
|
||||
self.create_bottom_buttons(self.buttonlist, image_name)
|
||||
else:
|
||||
self.image_store.set(self.image_store.append(), 0, image_name, 1, image_size, 2, False, 3, image_attr)
|
||||
self.image_store.set(self.image_store.append(), 0, image_name, 1, image_size, 2, False)
|
||||
i = i + 1
|
||||
self.num_toggled += is_toggled
|
||||
|
||||
is_runnable = self.create_bottom_buttons(self.buttonlist, self.toggled_image)
|
||||
|
||||
if self.build_succeeded:
|
||||
varlist = ["Name: ", "Directory: "]
|
||||
vallist = []
|
||||
vallist.append(image_name.split('.')[0])
|
||||
vallist.append(image_addr)
|
||||
image_table = None
|
||||
else:
|
||||
varlist = None
|
||||
vallist = None
|
||||
image_table = HobViewTable(self.__columns__)
|
||||
image_table.set_model(self.image_store)
|
||||
image_table.connect("row-activated", self.row_activated_cb)
|
||||
image_table.connect_group_selection(self.table_selected_cb)
|
||||
|
||||
image_table = HobViewTable(self.__columns__)
|
||||
image_table.set_model(self.image_store)
|
||||
image_table.connect("toggled", self.toggled_cb)
|
||||
view_files_button = HobAltButton("View files")
|
||||
view_files_button.connect("clicked", self.view_files_clicked_cb, image_addr)
|
||||
view_files_button.set_tooltip_text("Open the directory containing the image files")
|
||||
self.image_detail = self.DetailBox(widget=image_table, varlist=varlist, vallist=vallist, button=view_files_button)
|
||||
self.image_detail = self.DetailBox(widget=image_table, button=view_files_button)
|
||||
self.box_group_area.pack_start(self.image_detail, expand=True, fill=True)
|
||||
|
||||
# The default kernel box for the qemu images
|
||||
self.sel_kernel = ""
|
||||
if 'qemu' in image_name:
|
||||
self.sel_kernel = self.get_kernel_file_name()
|
||||
|
||||
varlist = ["Kernel: "]
|
||||
vallist = []
|
||||
vallist.append(self.sel_kernel)
|
||||
|
||||
change_kernel_button = HobAltButton("Change")
|
||||
change_kernel_button.connect("clicked", self.change_kernel_cb)
|
||||
change_kernel_button.set_tooltip_text("Change qemu kernel file")
|
||||
self.kernel_detail = self.DetailBox(varlist=varlist, vallist=vallist, button=change_kernel_button)
|
||||
self.box_group_area.pack_start(self.kernel_detail, expand=False, fill=False)
|
||||
|
||||
# Machine, Base image and Layers
|
||||
layer_num_limit = 15
|
||||
varlist = ["Machine: ", "Base image: ", "Layers: "]
|
||||
vallist = []
|
||||
self.setting_detail = None
|
||||
if self.build_succeeded:
|
||||
if build_succeeded:
|
||||
vallist.append(machine)
|
||||
vallist.append(base_image)
|
||||
i = 0
|
||||
@@ -307,14 +252,14 @@ class ImageDetailsPage (HobPage):
|
||||
edit_config_button.set_tooltip_text("Edit machine, base image and recipes")
|
||||
edit_config_button.connect("clicked", self.edit_config_button_clicked_cb)
|
||||
self.setting_detail = self.DetailBox(varlist=varlist, vallist=vallist, button=edit_config_button)
|
||||
self.box_group_area.pack_start(self.setting_detail, expand=True, fill=True)
|
||||
self.box_group_area.pack_start(self.setting_detail, expand=False, fill=False)
|
||||
|
||||
# Packages included, and Total image size
|
||||
varlist = ["Packages included: ", "Total image size: "]
|
||||
vallist = []
|
||||
vallist.append(pkg_num)
|
||||
vallist.append(default_image_size)
|
||||
if self.build_succeeded:
|
||||
if build_succeeded:
|
||||
edit_packages_button = HobAltButton("Edit packages")
|
||||
edit_packages_button.set_tooltip_text("Edit the packages included in your image")
|
||||
edit_packages_button.connect("clicked", self.edit_packages_button_clicked_cb)
|
||||
@@ -327,11 +272,9 @@ class ImageDetailsPage (HobPage):
|
||||
self.box_group_area.pack_end(self.details_bottom_buttons, expand=False, fill=False)
|
||||
|
||||
self.show_all()
|
||||
if not is_runnable:
|
||||
self.kernel_detail.hide()
|
||||
|
||||
def view_files_clicked_cb(self, button, image_addr):
|
||||
subprocess.call("xdg-open /%s" % image_addr, shell=True)
|
||||
os.system("xdg-open /%s" % image_addr)
|
||||
|
||||
def refresh_package_detail_box(self, image_size):
|
||||
self.package_detail.update_line_widgets("Total image size: ", image_size)
|
||||
@@ -360,63 +303,21 @@ class ImageDetailsPage (HobPage):
|
||||
break
|
||||
return deployable
|
||||
|
||||
def get_kernel_file_name(self, kernel_addr=""):
|
||||
kernel_name = ""
|
||||
|
||||
if not kernel_addr:
|
||||
kernel_addr = self.builder.parameters.image_addr
|
||||
|
||||
files = [f for f in os.listdir(kernel_addr) if f[0] <> '.']
|
||||
for check_file in files:
|
||||
if check_file.endswith(".bin"):
|
||||
name_splits = check_file.split(".")[0]
|
||||
if self.builder.parameters.kernel_image_type in name_splits.split("-"):
|
||||
kernel_name = check_file
|
||||
break
|
||||
|
||||
return kernel_name
|
||||
|
||||
def show_builded_images_dialog(self, widget):
|
||||
dialog = CrumbsDialog("Your builded images", self.builder,
|
||||
gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT)
|
||||
dialog.set_size_request(-1, 350)
|
||||
|
||||
label = gtk.Label()
|
||||
label.set_use_markup(True)
|
||||
label.set_alignment(0.0, 0.5)
|
||||
label.set_markup("<span font_desc='12'>Please select a image to run or deploy</span>")
|
||||
dialog.vbox.pack_start(label, expand=False, fill=False)
|
||||
|
||||
image_table = HobViewTable(self.__columns__)
|
||||
image_table.set_model(self.image_store)
|
||||
image_table.connect("row-activated", self.row_activated_cb)
|
||||
image_table.connect_group_selection(self.table_selected_cb)
|
||||
dialog.vbox.pack_start(image_table, expand=True, fill=True)
|
||||
|
||||
button = dialog.add_button(" OK ", gtk.RESPONSE_YES)
|
||||
HobButton.style_button(button)
|
||||
|
||||
dialog.show_all()
|
||||
|
||||
response = dialog.run()
|
||||
dialog.destroy()
|
||||
|
||||
if response != gtk.RESPONSE_YES:
|
||||
def toggled_cb(self, table, cell, path, columnid, tree):
|
||||
model = tree.get_model()
|
||||
if not model:
|
||||
return
|
||||
iter = model.get_iter_first()
|
||||
while iter:
|
||||
rowpath = model.get_path(iter)
|
||||
model[rowpath][columnid] = False
|
||||
iter = model.iter_next(iter)
|
||||
|
||||
it = self.image_store.get_iter_first()
|
||||
while it:
|
||||
image_attr = self.image_store.get_value(it, 3)
|
||||
is_select = self.image_store.get_value(it, 2)
|
||||
if is_select:
|
||||
image_name = self.image_store.get_value(it, 0)
|
||||
if image_attr == 'runnable':
|
||||
self.builder.runqemu_image(image_name, self.sel_kernel)
|
||||
elif image_attr == 'deploy':
|
||||
self.builder.deploy_image(image_name)
|
||||
it = self.image_store.iter_next(it)
|
||||
model[path][columnid] = True
|
||||
self.refresh_package_detail_box(model[path][1])
|
||||
|
||||
image_name = model[path][0]
|
||||
|
||||
def repack_box_group(self, image_name=None):
|
||||
# remove
|
||||
for button_id, button in self.button_ids.items():
|
||||
button.disconnect(button_id)
|
||||
@@ -427,59 +328,18 @@ class ImageDetailsPage (HobPage):
|
||||
if self.build_result:
|
||||
self.box_group_area.pack_start(self.build_result, expand=False, fill=False)
|
||||
self.box_group_area.pack_start(self.image_detail, expand=True, fill=True)
|
||||
if self.kernel_detail:
|
||||
self.box_group_area.pack_start(self.kernel_detail, expand=False, fill=False)
|
||||
if self.setting_detail:
|
||||
self.box_group_area.pack_start(self.setting_detail, expand=False, fill=False)
|
||||
self.box_group_area.pack_start(self.package_detail, expand=False, fill=False)
|
||||
is_runnable = self.create_bottom_buttons(self.buttonlist, image_name)
|
||||
self.create_bottom_buttons(self.buttonlist, image_name)
|
||||
self.box_group_area.pack_end(self.details_bottom_buttons, expand=False, fill=False)
|
||||
self.show_all()
|
||||
if not is_runnable:
|
||||
self.kernel_detail.hide()
|
||||
|
||||
def table_selected_cb(self, selection):
|
||||
model, paths = selection.get_selected_rows()
|
||||
if (not model) or (not paths):
|
||||
return
|
||||
|
||||
path = paths[0]
|
||||
columnid = 2
|
||||
iter = model.get_iter_first()
|
||||
while iter:
|
||||
rowpath = model.get_path(iter)
|
||||
model[rowpath][columnid] = False
|
||||
iter = model.iter_next(iter)
|
||||
|
||||
model[path][columnid] = True
|
||||
self.refresh_package_detail_box(model[path][1])
|
||||
|
||||
self.toggled_image = model[path][0]
|
||||
self.repack_box_group(self.toggled_image)
|
||||
|
||||
def change_kernel_cb(self, widget):
|
||||
kernel_path = self.builder.show_load_kernel_dialog()
|
||||
if kernel_path and self.kernel_detail:
|
||||
import os.path
|
||||
self.sel_kernel = os.path.basename(kernel_path)
|
||||
markup = self.kernel_detail.format_line("Kernel: ", self.sel_kernel)
|
||||
label = ((self.kernel_detail.get_children()[0]).get_children()[0]).get_children()[0]
|
||||
label.set_markup(markup)
|
||||
|
||||
def row_activated_cb(self, table, model, path):
|
||||
if not model:
|
||||
return
|
||||
iter = model.get_iter(path)
|
||||
image_name = model[path][0]
|
||||
if iter and model[path][2] == True:
|
||||
self.builder.runqemu_image(image_name, self.sel_kernel)
|
||||
|
||||
def create_bottom_buttons(self, buttonlist, image_name):
|
||||
# Create the buttons at the bottom
|
||||
created = False
|
||||
packed = False
|
||||
self.button_ids = {}
|
||||
is_runnable = False
|
||||
|
||||
# create button "Deploy image"
|
||||
name = "Deploy image"
|
||||
@@ -514,7 +374,15 @@ class ImageDetailsPage (HobPage):
|
||||
self.button_ids[button_id] = run_button
|
||||
self.details_bottom_buttons.pack_end(run_button, expand=False, fill=False)
|
||||
created = True
|
||||
is_runnable = True
|
||||
|
||||
if not packed:
|
||||
box = gtk.HBox(False, 6)
|
||||
box.show()
|
||||
subbox = gtk.HBox(False, 0)
|
||||
subbox.set_size_request(205, 49)
|
||||
subbox.show()
|
||||
box.add(subbox)
|
||||
self.details_bottom_buttons.pack_end(box, False, False)
|
||||
|
||||
name = "Save as template"
|
||||
if name in buttonlist:
|
||||
@@ -523,13 +391,8 @@ class ImageDetailsPage (HobPage):
|
||||
label = gtk.Label(" or ")
|
||||
self.details_bottom_buttons.pack_end(label, expand=False, fill=False)
|
||||
|
||||
# create button "Save as template"
|
||||
save_button = HobAltButton("Save as template")
|
||||
else:
|
||||
save_button = HobButton("Save as template")
|
||||
save_button.set_size_request(205, 49)
|
||||
save_button.set_flags(gtk.CAN_DEFAULT)
|
||||
packed = True
|
||||
# create button "Save as template"
|
||||
save_button = HobAltButton("Save as template")
|
||||
save_button.set_tooltip_text("Save the image configuration for reuse")
|
||||
button_id = save_button.connect("clicked", self.save_button_clicked_cb)
|
||||
self.button_ids[button_id] = save_button
|
||||
@@ -539,34 +402,34 @@ class ImageDetailsPage (HobPage):
|
||||
name = "Build new image"
|
||||
if name in buttonlist:
|
||||
# create button "Build new image"
|
||||
if packed:
|
||||
build_new_button = HobAltButton("Build new image")
|
||||
self.details_bottom_buttons.pack_start(build_new_button, expand=False, fill=False)
|
||||
else:
|
||||
build_new_button = HobButton("Build new image")
|
||||
build_new_button.set_size_request(205, 49)
|
||||
build_new_button.set_flags(gtk.CAN_DEFAULT)
|
||||
self.details_bottom_buttons.pack_end(build_new_button, expand=False, fill=False)
|
||||
build_new_button = HobAltButton("Build new image")
|
||||
build_new_button.set_tooltip_text("Create a new image from scratch")
|
||||
button_id = build_new_button.connect("clicked", self.build_new_button_clicked_cb)
|
||||
self.button_ids[button_id] = build_new_button
|
||||
self.details_bottom_buttons.pack_start(build_new_button, expand=False, fill=False)
|
||||
|
||||
return is_runnable
|
||||
def _get_selected_image(self):
|
||||
image_name = ""
|
||||
iter = self.image_store.get_iter_first()
|
||||
while iter:
|
||||
path = self.image_store.get_path(iter)
|
||||
if self.image_store[path][2]:
|
||||
image_name = self.image_store[path][0]
|
||||
break
|
||||
iter = self.image_store.iter_next(iter)
|
||||
|
||||
return image_name
|
||||
|
||||
def save_button_clicked_cb(self, button):
|
||||
self.builder.show_save_template_dialog()
|
||||
|
||||
def deploy_button_clicked_cb(self, button):
|
||||
if self.build_succeeded and self.num_toggled > 1:
|
||||
self.show_builded_images_dialog()
|
||||
return
|
||||
self.builder.deploy_image(self.toggled_image)
|
||||
image_name = self._get_selected_image()
|
||||
self.builder.deploy_image(image_name)
|
||||
|
||||
def run_button_clicked_cb(self, button):
|
||||
if self.build_succeeded and self.num_toggled > 1:
|
||||
self.show_builded_images_dialog()
|
||||
return
|
||||
self.builder.runqemu_image(self.toggled_image, self.sel_kernel)
|
||||
image_name = self._get_selected_image()
|
||||
self.builder.runqemu_image(image_name)
|
||||
|
||||
def build_new_button_clicked_cb(self, button):
|
||||
self.builder.initiate_new_build_async()
|
||||
|
||||
@@ -39,7 +39,6 @@ class PackageSelectionPage (HobPage):
|
||||
'columns' : [{
|
||||
'col_name' : 'Package name',
|
||||
'col_id' : PackageListModel.COL_NAME,
|
||||
'col_t_id' : PackageListModel.COL_FONT,
|
||||
'col_style': 'text',
|
||||
'col_min' : 100,
|
||||
'col_max' : 300,
|
||||
@@ -47,7 +46,6 @@ class PackageSelectionPage (HobPage):
|
||||
}, {
|
||||
'col_name' : 'Brought in by',
|
||||
'col_id' : PackageListModel.COL_BINB,
|
||||
'col_t_id' : PackageListModel.COL_FONT,
|
||||
'col_style': 'binb',
|
||||
'col_min' : 100,
|
||||
'col_max' : 350,
|
||||
@@ -55,7 +53,6 @@ class PackageSelectionPage (HobPage):
|
||||
}, {
|
||||
'col_name' : 'Size',
|
||||
'col_id' : PackageListModel.COL_SIZE,
|
||||
'col_t_id' : PackageListModel.COL_FONT,
|
||||
'col_style': 'text',
|
||||
'col_min' : 100,
|
||||
'col_max' : 300,
|
||||
@@ -63,9 +60,7 @@ class PackageSelectionPage (HobPage):
|
||||
}, {
|
||||
'col_name' : 'Included',
|
||||
'col_id' : PackageListModel.COL_INC,
|
||||
'col_t_id' : PackageListModel.COL_FONT,
|
||||
'col_style': 'check toggle',
|
||||
'col_group': 'tree store group',
|
||||
'col_min' : 100,
|
||||
'col_max' : 100
|
||||
}]
|
||||
@@ -75,7 +70,6 @@ class PackageSelectionPage (HobPage):
|
||||
'columns' : [{
|
||||
'col_name' : 'Package name',
|
||||
'col_id' : PackageListModel.COL_NAME,
|
||||
'col_t_id' : PackageListModel.COL_FONT,
|
||||
'col_style': 'text',
|
||||
'col_min' : 100,
|
||||
'col_max' : 400,
|
||||
@@ -83,7 +77,6 @@ class PackageSelectionPage (HobPage):
|
||||
}, {
|
||||
'col_name' : 'Size',
|
||||
'col_id' : PackageListModel.COL_SIZE,
|
||||
'col_t_id' : PackageListModel.COL_FONT,
|
||||
'col_style': 'text',
|
||||
'col_min' : 100,
|
||||
'col_max' : 500,
|
||||
@@ -92,7 +85,6 @@ class PackageSelectionPage (HobPage):
|
||||
'col_name' : 'Included',
|
||||
'col_id' : PackageListModel.COL_INC,
|
||||
'col_style': 'check toggle',
|
||||
'col_group': 'tree store group',
|
||||
'col_min' : 100,
|
||||
'col_max' : 100
|
||||
}]
|
||||
@@ -109,14 +101,8 @@ class PackageSelectionPage (HobPage):
|
||||
# create visual elements
|
||||
self.create_visual_elements()
|
||||
|
||||
def included_clicked_cb(self, button):
|
||||
self.ins.set_current_page(0)
|
||||
|
||||
def create_visual_elements(self):
|
||||
self.label = gtk.Button("Packages included: 0\nSelected packages size: 0 MB")
|
||||
self.label.set_can_default(False)
|
||||
self.label.set_relief(gtk.RELIEF_HALF)
|
||||
self.label.connect("clicked", self.included_clicked_cb)
|
||||
self.label = gtk.Label("Packages included: 0\nSelected packages size: 0 MB")
|
||||
self.eventbox = self.add_onto_top_bar(self.label, 73)
|
||||
self.pack_start(self.eventbox, expand=False, fill=False)
|
||||
self.pack_start(self.group_align, expand=True, fill=True)
|
||||
@@ -131,11 +117,11 @@ class PackageSelectionPage (HobPage):
|
||||
filter = page['filter']
|
||||
tab.set_model(self.package_model.tree_model(filter))
|
||||
tab.connect("toggled", self.table_toggled_cb, page['name'])
|
||||
tab.connect_group_selection(self.table_selected_cb)
|
||||
if page['name'] == "Included":
|
||||
tab.connect("button-release-event", self.button_click_cb)
|
||||
tab.connect("cell-fadeinout-stopped", self.after_fadeout_checkin_include)
|
||||
self.ins.append_page(tab, page['name'])
|
||||
label = gtk.Label(page['name'])
|
||||
self.ins.append_page(tab, label)
|
||||
self.tables.append(tab)
|
||||
|
||||
self.ins.set_entry("Search packages:")
|
||||
@@ -197,7 +183,7 @@ class PackageSelectionPage (HobPage):
|
||||
image_total_size += (51200 * 1024)
|
||||
image_total_size_str = HobPage._size_to_string(image_total_size)
|
||||
|
||||
self.label.set_label("Packages included: %s\nSelected packages size: %s\nTotal image size: %s" %
|
||||
self.label.set_text("Packages included: %s\nSelected packages size: %s\nTotal image size: %s" %
|
||||
(selected_packages_num, selected_packages_size_str, image_total_size_str))
|
||||
self.ins.show_indicator_icon("Included", selected_packages_num)
|
||||
|
||||
@@ -215,7 +201,7 @@ class PackageSelectionPage (HobPage):
|
||||
self.refresh_selection()
|
||||
if not self.builder.customized:
|
||||
self.builder.customized = True
|
||||
self.builder.configuration.selected_image = self.recipe_model.__custom_image__
|
||||
self.builder.configuration.selected_image = self.recipe_model.__dummy_image__
|
||||
self.builder.rcppkglist_populated()
|
||||
|
||||
self.builder.window_sensitive(True)
|
||||
@@ -261,20 +247,3 @@ class PackageSelectionPage (HobPage):
|
||||
def after_fadeout_checkin_include(self, table, ctrl, cell, tree):
|
||||
tree.set_model(self.package_model.tree_model(self.pages[0]['filter']))
|
||||
tree.expand_all()
|
||||
|
||||
def foreach_cell_change_font(self, model, path, iter, paths=None):
|
||||
# Changed the font for a group cells
|
||||
if path and iter and path[0] == paths[0]:
|
||||
self.package_model.set(iter, self.package_model.COL_FONT, "bold")
|
||||
else:
|
||||
if iter and model.iter_parent(iter) == None:
|
||||
self.package_model.set(iter, self.package_model.COL_FONT, '11')
|
||||
else:
|
||||
self.package_model.set(iter, self.package_model.COL_FONT, '10')
|
||||
|
||||
def table_selected_cb(self, selection):
|
||||
model, paths = selection.get_selected_rows()
|
||||
if paths:
|
||||
child_path = self.package_model.convert_vpath_to_path(model, paths[0])
|
||||
self.package_model.foreach(self.foreach_cell_change_font, child_path)
|
||||
|
||||
|
||||
@@ -134,14 +134,8 @@ class RecipeSelectionPage (HobPage):
|
||||
# create visual elements
|
||||
self.create_visual_elements()
|
||||
|
||||
def included_clicked_cb(self, button):
|
||||
self.ins.set_current_page(0)
|
||||
|
||||
def create_visual_elements(self):
|
||||
self.label = gtk.Button('Recipes included: 0')
|
||||
self.label.set_can_default(False)
|
||||
self.label.set_relief(gtk.RELIEF_HALF)
|
||||
self.label.connect("clicked", self.included_clicked_cb)
|
||||
self.label = gtk.Label()
|
||||
self.eventbox = self.add_onto_top_bar(self.label, 73)
|
||||
self.pack_start(self.eventbox, expand=False, fill=False)
|
||||
self.pack_start(self.group_align, expand=True, fill=True)
|
||||
@@ -159,7 +153,10 @@ class RecipeSelectionPage (HobPage):
|
||||
if page['name'] == "Included":
|
||||
tab.connect("button-release-event", self.button_click_cb)
|
||||
tab.connect("cell-fadeinout-stopped", self.after_fadeout_checkin_include)
|
||||
self.ins.append_page(tab, page['name'], page['tooltip'])
|
||||
label = gtk.Label(page['name'])
|
||||
label.set_selectable(False)
|
||||
label.set_tooltip_text(page['tooltip'])
|
||||
self.ins.append_page(tab, label)
|
||||
self.tables.append(tab)
|
||||
|
||||
self.ins.set_entry("Search recipes:")
|
||||
@@ -205,7 +202,7 @@ class RecipeSelectionPage (HobPage):
|
||||
def refresh_selection(self):
|
||||
self.builder.configuration.selected_image = self.recipe_model.get_selected_image()
|
||||
_, self.builder.configuration.selected_recipes = self.recipe_model.get_selected_recipes()
|
||||
self.label.set_label("Recipes included: %s" % len(self.builder.configuration.selected_recipes))
|
||||
self.label.set_text("Recipes included: %s" % len(self.builder.configuration.selected_recipes))
|
||||
self.ins.show_indicator_icon("Included", len(self.builder.configuration.selected_recipes))
|
||||
|
||||
def toggle_item_idle_cb(self, path, view_tree, cell, pagename):
|
||||
@@ -222,7 +219,7 @@ class RecipeSelectionPage (HobPage):
|
||||
self.refresh_selection()
|
||||
if not self.builder.customized:
|
||||
self.builder.customized = True
|
||||
self.builder.configuration.selected_image = self.recipe_model.__custom_image__
|
||||
self.builder.configuration.selected_image = self.recipe_model.__dummy_image__
|
||||
self.builder.rcppkglist_populated()
|
||||
|
||||
self.builder.window_sensitive(True)
|
||||
|
||||
@@ -101,19 +101,7 @@ class HobTemplateFile(ConfigFile):
|
||||
return self.dictionary[var]
|
||||
else:
|
||||
return ""
|
||||
|
||||
def getVersion(self):
|
||||
contents = ConfigFile.readFile(self)
|
||||
|
||||
pattern = "^\s*(\S+)\s*=\s*(\".*?\")"
|
||||
|
||||
for line in contents:
|
||||
match = re.search(pattern, line)
|
||||
if match:
|
||||
if match.group(1) == "VERSION":
|
||||
return match.group(2).strip('"')
|
||||
return None
|
||||
|
||||
|
||||
def load(self):
|
||||
contents = ConfigFile.readFile(self)
|
||||
self.dictionary.clear()
|
||||
@@ -186,9 +174,6 @@ class TemplateMgr(gobject.GObject):
|
||||
self.image_bb.save()
|
||||
self.template_hob.save()
|
||||
|
||||
def getVersion(self, path):
|
||||
return HobTemplateFile(path).getVersion()
|
||||
|
||||
def load(self, path):
|
||||
self.template_hob = HobTemplateFile(path)
|
||||
self.dictionary = self.template_hob.load()
|
||||
|
||||
@@ -22,7 +22,6 @@
|
||||
# bitbake which will allow more flexibility.
|
||||
|
||||
import os
|
||||
import bb
|
||||
|
||||
def which_terminal():
|
||||
term = bb.utils.which(os.environ["PATH"], "xterm")
|
||||
|
||||
@@ -30,7 +30,7 @@ try:
|
||||
pygtk.require('2.0') # to be certain we don't have gtk+ 1.x !?!
|
||||
gtkver = gtk.gtk_version
|
||||
pygtkver = gtk.pygtk_version
|
||||
if gtkver < (2, 20, 0) or pygtkver < (2, 21, 0):
|
||||
if gtkver < (2, 18, 0) or pygtkver < (2, 16, 0):
|
||||
sys.exit("%s,\nYou have Gtk+ %s and PyGtk %s." % (requirements,
|
||||
".".join(map(str, gtkver)),
|
||||
".".join(map(str, pygtkver))))
|
||||
|
||||
@@ -106,4 +106,4 @@ class TerminalFilter2(object):
|
||||
self.termios.tcsetattr(fd, self.termios.TCSADRAIN, self.stdinbackup)
|
||||
|
||||
def main(server, eventHandler):
|
||||
return bb.ui.knotty.main(server, eventHandler, TerminalFilter2)
|
||||
bb.ui.knotty.main(server, eventHandler, TerminalFilter2)
|
||||
|
||||
@@ -47,7 +47,7 @@
|
||||
|
||||
from __future__ import division
|
||||
import logging
|
||||
import os, sys, curses, itertools, time, subprocess
|
||||
import os, sys, curses, itertools, time
|
||||
import bb
|
||||
import xmlrpclib
|
||||
from bb import ui
|
||||
@@ -286,7 +286,7 @@ class NCursesUI:
|
||||
# bb.error("log data follows (%s)" % logfile)
|
||||
# number_of_lines = data.getVar("BBINCLUDELOGS_LINES", d)
|
||||
# if number_of_lines:
|
||||
# subprocess.call('tail -n%s %s' % (number_of_lines, logfile), shell=True)
|
||||
# os.system('tail -n%s %s' % (number_of_lines, logfile))
|
||||
# else:
|
||||
# f = open(logfile, "r")
|
||||
# while True:
|
||||
|
||||
@@ -108,10 +108,130 @@ def vercmp(ta, tb):
|
||||
r = vercmp_part(ra, rb)
|
||||
return r
|
||||
|
||||
def vercmp_string(a, b):
|
||||
ta = split_version(a)
|
||||
tb = split_version(b)
|
||||
return vercmp(ta, tb)
|
||||
_package_weights_ = {"pre":-2, "p":0, "alpha":-4, "beta":-3, "rc":-1} # dicts are unordered
|
||||
_package_ends_ = ["pre", "p", "alpha", "beta", "rc", "cvs", "bk", "HEAD" ] # so we need ordered list
|
||||
|
||||
def relparse(myver):
|
||||
"""Parses the last elements of a version number into a triplet, that can
|
||||
later be compared.
|
||||
"""
|
||||
|
||||
number = 0
|
||||
p1 = 0
|
||||
p2 = 0
|
||||
mynewver = myver.split('_')
|
||||
if len(mynewver) == 2:
|
||||
# an _package_weights_
|
||||
number = float(mynewver[0])
|
||||
match = 0
|
||||
for x in _package_ends_:
|
||||
elen = len(x)
|
||||
if mynewver[1][:elen] == x:
|
||||
match = 1
|
||||
p1 = _package_weights_[x]
|
||||
try:
|
||||
p2 = float(mynewver[1][elen:])
|
||||
except:
|
||||
p2 = 0
|
||||
break
|
||||
if not match:
|
||||
# normal number or number with letter at end
|
||||
divider = len(myver)-1
|
||||
if myver[divider:] not in "1234567890":
|
||||
# letter at end
|
||||
p1 = ord(myver[divider:])
|
||||
number = float(myver[0:divider])
|
||||
else:
|
||||
number = float(myver)
|
||||
else:
|
||||
# normal number or number with letter at end
|
||||
divider = len(myver)-1
|
||||
if myver[divider:] not in "1234567890":
|
||||
#letter at end
|
||||
p1 = ord(myver[divider:])
|
||||
number = float(myver[0:divider])
|
||||
else:
|
||||
number = float(myver)
|
||||
return [number, p1, p2]
|
||||
|
||||
__vercmp_cache__ = {}
|
||||
|
||||
def vercmp_string(val1, val2):
|
||||
"""This takes two version strings and returns an integer to tell you whether
|
||||
the versions are the same, val1>val2 or val2>val1.
|
||||
"""
|
||||
|
||||
# quick short-circuit
|
||||
if val1 == val2:
|
||||
return 0
|
||||
valkey = val1 + " " + val2
|
||||
|
||||
# cache lookup
|
||||
try:
|
||||
return __vercmp_cache__[valkey]
|
||||
try:
|
||||
return - __vercmp_cache__[val2 + " " + val1]
|
||||
except KeyError:
|
||||
pass
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
# consider 1_p2 vc 1.1
|
||||
# after expansion will become (1_p2,0) vc (1,1)
|
||||
# then 1_p2 is compared with 1 before 0 is compared with 1
|
||||
# to solve the bug we need to convert it to (1,0_p2)
|
||||
# by splitting _prepart part and adding it back _after_expansion
|
||||
|
||||
val1_prepart = val2_prepart = ''
|
||||
if val1.count('_'):
|
||||
val1, val1_prepart = val1.split('_', 1)
|
||||
if val2.count('_'):
|
||||
val2, val2_prepart = val2.split('_', 1)
|
||||
|
||||
# replace '-' by '.'
|
||||
# FIXME: Is it needed? can val1/2 contain '-'?
|
||||
|
||||
val1 = val1.split("-")
|
||||
if len(val1) == 2:
|
||||
val1[0] = val1[0] + "." + val1[1]
|
||||
val2 = val2.split("-")
|
||||
if len(val2) == 2:
|
||||
val2[0] = val2[0] + "." + val2[1]
|
||||
|
||||
val1 = val1[0].split('.')
|
||||
val2 = val2[0].split('.')
|
||||
|
||||
# add back decimal point so that .03 does not become "3" !
|
||||
for x in xrange(1, len(val1)):
|
||||
if val1[x][0] == '0' :
|
||||
val1[x] = '.' + val1[x]
|
||||
for x in xrange(1, len(val2)):
|
||||
if val2[x][0] == '0' :
|
||||
val2[x] = '.' + val2[x]
|
||||
|
||||
# extend varion numbers
|
||||
if len(val2) < len(val1):
|
||||
val2.extend(["0"]*(len(val1)-len(val2)))
|
||||
elif len(val1) < len(val2):
|
||||
val1.extend(["0"]*(len(val2)-len(val1)))
|
||||
|
||||
# add back _prepart tails
|
||||
if val1_prepart:
|
||||
val1[-1] += '_' + val1_prepart
|
||||
if val2_prepart:
|
||||
val2[-1] += '_' + val2_prepart
|
||||
# The above code will extend version numbers out so they
|
||||
# have the same number of digits.
|
||||
for x in xrange(0, len(val1)):
|
||||
cmp1 = relparse(val1[x])
|
||||
cmp2 = relparse(val2[x])
|
||||
for y in xrange(0, 3):
|
||||
myret = cmp1[y] - cmp2[y]
|
||||
if myret != 0:
|
||||
__vercmp_cache__[valkey] = myret
|
||||
return myret
|
||||
__vercmp_cache__[valkey] = 0
|
||||
return 0
|
||||
|
||||
def explode_deps(s):
|
||||
"""
|
||||
@@ -721,8 +841,6 @@ def which(path, item, direction = 0):
|
||||
for p in paths:
|
||||
next = os.path.join(p, item)
|
||||
if os.path.exists(next):
|
||||
if not os.path.isabs(next):
|
||||
next = os.path.abspath(next)
|
||||
return next
|
||||
|
||||
return ""
|
||||
|
||||
@@ -35,11 +35,6 @@
|
||||
<listitem><para>Install the Eclipse Yocto Plug-in.</para></listitem>
|
||||
<listitem><para>Configure the Eclipse Yocto Plug-in.</para></listitem>
|
||||
</orderedlist>
|
||||
<note>
|
||||
Do not install Eclipse from your distribution's package repository.
|
||||
Be sure to install Eclipse from the official Eclipse download site as directed
|
||||
in the next section.
|
||||
</note>
|
||||
</para>
|
||||
|
||||
<section id='installing-eclipse-ide'>
|
||||
@@ -64,7 +59,7 @@
|
||||
into a clean directory using the default name <filename>eclipse</filename>:
|
||||
<literallayout class='monospaced'>
|
||||
$ cd ~
|
||||
$ tar -xzvf ~/Downloads/eclipse-SDK-3.7.2-linux-gtk-x86_64.tar.gz
|
||||
$ tar -xzvf ~/Downloads/eclipse-SDK-3.7.1-linux-gtk-x86_64.tar.gz
|
||||
</literallayout>
|
||||
</para>
|
||||
|
||||
|
||||
@@ -50,9 +50,9 @@
|
||||
<revremark>Released with the Yocto Project 1.2 Release.</revremark>
|
||||
</revision>
|
||||
<revision>
|
||||
<revnumber>1.3</revnumber>
|
||||
<date>Sometime in 2012</date>
|
||||
<revremark>Released with the Yocto Project 1.3 Release.</revremark>
|
||||
<revnumber>1.2.1</revnumber>
|
||||
<date>July 2012</date>
|
||||
<revremark>Released with the Yocto Project 1.2.1 Release.</revremark>
|
||||
</revision>
|
||||
</revhistory>
|
||||
|
||||
|
||||
@@ -62,9 +62,9 @@
|
||||
<revremark>Released with the Yocto Project 1.2 Release.</revremark>
|
||||
</revision>
|
||||
<revision>
|
||||
<revnumber>1.3</revnumber>
|
||||
<date>Sometime in 2012</date>
|
||||
<revremark>Released with the Yocto Project 1.3 Release.</revremark>
|
||||
<revnumber>1.2.1</revnumber>
|
||||
<date>July 2012</date>
|
||||
<revremark>Released with the Yocto Project 1.2.1 Release.</revremark>
|
||||
</revision>
|
||||
</revhistory>
|
||||
|
||||
|
||||
@@ -75,7 +75,7 @@
|
||||
|
||||
<para>
|
||||
Some layers function as a layer to hold other BSP layers.
|
||||
An example of this type of layers is the <filename>meta-intel</filename> layer.
|
||||
An example of this type of layer is the <filename>meta-intel</filename> layer.
|
||||
The <filename>meta-intel</filename> layer contains over 10 individual BSP layers.
|
||||
</para>
|
||||
|
||||
@@ -121,6 +121,15 @@
|
||||
are separate components that happen to be combined in certain end products.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Before looking at the common form for the file structure inside a BSP Layer,
|
||||
you should be aware that some requirements do exist in order for a BSP to
|
||||
be considered compliant with the Yocto Project.
|
||||
For that list of requirements, see the
|
||||
"<link linkend='released-bsp-requirements'>Released BSP Requirements</link>"
|
||||
section.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Below is the common form for the file structure inside a BSP Layer.
|
||||
While you can use this basic form for the standard, realize that the actual structures
|
||||
@@ -644,6 +653,219 @@
|
||||
</section>
|
||||
</section>
|
||||
|
||||
<section id='requirements-and-recommendations-for-released-bsps'>
|
||||
<title>Requirements and Recommendations for Released BSPs</title>
|
||||
|
||||
<para>
|
||||
Certain requirements exist for a released BSP to be considered
|
||||
compliant with the Yocto Project.
|
||||
Additionally, a single recommendation also exists.
|
||||
This section describes the requirements and recommendation for
|
||||
released BSPs.
|
||||
</para>
|
||||
|
||||
<section id='released-bsp-requirements'>
|
||||
<title>Released BSP Requirements</title>
|
||||
|
||||
<para>
|
||||
Before looking at BSP requirements, you should consider the following:
|
||||
<itemizedlist>
|
||||
<listitem><para>The requirements here assume the BSP layer is a well-formed, "legal"
|
||||
layer that can be added to the Yocto Project.
|
||||
For guidelines on creating a Yocto Project layer that meets these base requirements, see the
|
||||
"<link linkend='bsp-layers'>BSP Layers</link>" and the
|
||||
"<ulink url='&YOCTO_DOCS_DEV_URL;#understanding-and-creating-layers'>Understanding
|
||||
and Creating Layers"</ulink> in the Yocto Project Development Manual.</para></listitem>
|
||||
<listitem><para>The requirements in this section apply regardless of how you
|
||||
ultimately package a BSP.
|
||||
You should consult the packaging and distribution guidelines for your
|
||||
specific release process.
|
||||
For an example of packaging and distribution requirements, see the
|
||||
<ulink url='https://wiki.yoctoproject.org/wiki/Third_Party_BSP_Release_Process'>Third
|
||||
Party BSP Release Process</ulink> wiki page.</para></listitem>
|
||||
<listitem><para>The requirements for the BSP as it is made available to a developer
|
||||
are completely independent of the released form of the BSP.
|
||||
For example, the BSP metadata can be contained within a Git repository
|
||||
and could have a directory structure completely different from what appears
|
||||
in the officially released BSP layer.</para></listitem>
|
||||
<listitem><para>It is not required that specific packages or package
|
||||
modifications exist in the BSP layer, beyond the requirements for general
|
||||
compliance with the Yocto Project.
|
||||
For example, no requirement exists dictating that a specific kernel or
|
||||
kernel version be used in a given BSP.</para></listitem>
|
||||
</itemizedlist>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Following are the requirements for a released BSP that conforms to the
|
||||
Yocto Project:
|
||||
<itemizedlist>
|
||||
<listitem><para><emphasis>Layer Name:</emphasis>
|
||||
The BSP must have a layer name that follows the Yocto
|
||||
Project standards.
|
||||
For information on BSP layer names, see the
|
||||
"<link linkend='bsp-layers'>BSP Layers</link>" section.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>File System Layout:</emphasis>
|
||||
When possible, use the same directory names in your
|
||||
BSP layer as listed in the <filename>recipes.txt</filename> file.
|
||||
In particular, you should place recipes
|
||||
(<filename>.bb</filename> files) and recipe
|
||||
modifications (<filename>.bbappend</filename> files) into
|
||||
<filename>recipes-*</filename> subdirectories by functional area
|
||||
as outlined in <filename>recipes.txt</filename>.
|
||||
If you cannot find a category in <filename>recipes.txt</filename>
|
||||
to fit a particular recipe, you can make up your own
|
||||
<filename>recipe-*</filename> subdirectory.
|
||||
You can find <filename>recipes.txt</filename> in the
|
||||
<filename>meta</filename> directory of the
|
||||
<ulink url='&YOCTO_DOCS_DEV_URL;#yocto-project-files'>Yocto
|
||||
Project Files</ulink>, or in the OpenEmbedded Core Layer
|
||||
(<filename>openembedded-core</filename>) found at
|
||||
<ulink url='http://git.openembedded.org/openembedded-core/tree/meta'></ulink>.
|
||||
</para>
|
||||
<para>Within any particular <filename>recipes-*</filename> category, the layout
|
||||
should match what is found in the OpenEmbedded Core
|
||||
Git repository (<filename>openembedded-core</filename>)
|
||||
or the Yocto Project Files (<filename>poky</filename>).
|
||||
In other words, make sure you place related files in appropriately
|
||||
related <filename>recipes-*</filename> subdirectories specific to the
|
||||
recipe's function, or within a subdirectory containing a set of closely-related
|
||||
recipes.
|
||||
The recipes themselves should follow the general guidelines
|
||||
for recipes used in the Yocto Project found in the
|
||||
<ulink url='https://wiki.yoctoproject.org/wiki/Recipe_%26_Patch_Style_Guide'>Yocto
|
||||
Recipe and Patch Style Guide</ulink>.</para></listitem>
|
||||
<listitem><para><emphasis>License File:</emphasis>
|
||||
You must include a license file in the
|
||||
<filename>meta-<bsp_name></filename> directory.
|
||||
This license covers the BSP metadata as a whole.
|
||||
You must specify which license to use since there is no
|
||||
default license if one is not specified.
|
||||
See the
|
||||
<ulink url='&YOCTO_GIT_URL;/cgit.cgi/meta-intel/tree/meta-fishriver/COPYING.MIT'><filename>COPYING.MIT</filename></ulink>
|
||||
file for the Fish River BSP in the <filename>meta-fishriver</filename> BSP layer
|
||||
as an example.</para></listitem>
|
||||
<listitem><para><emphasis>README File:</emphasis>
|
||||
You must include a <filename>README</filename> file in the
|
||||
<filename>meta-<bsp_name></filename> directory.
|
||||
See the
|
||||
<ulink url='&YOCTO_GIT_URL;/cgit.cgi/meta-intel/tree/meta-fishriver/README'><filename>README</filename></ulink>
|
||||
file for the Fish River BSP in the <filename>meta-fishriver</filename> BSP layer
|
||||
as an example.</para>
|
||||
<para>At a minimum, the <filename>README</filename> file should
|
||||
contain the following:
|
||||
<itemizedlist>
|
||||
<listitem><para>A brief description about the hardware the BSP
|
||||
targets.</para></listitem>
|
||||
<listitem><para>A list of all the dependencies a
|
||||
on which a BSP layer depends.
|
||||
These dependencies are typically a list of required layers needed
|
||||
to build the BSP.
|
||||
However, the dependencies should also contain information regarding
|
||||
any other dependencies the BSP might have.</para></listitem>
|
||||
<listitem><para>Any required special licensing information.
|
||||
For example, this information includes information on
|
||||
special variables needed to satisfy a EULA,
|
||||
or instructions on information needed to build or distribute
|
||||
binaries built from the BSP metadata.</para></listitem>
|
||||
<listitem><para>The name and contact information for the
|
||||
BSP layer maintainer.
|
||||
This is the person to whom patches and questions should
|
||||
be sent.</para></listitem>
|
||||
<listitem><para>Instructions on how to build the BSP using the BSP
|
||||
layer.</para></listitem>
|
||||
<listitem><para>Instructions on how to boot the BSP build from
|
||||
the BSP layer.</para></listitem>
|
||||
<listitem><para>Instructions on how to boot the binary images
|
||||
contained in the <filename>/binary</filename> directory,
|
||||
if present.</para></listitem>
|
||||
<listitem><para>Information on any known bugs or issues that users
|
||||
should know about when either building or booting the BSP
|
||||
binaries.</para></listitem>
|
||||
</itemizedlist></para></listitem>
|
||||
<listitem><para><emphasis>README.sources File:</emphasis>
|
||||
You must include a <filename>README.sources</filename> in the
|
||||
<filename>meta-<bsp_name></filename> directory.
|
||||
This file specifies exactly where you can find the sources used to
|
||||
generate the binary images contained in the
|
||||
<filename>/binary</filename> directory, if present.
|
||||
See the
|
||||
<ulink url='&YOCTO_GIT_URL;/cgit.cgi/meta-intel/tree/meta-fishriver/README.sources'><filename>README.sources</filename></ulink>
|
||||
file for the Fish River BSP in the <filename>meta-fishriver</filename> BSP layer
|
||||
as an example.</para></listitem>
|
||||
<listitem><para><emphasis>Layer Configuration File:</emphasis>
|
||||
You must include a <filename>conf/layer.conf</filename> in the
|
||||
<filename>meta-<bsp_name></filename> directory.
|
||||
This file identifies the <filename>meta-<bsp_name></filename>
|
||||
BSP layer as a layer to the build system.</para></listitem>
|
||||
<listitem><para><emphasis>Machine Configuration File:</emphasis>
|
||||
You must include a <filename>conf/machine/<bsp_name>.conf</filename>
|
||||
in the <filename>meta-<bsp_name></filename> directory.
|
||||
This configuration file defines a machine target that can be built
|
||||
using the BSP layer.
|
||||
Multiple machine configuration files define variations of machine
|
||||
configurations that are supported by the BSP.
|
||||
If a BSP supports more multiple machine variations, you need to
|
||||
adequately describe each variation in the BSP
|
||||
<filename>README</filename> file.
|
||||
Do not use multiple machine configuration files to describe disparate
|
||||
hardware.
|
||||
Multiple machine configuration files should describe very similar targets.
|
||||
If you do have very different targets, you should create a separate
|
||||
BSP.
|
||||
<note>It is completely possible for a developer to structure the
|
||||
working repository as a conglomeration of unrelated BSP
|
||||
files, and to possibly generate specifically targeted 'release' BSPs
|
||||
from that directory using scripts or some other mechanism.
|
||||
Such considerations are outside the scope of this document.</note>
|
||||
</para></listitem>
|
||||
</itemizedlist>
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='released-bsp-recommendations'>
|
||||
<title>Released BSP Recommendations</title>
|
||||
|
||||
<para>
|
||||
Following are recommendations for a released BSP that conforms to the
|
||||
Yocto Project:
|
||||
<itemizedlist>
|
||||
<listitem><para><emphasis>Bootable Images:</emphasis>
|
||||
BSP releases
|
||||
can contain one or more bootable images.
|
||||
Including bootable images allows users to easily try out the BSP
|
||||
on their own hardware.</para>
|
||||
<para>In some cases, it might not be convenient to include a
|
||||
bootable image.
|
||||
In this case, you might want to make two versions of the
|
||||
BSP available: one that contains binary images, and one
|
||||
that does not.
|
||||
The version that does not contain bootable images avoids
|
||||
unnecessary download times for users not interested in the images.
|
||||
</para>
|
||||
<para>If you need to distribute a BSP and include bootable images or build kernel and
|
||||
filesystems meant to allow users to boot the BSP for evaluation
|
||||
purposes, you should put the images and artifacts within a
|
||||
<filename>binary/</filename> subdirectory located in the
|
||||
<filename>meta-<bsp_name></filename> directory.
|
||||
<note>If you do include a bootable image as part of the BSP and the image
|
||||
was built by software covered by the GPL or other open source licenses,
|
||||
it is your responsibility to understand
|
||||
and meet all licensing requirements, which could include distribution
|
||||
of source files.</note></para></listitem>
|
||||
<listitem><para><emphasis>Use a Yocto Linux Kernel:</emphasis>
|
||||
Kernel recipes in the BSP should be based on a Yocto Linux kernel.
|
||||
Basing your recipes on these kernels reduces the costs for maintaining
|
||||
the BSP and increases its scalability.
|
||||
See the <filename>Yocto Linux Kernel</filename> category in the
|
||||
<ulink url='&YOCTO_GIT_URL;/cgit.cgi'><filename>Yocto Source Repositories</filename></ulink>
|
||||
for these kernels.</para></listitem>
|
||||
</itemizedlist>
|
||||
</para>
|
||||
</section>
|
||||
</section>
|
||||
|
||||
<section id='customizing-a-recipe-for-a-bsp'>
|
||||
<title>Customizing a Recipe for a BSP</title>
|
||||
|
||||
@@ -760,7 +982,7 @@
|
||||
restart the build to continue where it left off.
|
||||
During the build, the prompt will not appear again
|
||||
since you have satisfied the requirement.</para>
|
||||
<para>Once the appropriate license flags are whitelisted
|
||||
<para>Once the appropriate license flags are on the white list
|
||||
in the <filename>LICENSE_FLAGS_WHITELIST</filename> variable, you
|
||||
can build the encumbered image with no change at all
|
||||
to the normal build process.</para></listitem>
|
||||
@@ -931,7 +1153,7 @@
|
||||
<para>
|
||||
Now that you know where these two commands reside and how to access information
|
||||
on them, you should find it relatively straightforward to discover the commands
|
||||
necessary to create a BSP and perform basic kernel maintainence on that BSP using
|
||||
necessary to create a BSP and perform basic kernel maintenance on that BSP using
|
||||
the tools.
|
||||
The next sections provide a concrete starting point to expand on a few points that
|
||||
might not be immediately obvious or that could use further explanation.
|
||||
@@ -990,7 +1212,7 @@
|
||||
In every other way, this architecture is representative of how creating a BSP for
|
||||
a 'real' machine would work.
|
||||
The reason the example uses this architecture is because it is an emulated architecture
|
||||
and can easily be followed without requireing actual hardware.
|
||||
and can easily be followed without requiring actual hardware.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
@@ -1059,7 +1281,7 @@
|
||||
If you enter 'n', the script prompts you to further enter the kernel
|
||||
you do want to use (e.g. 3.0, 3.2_preempt-rt, etc.).</para></listitem>
|
||||
<listitem><para>Next, the script asks whether you would like to have a new
|
||||
branch created especially for your BSPin the local
|
||||
branch created especially for your BSP in the local
|
||||
<ulink url='&YOCTO_DOCS_DEV_URL;#local-kernel-files'>Linux Yocto Kernel</ulink>
|
||||
Git repository.
|
||||
If not, then the script re-uses an existing branch.</para>
|
||||
|
||||
@@ -178,6 +178,21 @@
|
||||
<filename>poky-extras</filename> Git Repository</link>"
|
||||
for information on how to get the <filename>poky-extras</filename> repository.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Because this example uses the Yocto Project &DISTRO; Release code
|
||||
named "&DISTRO_NAME;", which maps to the <filename>&DISTRO_NAME;</filename>
|
||||
branch in the repository, you need to be sure you are using that
|
||||
branch for <filename>poky-extra</filename>.
|
||||
The following commands create and checkout the local
|
||||
branch you are using for the <filename>&DISTRO_NAME;</filename>
|
||||
branch:
|
||||
<literallayout class='monospaced'>
|
||||
$ git checkout -b &DISTRO_NAME; origin/&DISTRO_NAME;
|
||||
Branch &DISTRO_NAME; set up to track remote branch &DISTRO_NAME; from origin.
|
||||
Switched to a new branch '&DISTRO_NAME;'
|
||||
</literallayout>
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='setting-up-the-bare-clone-and-its-copy'>
|
||||
@@ -442,19 +457,15 @@
|
||||
</para>
|
||||
|
||||
<note>
|
||||
<para>Before attempting to build the modified kernel, there is one more set of changes you
|
||||
Before attempting to build the modified kernel, there is one more set of changes you
|
||||
need to make in the <filename>meta-kernel-dev</filename> layer.
|
||||
Because all the kernel <filename>.bbappend</filename> files are parsed during the
|
||||
build process regardless of whether you are using them or not, you should either
|
||||
comment out the <filename>COMPATIBLE_MACHINE</filename> statements in all
|
||||
unused <filename>.bbappend</filename> files, or simply remove (or rename) all the files
|
||||
unused <filename>.bbappend</filename> files.
|
||||
Alternatively, you can simply remove all the files
|
||||
except the one you are using for the build
|
||||
(i.e. <filename>linux-yocto_3.2.bbappend</filename> in this example).</para>
|
||||
<para>If you do not make one of these two adjustments, your machine will be compatible
|
||||
with all the kernel recipes in the <filename>meta-kernel-dev</filename> layer.
|
||||
When your machine is compatible with all the kernel recipes, the build attempts
|
||||
to build all kernels in the layer.
|
||||
You could end up with build errors blocking your work.</para>
|
||||
(i.e. <filename>linux-yocto_3.2.bbappend</filename> in this example).
|
||||
</note>
|
||||
</section>
|
||||
|
||||
@@ -764,6 +775,17 @@
|
||||
$ bitbake linux-yocto
|
||||
</literallayout>
|
||||
</para>
|
||||
|
||||
<note>
|
||||
Manually turning off a kernel configuration setting such as
|
||||
<filename>CONFIG_SMP</filename> can cause the kernel configuration audit
|
||||
to issue warnings during the build.
|
||||
In this example, warnings appear telling you that the expected value
|
||||
<filename>CONFIG_SMP</filename> does not appear in the <filename>.config</filename>
|
||||
file.
|
||||
Because in this example you specifically turned off <filename>CONFIG_SMP</filename>,
|
||||
you can safely ignore the apparent conflict.
|
||||
</note>
|
||||
|
||||
<para>
|
||||
Now run the QEMU emulator and pass it the same multi-processor option as before:
|
||||
@@ -811,8 +833,352 @@
|
||||
width="2in" depth="3in" align="center" scalefit="1" />
|
||||
</para>
|
||||
</section>
|
||||
|
||||
|
||||
|
||||
<!-- <section id='is-vfat-supported'>
|
||||
<title>Is VFAT Supported?</title>
|
||||
|
||||
<para>
|
||||
<literallayout class='monospaced'>
|
||||
I entered runqemu qemux86. This fires up the emulator and uses the
|
||||
image and filesystem in the build area created in the previous section.
|
||||
|
||||
Then I copied over a pre-created and formatted 5.2MB VFAT file named vfat.img.
|
||||
I did this with scp vfat.img root@192.168.7.2:
|
||||
The file is in the root directory.
|
||||
I had to do this because the mkfs.vfat vfat.img command does not work.
|
||||
mkfs is not recognized in the qemu terminal session.
|
||||
|
||||
when I try mount -o loop -t vfat vfat.img mnt/ I get the error
|
||||
mount: can't set up loop device: No space left on device.
|
||||
This error is because the loop module is not currently in the kernel image.
|
||||
However, this module is available in the
|
||||
build area in the tarball modules-2.6.37.6-yocto-starndard+-20-qemux86.tgz.
|
||||
You can add this to the kernel image by adding the
|
||||
IMAGE_INSTALL += " kernel-module-loop" statement at the top of the local.conf
|
||||
file in the build area and then rebuilding the kernel using bitbake.
|
||||
It should just build whatever is necessary and not go through an entire build again.
|
||||
|
||||
|
||||
|
||||
|
||||
The <filename>menuconfig</filename> tool provides an interactive method with which
|
||||
to set kernel configurations.
|
||||
In order to use <filename>menuconfig</filename> from within the BitBake environment
|
||||
you need to source an environment setup script.
|
||||
This script is located in the local Yocto Project file structure and is called
|
||||
<filename>oe-init-build-env</filename>.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The following command sets up the environment:
|
||||
<literallayout class='monospaced'>
|
||||
$ cd ~/poky
|
||||
$ source oe-init-build-env
|
||||
$ runqemu qemux86
|
||||
Continuing with the following parameters:
|
||||
KERNEL: [/home/scottrif/poky/build/tmp/deploy/images/bzImage-qemux86.bin]
|
||||
ROOTFS: [/home/scottrif/poky/build/tmp/deploy/images/core-image-sato-qemux86.ext3]
|
||||
FSTYPE: [ext3]
|
||||
Setting up tap interface under sudo
|
||||
Acquiring lockfile for tap0...
|
||||
WARNING: distccd not present, no distcc support loaded.
|
||||
Running qemu...
|
||||
/home/scottrif/poky/build/tmp/sysroots/x86_64-linux/usr/bin/qemu
|
||||
-kernel /home/scottrif/poky/build/tmp/deploy/images/bzImage-qemux86.bin
|
||||
-net nic,vlan=0 -net tap,vlan=0,ifname=tap0,script=no,downscript=no
|
||||
-hda /home/scottrif/poky/build/tmp/deploy/images/core-image-sato-qemux86.ext3
|
||||
-show-cursor -usb -usbdevice wacom-tablet -vga vmware -enable-gl -no-reboot
|
||||
-m 128 ‐‐append "vga=0 root=/dev/hda rw mem=128M ip=192.168.7.2::192.168.7.1:255.255.255.0 oprofile.timer=1 "
|
||||
Enabling opengl
|
||||
vmsvga_value_write: guest runs Linux.
|
||||
</literallayout>
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='prepare-to-use-menuconfig'>
|
||||
<title>Prepare to use <filename>menuconfig</filename></title>
|
||||
|
||||
|
||||
<para>
|
||||
[WRITER'S NOTE: Stuff from here down are crib notes]
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Once menuconfig fires up you see all kinds of categories that you can interactively
|
||||
investigate.
|
||||
If they have an "M" in it then the feature is "modularized".
|
||||
I guess that means that it needs to be manually linked in when the
|
||||
kernel is booted??? (Not sure).
|
||||
If they have an "*" then the feature is automatically part of the kernel.]
|
||||
</para>
|
||||
|
||||
<para>
|
||||
So the tmp/work/ area was created in poky and there is a .config file in there and
|
||||
a .config.old file.
|
||||
The old one must have been created when I exited from menuconfig after poking around
|
||||
a bit.
|
||||
Nope - appears to just be created automatically.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
A good practice is to first determine what configurations you have for the kernel.
|
||||
You can see the results by looking in the .config file in the build/tmp/work/qemux86-poky-linux area
|
||||
of the local YP files.
|
||||
There is a directory named linux-yocto-2.6.37* in the directory.
|
||||
In that directory is a directory named linux-qemux86-standard-build.
|
||||
In that directory you will find a file named .config that is the configuration file
|
||||
for the kernel that will be used when you build the kernel.
|
||||
You can open that file up and examine it.
|
||||
If you do a search for "VFAT" you will see that that particular configuration is not
|
||||
enabled for the kernel.
|
||||
This means that you cannot print a VFAT text file, or for that matter, even mount one
|
||||
from the image if you were to build it at this point.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
You can prove the point by actually trying it at this point.
|
||||
Here are the commands:
|
||||
<literallayout class='monospaced'>
|
||||
$ mkdir ~/vfat-test
|
||||
$ cd ~/vfat-test
|
||||
$ dd if=/dev/zero of=vfat.img bs=1024 count=5000 [creates a 5MB disk image]
|
||||
5+0 records in
|
||||
5+0 records out
|
||||
5242880 bytes (5.2 MB) copied, 0.00798912 s, 656 MB/s
|
||||
$ ls -lah [lists the contents of the new image. l=long, a=all, h=human readable]
|
||||
total 5.1M
|
||||
drwxr-xr-x 2 srifenbark scottrif 4.0K 2011-08-01 08:18 .
|
||||
drwxr-xr-x 66 srifenbark scottrif 4.0K 2011-08-01 08:14 ..
|
||||
-rw-r‐‐r‐‐ 1 srifenbark scottrif 5.0M 2011-08-01 08:18 vfat.img
|
||||
$ mkfs.vfat vfat.img [formats the disk image]
|
||||
mkfs.vfat 3.0.7 (24 Dec 2009)
|
||||
$ mkdir mnt [mounts the disk image]
|
||||
$ sudo su [gives you root privilege]
|
||||
# mount -o loop vfat.img mnt [mounts it as a loop device]
|
||||
# ls mnt [shows nothing in mnt]
|
||||
# mount [lists the mounted filesystems - note/dev/loop0]
|
||||
/dev/sda1 on / type ext4 (rw,errors=remount-ro)
|
||||
proc on /proc type proc (rw,noexec,nosuid,nodev)
|
||||
none on /sys type sysfs (rw,noexec,nosuid,nodev)
|
||||
none on /sys/fs/fuse/connections type fusectl (rw)
|
||||
none on /sys/kernel/debug type debugfs (rw)
|
||||
none on /sys/kernel/security type securityfs (rw)
|
||||
none on /dev type devtmpfs (rw,mode=0755)
|
||||
none on /dev/pts type devpts (rw,noexec,nosuid,gid=5,mode=0620)
|
||||
none on /dev/shm type tmpfs (rw,nosuid,nodev)
|
||||
none on /var/run type tmpfs (rw,nosuid,mode=0755)
|
||||
none on /var/lock type tmpfs (rw,noexec,nosuid,nodev)
|
||||
none on /lib/init/rw type tmpfs (rw,nosuid,mode=0755)
|
||||
binfmt_misc on /proc/sys/fs/binfmt_misc type binfmt_misc (rw,noexec,nosuid,nodev)
|
||||
gvfs-fuse-daemon on /home/scottrif/.gvfs type fuse.gvfs-fuse-daemon (rw,nosuid,nodev,user=srifenbark)
|
||||
/dev/loop0 on /home/scottrif/vfat-test/mnt type vfat (rw)
|
||||
# echo "hello world" > mnt/hello.txt [creates a text file in the mounted VFAT system]
|
||||
# ls mnt [verifies the file is there]
|
||||
hello.txt
|
||||
# cat mnt/hello.txt [displays the contents of the file created]
|
||||
hello world
|
||||
# umount mnt [unmounts the system and destroys the loop]
|
||||
# exit [gets out of privileged user mode]
|
||||
exit
|
||||
|
||||
$ lsmod [this stuff Darren did to show me ]
|
||||
Module Size Used by [the status of modules in the regular linux kernel]
|
||||
nls_iso8859_1 4633 0
|
||||
nls_cp437 6351 0
|
||||
vfat 10866 0
|
||||
fat 55350 1 vfat
|
||||
snd_hda_codec_atihdmi 3023 1
|
||||
binfmt_misc 7960 1
|
||||
snd_hda_codec_realtek 279008 1
|
||||
ppdev 6375 0
|
||||
snd_hda_intel 25805 2
|
||||
fbcon 39270 71
|
||||
tileblit 2487 1 fbcon
|
||||
font 8053 1 fbcon
|
||||
bitblit 5811 1 fbcon
|
||||
snd_hda_codec 85759 3 snd_hda_codec_atihdmi,snd_hda_codec_realtek,snd_hda_intel
|
||||
softcursor 1565 1 bitblit
|
||||
snd_seq_dummy 1782 0
|
||||
snd_hwdep 6924 1 snd_hda_codec
|
||||
vga16fb 12757 0
|
||||
snd_pcm_oss 41394 0
|
||||
snd_mixer_oss 16299 1 snd_pcm_oss
|
||||
snd_pcm 87946 3 snd_hda_intel,snd_hda_codec,snd_pcm_oss
|
||||
vgastate 9857 1 vga16fb
|
||||
snd_seq_oss 31191 0
|
||||
snd_seq_midi 5829 0
|
||||
snd_rawmidi 23420 1 snd_seq_midi
|
||||
radeon 744506 3
|
||||
snd_seq_midi_event 7267 2 snd_seq_oss,snd_seq_midi
|
||||
ttm 61007 1 radeon
|
||||
snd_seq 57481 6 snd_seq_dummy,snd_seq_oss,snd_seq_midi,snd_seq_midi_event
|
||||
drm_kms_helper 30742 1 radeon
|
||||
snd_timer 23649 2 snd_pcm,snd_seq
|
||||
snd_seq_device 6888 5 snd_seq_dummy,snd_seq_oss,snd_seq_midi,snd_rawmidi,snd_seq
|
||||
usb_storage 50377 0
|
||||
snd 71283 16 \
|
||||
snd_hda_codec_realtek,snd_hda_intel,snd_hda_codec, \
|
||||
snd_hwdep,snd_pcm_oss,snd_mixer_oss,snd_pcm, \
|
||||
snd_seq_oss,snd_rawmidi,snd_seq,snd_timer,snd_seq_device
|
||||
soundcore 8052 1 snd
|
||||
psmouse 65040 0
|
||||
drm 198886 5 radeon,ttm,drm_kms_helper
|
||||
i2c_algo_bit 6024 1 radeon
|
||||
serio_raw 4918 0
|
||||
snd_page_alloc 8500 2 snd_hda_intel,snd_pcm
|
||||
dell_wmi 2177 0
|
||||
dcdbas 6886 0
|
||||
lp 9336 0
|
||||
parport 37160 2 ppdev,lp
|
||||
usbhid 41116 0
|
||||
ohci1394 30260 0
|
||||
hid 83888 1 usbhid
|
||||
ieee1394 94771 1 ohci1394
|
||||
tg3 122382 0
|
||||
</literallayout>
|
||||
</para>
|
||||
</section>
|
||||
</section> -->
|
||||
</appendix>
|
||||
|
||||
<!--
|
||||
|
||||
|
||||
EXTRA STUFF I MIGHT NEED BUT NOT SURE RIGHT NOW.
|
||||
|
||||
In the standard layer structure you have several areas that you need to examine or
|
||||
modify.
|
||||
For this example the layer contains four areas:
|
||||
<itemizedlist>
|
||||
<listitem><para><emphasis><filename>conf</filename></emphasis> - Contains the
|
||||
<filename>layer.conf</filename> that identifies the location of the recipe files.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis><filename>images</filename></emphasis> - Contains the
|
||||
image recipe file.
|
||||
This recipe includes the base image you will be using and specifies other
|
||||
packages the image might need.</para></listitem>
|
||||
<listitem><para><emphasis><filename>recipes-bsp</filename></emphasis> - Contains
|
||||
recipes specific to the hardware for which you are developing the kernel.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis><filename>recipes-kernel</filename></emphasis> - Contains the
|
||||
"append" files that add information to the main recipe kernel.
|
||||
</para></listitem>
|
||||
</itemizedlist>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Let's take a look at the <filename>layer.conf</filename> in the
|
||||
<filename>conf</filename> directory first.
|
||||
This configuration file enables the Yocto Project build system to locate and
|
||||
use the information in your new layer.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The variable <filename>BBPATH</filename> needs to include the path to your layer
|
||||
as follows:
|
||||
<literallayout class='monospaced'>
|
||||
BBPATH := "${BBPATH}:${LAYERDIR}"
|
||||
</literallayout>
|
||||
And, the variable <filename>BBFILES</filename> needs to be modified to include your
|
||||
recipe and append files:
|
||||
<literallayout class='monospaced'>
|
||||
BBFILES := "${BBFILES} ${LAYERDIR}/images/*.bb \
|
||||
${LAYERDIR}/images/*.bbappend \
|
||||
${LAYERDIR}/recipes-*/*/*.bb \
|
||||
${LAYERDIR}/recipes-*/*/*.bbappend"
|
||||
</literallayout>
|
||||
Finally, you need to be sure to use your layer name in these variables at the
|
||||
end of the file:
|
||||
<literallayout class='monospaced'>
|
||||
BBFILE_COLLECTIONS += "elc"
|
||||
BBFILE_PATTERN_elc := "^${LAYERDIR}/"
|
||||
BBFILE_PRIORITY_elc = "9"
|
||||
</literallayout>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The <filename>images</filename> directory contains an append file that helps
|
||||
further define the image.
|
||||
In our example, the base image is <filename>core-image-minimal</filename>.
|
||||
The image does, however, need some additional modules that we are using
|
||||
for this example.
|
||||
These modules support the amixer functionality.
|
||||
Here is the append file:
|
||||
<literallayout class='monospaced'>
|
||||
require recipes-core/images/poky-image-minimal.bb
|
||||
|
||||
IMAGE_INSTALL += "dropbear alsa-utils-aplay alsa-utils-alsamixer"
|
||||
IMAGE_INSTALL_append_qemux86 += " kernel-module-snd-ens1370 \
|
||||
kernel-module-snd-rawmidi kernel-module-loop kernel-module-nls-cp437 \
|
||||
kernel-module-nls-iso8859-1 qemux86-audio alsa-utils-amixer"
|
||||
|
||||
LICENSE = "MIT"
|
||||
</literallayout>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
While the focus of this example is not on the BSP, it is worth mentioning that the
|
||||
<filename>recipes-bsp</filename> directory has the recipes and append files for
|
||||
features that the hardware requires.
|
||||
In this example, there is a script and a recipe to support the
|
||||
<filename>amixer</filename> functionality in QEMU.
|
||||
It is beyond the scope of this manual to go too deeply into the script.
|
||||
Suffice it to say that the script tests for the presence of the mixer, sets up
|
||||
default mixer values, enables the mixer, unmutes master and then
|
||||
sets the volume to 100.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The recipe <filename>qemux86-audio.bb</filename> installs and runs the
|
||||
<filename>amixer</filename> when the system boots.
|
||||
Here is the recipe:
|
||||
<literallayout class='monospaced'>
|
||||
SUMMARY = "Provide a basic init script to enable audio"
|
||||
DESCRIPTION = "Set the volume and unmute the Front mixer setting during boot."
|
||||
SECTION = "base"
|
||||
LICENSE = "MIT"
|
||||
LIC_FILES_CHKSUM = "file://${POKYBASE}/LICENSE;md5=3f40d7994397109285ec7b81fdeb3b58"
|
||||
|
||||
PR = "r4"
|
||||
|
||||
inherit update-rc.d
|
||||
|
||||
RDEPENDS = "alsa-utils-amixer"
|
||||
|
||||
SRC_URI = "file://qemux86-audio"
|
||||
|
||||
INITSCRIPT_NAME = "qemux86-audio"
|
||||
INITSCRIPT_PARAMS = "defaults 90"
|
||||
|
||||
do_install() {
|
||||
install -d ${D}${sysconfdir} \
|
||||
${D}${sysconfdir}/init.d
|
||||
install -m 0755 ${WORKDIR}/qemux86-audio ${D}${sysconfdir}/init.d
|
||||
cat ${WORKDIR}/${INITSCRIPT_NAME} | \
|
||||
sed -e 's,/etc,${sysconfdir},g' \
|
||||
-e 's,/usr/sbin,${sbindir},g' \
|
||||
-e 's,/var,${localstatedir},g' \
|
||||
-e 's,/usr/bin,${bindir},g' \
|
||||
-e 's,/usr,${prefix},g' > ${D}${sysconfdir}/init.d/${INITSCRIPT_NAME}
|
||||
chmod 755 ${D}${sysconfdir}/init.d/${INITSCRIPT_NAME}
|
||||
}
|
||||
</literallayout>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The last area to look at is <filename>recipes-kernel</filename>.
|
||||
This area holds configuration fragments and kernel append files.
|
||||
The append file must have the same name as the kernel recipe, which is
|
||||
<filename>linux-yocto-2.6.37</filename> in this example.
|
||||
The file can contain <filename>SRC_URI</filename> statements to point to configuration
|
||||
fragments you might have in the layer.
|
||||
The file can also contain <filename>KERNEL_FEATURES</filename> statements that specify
|
||||
included kernel configurations that ship with the Yocto Project.
|
||||
</para>
|
||||
-->
|
||||
|
||||
<!--
|
||||
vim: expandtab tw=80 ts=4
|
||||
-->
|
||||
|
||||
@@ -40,9 +40,9 @@
|
||||
<revremark>Released with the Yocto Project 1.2 Release.</revremark>
|
||||
</revision>
|
||||
<revision>
|
||||
<revnumber>1.3</revnumber>
|
||||
<date>Sometime in 2012</date>
|
||||
<revremark>Released with the Yocto Project 1.3 Release.</revremark>
|
||||
<revnumber>1.2.1</revnumber>
|
||||
<date>July 2012</date>
|
||||
<revremark>Released with the Yocto Project 1.2.1 Release.</revremark>
|
||||
</revision>
|
||||
</revhistory>
|
||||
|
||||
|
||||
@@ -46,9 +46,9 @@
|
||||
You can find the files used to describe all the valid features and BSPs
|
||||
in the Yocto Project kernel in any clone of the Linux Yocto kernel source repository Git tree.
|
||||
For example, the following command clones the Yocto Project baseline kernel that
|
||||
branched off of <filename>linux.org</filename> version 3.4:
|
||||
branched off of <filename>linux.org</filename> version 3.0:
|
||||
<literallayout class='monospaced'>
|
||||
$ git clone git://git.yoctoproject.org/linux-yocto-3.4
|
||||
$ git clone git://git.yoctoproject.org/linux-yocto-3.0
|
||||
</literallayout>
|
||||
For another example of how to set up a local Git repository of the Linux Yocto
|
||||
kernel files, see the
|
||||
@@ -58,9 +58,9 @@
|
||||
Once you have cloned the kernel Git repository on your local machine, you can
|
||||
switch to the <filename>meta</filename> branch within the repository.
|
||||
Here is an example that assumes the local Git repository for the kernel is in
|
||||
a top-level directory named <filename>linux-yocto-3.4</filename>:
|
||||
a top-level directory named <filename>linux-yocto-3.0</filename>:
|
||||
<literallayout class='monospaced'>
|
||||
$ cd ~/linux-yocto-3.4
|
||||
$ cd ~/linux-yocto-3.0
|
||||
$ git checkout -b meta origin/meta
|
||||
</literallayout>
|
||||
Once you have checked out and switched to the <filename>meta</filename> branch,
|
||||
@@ -598,9 +598,9 @@
|
||||
<para>
|
||||
For example, the following command pushes the changes from your local branch
|
||||
<filename>yocto/standard/common-pc/base</filename> to the remote branch with the same name
|
||||
in the master repository <filename>//git.mycompany.com/pub/git/kernel-3.4</filename>.
|
||||
in the master repository <filename>//git.mycompany.com/pub/git/kernel-3.0</filename>.
|
||||
<literallayout class='monospaced'>
|
||||
> git push ssh://git.mycompany.com/pub/git/kernel-3.4 \
|
||||
> git push ssh://git.mycompany.com/pub/git/kernel-3.0 \
|
||||
yocto/standard/common-pc/base:yocto/standard/common-pc/base
|
||||
</literallayout>
|
||||
</para>
|
||||
|
||||
@@ -55,9 +55,9 @@
|
||||
<revremark>Released with the Yocto Project 1.2 Release.</revremark>
|
||||
</revision>
|
||||
<revision>
|
||||
<revnumber>1.3</revnumber>
|
||||
<date>Sometime in 2012</date>
|
||||
<revremark>Released with the Yocto Project 1.3 Release.</revremark>
|
||||
<revnumber>1.2.1</revnumber>
|
||||
<date>July 2012</date>
|
||||
<revremark>Released with the Yocto Project 1.2.1 Release.</revremark>
|
||||
</revision>
|
||||
</revhistory>
|
||||
|
||||
|
||||
@@ -69,9 +69,9 @@
|
||||
<revremark>Released with the Yocto Project 1.2 Release.</revremark>
|
||||
</revision>
|
||||
<revision>
|
||||
<revnumber>1.3</revnumber>
|
||||
<date>Sometime in 2012</date>
|
||||
<revremark>Released with the Yocto Project 1.3 Release.</revremark>
|
||||
<revnumber>1.2.1</revnumber>
|
||||
<date>July 2012</date>
|
||||
<revremark>Released with the Yocto Project 1.2.1 Release.</revremark>
|
||||
</revision>
|
||||
</revhistory>
|
||||
|
||||
|
||||
@@ -46,10 +46,7 @@
|
||||
<listitem><para><emphasis><filename>core-image-minimal</filename>:</emphasis>
|
||||
A small image just capable of allowing a device to boot.</para></listitem>
|
||||
<listitem><para><emphasis><filename>core-image-minimal-dev</filename>:</emphasis>
|
||||
A <filename>core-image-minimal</filename> image suitable for development work
|
||||
using the host.
|
||||
The image includes headers and libraries you can use in a host development
|
||||
environment.
|
||||
A <filename>core-image-minimal</filename> image suitable for development work.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis><filename>core-image-minimal-initramfs</filename>:</emphasis>
|
||||
A <filename>core-image-minimal</filename> image that has the Minimal RAM-based
|
||||
@@ -68,17 +65,13 @@
|
||||
A <filename>core-image-basic</filename> image suitable for implementations
|
||||
that conform to Linux Standard Base (LSB).</para></listitem>
|
||||
<listitem><para><emphasis><filename>core-image-lsb-dev</filename>:</emphasis>
|
||||
A <filename>core-image-lsb</filename> image that is suitable for development work
|
||||
using the host.
|
||||
The image includes headers and libraries you can use in a host development
|
||||
environment.
|
||||
A <filename>core-image-lsb</filename> image that is suitable for development work.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis><filename>core-image-lsb-sdk</filename>:</emphasis>
|
||||
A <filename>core-image-lsb</filename> that includes everything in meta-toolchain
|
||||
but also includes development headers and libraries to form a complete standalone SDK.
|
||||
This image is suitable for development using the target.
|
||||
See the "<link linkend='platdev-appdev-external-sdk'>External Development Using
|
||||
the Meta-Toolchain</link>" section for more information.</para></listitem>
|
||||
but also includes development headers and libraries to form a complete standalone SDK.
|
||||
See the "<link linkend='platdev-appdev-external-sdk'>External Development Using the Meta-Toolchain</link>"
|
||||
section for more information.</para></listitem>
|
||||
<listitem><para><emphasis><filename>core-image-clutter</filename>:</emphasis>
|
||||
An image with support for the Open GL-based toolkit Clutter, which enables development of
|
||||
rich and animated graphical user interfaces.</para></listitem>
|
||||
@@ -88,17 +81,16 @@
|
||||
The image supports X11 with a Sato theme and Pimlico applications and also
|
||||
contains terminal, editor, and file manager.</para></listitem>
|
||||
<listitem><para><emphasis><filename>core-image-sato-dev</filename>:</emphasis>
|
||||
A <filename>core-image-sato</filename> image suitable for development
|
||||
using the host.
|
||||
The image includes libraries needed to build applications on the device itself,
|
||||
testing and profiling tools, and debug symbols.
|
||||
A <filename>core-image-sato</filename> image suitable for development
|
||||
that also includes a native toolchain and libraries needed to build applications on
|
||||
the device itself.
|
||||
The image also includes testing and profiling tools as well as debug symbols.
|
||||
This image was formerly <filename>core-image-sdk</filename>.</para></listitem>
|
||||
<listitem><para><emphasis><filename>core-image-sato-sdk</filename>:</emphasis>
|
||||
A <filename>core-image-sato</filename> image that includes everything in meta-toolchain.
|
||||
The image also includes development headers and libraries to form a complete standalone SDK
|
||||
and is suitable for development using the target.
|
||||
See the "<link linkend='platdev-appdev-external-sdk'>External Development Using
|
||||
the Meta-Toolchain</link>" section for more information.</para></listitem>
|
||||
The image also includes development headers and libraries to form a complete standalone SDK.
|
||||
See the "<link linkend='platdev-appdev-external-sdk'>External Development Using the Meta-Toolchain</link>"
|
||||
section for more information.</para></listitem>
|
||||
<listitem><para><emphasis><filename>core-image-rt</filename>:</emphasis>
|
||||
A <filename>core-image-minimal</filename> image plus a real-time test suite and
|
||||
tools appropriate for real-time use.</para></listitem>
|
||||
@@ -106,9 +98,9 @@
|
||||
A <filename>core-image-rt</filename> image that includes everything in
|
||||
<filename>meta-toolchain</filename>.
|
||||
The image also includes development headers and libraries to form a complete
|
||||
stand-alone SDK and is suitable for development using the target.
|
||||
See the "<link linkend='platdev-appdev-external-sdk'>External Development Using
|
||||
the Meta-Toolchain</link>" section for more information.</para></listitem>
|
||||
stand-alone SDK.
|
||||
See the "<link linkend='platdev-appdev-external-sdk'>External Development Using the Meta-Toolchain</link>"
|
||||
section for more information.</para></listitem>
|
||||
<listitem><para><emphasis><filename>core-image-gtk-directfb</filename>:</emphasis>
|
||||
An image that uses <filename>gtk+</filename> over <filename>directfb</filename>
|
||||
instead of X11.
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
<!ENTITY DISTRO "1.3">
|
||||
<!ENTITY DISTRO_NAME "1.2+snapshot">
|
||||
<!ENTITY YOCTO_DOC_VERSION "latest">
|
||||
<!ENTITY POKYVERSION "8.0">
|
||||
<!ENTITY DISTRO "1.2.1">
|
||||
<!ENTITY DISTRO_NAME "denzil">
|
||||
<!ENTITY YOCTO_DOC_VERSION "current">
|
||||
<!ENTITY POKYVERSION "7.0.1">
|
||||
<!ENTITY YOCTO_POKY "poky-&DISTRO_NAME;-&POKYVERSION;">
|
||||
<!ENTITY COPYRIGHT_YEAR "2010-2012">
|
||||
<!ENTITY YOCTO_DL_URL "http://downloads.yoctoproject.org">
|
||||
|
||||
@@ -0,0 +1,23 @@
|
||||
DESCRIPTION = "FarSight is an audio/video conferencing framework specifically designed for Instant Messengers."
|
||||
HOMEPAGE = "http://farsight.sf.net"
|
||||
SRC_URI = "http://farsight.freedesktop.org/releases/farsight2/${BPN}-${PV}.tar.gz"
|
||||
LICENSE = "LGPLv2.1"
|
||||
DEPENDS = "libnice glib-2.0 libxml2 zlib dbus gstreamer gst-plugins-base"
|
||||
|
||||
inherit autotools
|
||||
|
||||
PR = "r2"
|
||||
|
||||
EXTRA_OECONF = " \
|
||||
--disable-debug \
|
||||
--disable-gtk-doc \
|
||||
--disable-python \
|
||||
"
|
||||
|
||||
FILES_${PN} += "${libdir}/*/*.so"
|
||||
FILES_${PN}-dev += "${libdir}/f*/*a ${libdir}/g*/*a"
|
||||
FILES_${PN}-dbg += "${libdir}/*/.debug"
|
||||
|
||||
|
||||
|
||||
|
||||
23
meta-demoapps/recipes-connectivity/farsight/libnice_0.0.6.bb
Normal file
23
meta-demoapps/recipes-connectivity/farsight/libnice_0.0.6.bb
Normal file
@@ -0,0 +1,23 @@
|
||||
SUMMARY = "IETF draft Interactice Connectivity Establishment standard"
|
||||
DESCRIPTION = "Libnice is an implementation of the IETF's draft Interactice Connectivity Establishment standard (ICE)."
|
||||
HOMEPAGE = "http://nice.freedesktop.org/wiki/"
|
||||
SRC_URI = "http://nice.freedesktop.org/releases/libnice-${PV}.tar.gz"
|
||||
|
||||
LICENSE = "LGPL/MPL"
|
||||
DEPENDS = "glib-2.0 gstreamer"
|
||||
|
||||
inherit autotools
|
||||
|
||||
FILES_${PN} += "${libdir}/gstreamer-0.10/*.so"
|
||||
FILES_${PN}-dev += "${libdir}/gstreamer-0.10/*a"
|
||||
FILES_${PN}-dbg += "${libdir}/gstreamer-0.10/.debug"
|
||||
|
||||
do_compile_append() {
|
||||
for i in $(find ${S} -name "*.pc") ; do
|
||||
sed -i -e s:${STAGING_DIR_TARGET}::g \
|
||||
-e s:/${TARGET_SYS}::g \
|
||||
$i
|
||||
done
|
||||
}
|
||||
|
||||
|
||||
@@ -0,0 +1,20 @@
|
||||
Upstream-Status: Inappropriate [configuration]
|
||||
|
||||
---
|
||||
configure.ac | 1 +
|
||||
1 file changed, 1 insertion(+)
|
||||
|
||||
--- libetpan-0.54.orig/configure.ac
|
||||
+++ libetpan-0.54/configure.ac
|
||||
@@ -104,10 +104,11 @@ if test "$have_w32_system" = yes; then
|
||||
fi
|
||||
AM_CONDITIONAL(HAVE_MINGW32_SYSTEM, test "$have_w32_system" = yes)
|
||||
|
||||
# Check the C compiler.
|
||||
AC_PROG_CC
|
||||
+AC_PROG_CXX
|
||||
|
||||
# Compiler flags.
|
||||
AC_ARG_ENABLE(debug, [ --enable-debug setup flags (gcc) for debugging (default=no)],
|
||||
if test "x$GCC" = xyes; then
|
||||
CFLAGS="$CFLAGS -O2 -g"
|
||||
20
meta-demoapps/recipes-connectivity/libetpan/libetpan_0.54.bb
Normal file
20
meta-demoapps/recipes-connectivity/libetpan/libetpan_0.54.bb
Normal file
@@ -0,0 +1,20 @@
|
||||
SUMMARY = "Library for communicating with mail and news services"
|
||||
DESCRIPTION = "libetpan is a library for communicating with mail and news servers. \
|
||||
It supports the protocols SMTP, POP3, IMAP and NNTP."
|
||||
HOMEPAGE = "http://www.etpan.org"
|
||||
SECTION = "libs"
|
||||
DEPENDS = "curl expat gnutls"
|
||||
LICENSE = "BSD"
|
||||
PR = "r1"
|
||||
|
||||
SRC_URI = "${SOURCEFORGE_MIRROR}/libetpan/libetpan-${PV}.tar.gz \
|
||||
file://cxx-is-here.patch;patch=1"
|
||||
|
||||
inherit autotools pkgconfig gettext binconfig
|
||||
|
||||
EXTRA_OECONF = "--without-openssl --with-gnutls --disable-db"
|
||||
|
||||
PARALLEL_MAKE = ""
|
||||
|
||||
FILES_${PN} = "${libdir}/lib*.so.*"
|
||||
FILES_${PN}-dev = "${bindir} ${includedir} ${libdir}/lib*.so ${libdir}/*.la ${libdir}/*.a ${libdir}/pkgconfig"
|
||||
@@ -0,0 +1,10 @@
|
||||
SUMMARY = "XMPP/Jabber library"
|
||||
DESCRIPTION = "Loudmouth is a lightweight and easy-to-use C library for programming with the XMPP/Jabber protocol."
|
||||
HOMEPAGE = "http://www.loudmouth-project.org/"
|
||||
LICENSE = "LGPL"
|
||||
DEPENDS = "glib-2.0 gnutls libcheck"
|
||||
PR = "r2"
|
||||
|
||||
SRC_URI = "http://ftp.imendio.com/pub/imendio/${BPN}/src/${BPN}-${PV}.tar.bz2"
|
||||
|
||||
inherit autotools pkgconfig
|
||||
@@ -0,0 +1,15 @@
|
||||
Upstream-Status: Inappropriate [configuration]
|
||||
|
||||
Index: openswan-2.4.7/Makefile.inc
|
||||
===================================================================
|
||||
--- openswan-2.4.7.orig/Makefile.inc 2006-12-25 18:05:40.608503250 +0100
|
||||
+++ openswan-2.4.7/Makefile.inc 2006-12-25 18:06:39.028154250 +0100
|
||||
@@ -158,7 +158,7 @@
|
||||
# how backup names are composed.
|
||||
# Note that the install procedures will never overwrite an existing config
|
||||
# file, which is why -b is not specified for them.
|
||||
-INSTBINFLAGS=-b --suffix=.old
|
||||
+INSTBINFLAGS=
|
||||
INSTSUIDFLAGS=--mode=u+rxs,g+rx,o+rx --group=root -b --suffix=.old
|
||||
INSTMANFLAGS=
|
||||
INSTCONFFLAGS=
|
||||
@@ -0,0 +1,28 @@
|
||||
Upstream-Status: Inappropriate [configuration]
|
||||
|
||||
--- openswan-2.2.0.orig/programs/Makefile.program 2004-06-03 03:06:27.000000000 +0200
|
||||
+++ openswan-2.2.0/programs/Makefile.program 2005-03-05 13:50:19.000000000 +0100
|
||||
@@ -30,10 +30,6 @@
|
||||
|
||||
CFLAGS+= ${WERROR}
|
||||
|
||||
-ifneq ($(LD_LIBRARY_PATH),)
|
||||
-LDFLAGS=-L$(LD_LIBRARY_PATH)
|
||||
-endif
|
||||
-
|
||||
MANDIR8=$(MANTREE)/man8
|
||||
MANDIR5=$(MANTREE)/man5
|
||||
|
||||
--- openswan-2.2.0.orig/programs/pluto/Makefile 2005-01-03 20:40:45.000000000 +0100
|
||||
+++ openswan-2.2.0/programs/pluto/Makefile 2005-03-05 13:51:21.000000000 +0100
|
||||
@@ -234,10 +234,6 @@
|
||||
LIBSPLUTO+=${CURL_LIBS}
|
||||
LIBSPLUTO+= -lgmp -lresolv # -lefence
|
||||
|
||||
-ifneq ($(LD_LIBRARY_PATH),)
|
||||
-LDFLAGS=-L$(LD_LIBRARY_PATH)
|
||||
-endif
|
||||
-
|
||||
LIBSADNS = $(OPENSWANLIB)
|
||||
LIBSADNS += -lresolv # -lefence
|
||||
|
||||
@@ -0,0 +1,379 @@
|
||||
Upstream-Status: Inappropriate [configuration]
|
||||
|
||||
diff -Nru openswan-2.4.7.orig/doc/Makefile openswan-2.4.7/doc/Makefile
|
||||
--- openswan-2.4.7.orig/doc/Makefile 2005-11-08 23:32:45.000000000 +0200
|
||||
+++ openswan-2.4.7/doc/Makefile 2006-12-06 22:46:54.732830840 +0200
|
||||
@@ -1,6 +1,6 @@
|
||||
# Makefile to generate various formats from HTML source
|
||||
#
|
||||
-# Assumes the htmldoc utility is available.
|
||||
+# No longer cares if the htmldoc utility is available.
|
||||
# This can be downloaded from www.easysw.com
|
||||
#
|
||||
# Also needs lynx(1) for HTML-to-text conversion
|
||||
diff -Nru openswan-2.4.7.orig/lib/libcrypto/libdes/asm/crypt586.pl openswan-2.4.7/lib/libcrypto/libdes/asm/crypt586.pl
|
||||
--- openswan-2.4.7.orig/lib/libcrypto/libdes/asm/crypt586.pl 2004-07-16 03:24:45.000000000 +0300
|
||||
+++ openswan-2.4.7/lib/libcrypto/libdes/asm/crypt586.pl 2006-12-06 22:46:54.732830840 +0200
|
||||
@@ -1,4 +1,4 @@
|
||||
-#!/usr/local/bin/perl
|
||||
+#!/usr/bin/perl
|
||||
#
|
||||
# The inner loop instruction sequence and the IP/FP modifications are from
|
||||
# Svend Olaf Mikkelsen <svolaf@inet.uni-c.dk>
|
||||
diff -Nru openswan-2.4.7.orig/lib/libcrypto/libdes/asm/perlasm/cbc.pl openswan-2.4.7/lib/libcrypto/libdes/asm/perlasm/cbc.pl
|
||||
--- openswan-2.4.7.orig/lib/libcrypto/libdes/asm/perlasm/cbc.pl 2004-07-10 11:07:06.000000000 +0300
|
||||
+++ openswan-2.4.7/lib/libcrypto/libdes/asm/perlasm/cbc.pl 2006-12-06 22:46:54.736831090 +0200
|
||||
@@ -1,4 +1,4 @@
|
||||
-#!/usr/local/bin/perl
|
||||
+#!/usr/bin/perl
|
||||
|
||||
# void des_ncbc_encrypt(input, output, length, schedule, ivec, enc)
|
||||
# des_cblock (*input);
|
||||
diff -Nru openswan-2.4.7.orig/lib/libcrypto/libdes/asm/perlasm/x86asm.pl openswan-2.4.7/lib/libcrypto/libdes/asm/perlasm/x86asm.pl
|
||||
--- openswan-2.4.7.orig/lib/libcrypto/libdes/asm/perlasm/x86asm.pl 2004-07-10 11:07:06.000000000 +0300
|
||||
+++ openswan-2.4.7/lib/libcrypto/libdes/asm/perlasm/x86asm.pl 2006-12-06 22:46:54.736831090 +0200
|
||||
@@ -1,4 +1,4 @@
|
||||
-#!/usr/local/bin/perl
|
||||
+#!/usr/bin/perl
|
||||
|
||||
# require 'x86asm.pl';
|
||||
# &asm_init("cpp","des-586.pl");
|
||||
diff -Nru openswan-2.4.7.orig/lib/libcrypto/libdes/asm/perlasm/x86ms.pl openswan-2.4.7/lib/libcrypto/libdes/asm/perlasm/x86ms.pl
|
||||
--- openswan-2.4.7.orig/lib/libcrypto/libdes/asm/perlasm/x86ms.pl 2004-07-10 11:07:07.000000000 +0300
|
||||
+++ openswan-2.4.7/lib/libcrypto/libdes/asm/perlasm/x86ms.pl 2006-12-06 22:46:54.736831090 +0200
|
||||
@@ -1,4 +1,4 @@
|
||||
-#!/usr/local/bin/perl
|
||||
+#!/usr/bin/perl
|
||||
|
||||
package x86ms;
|
||||
|
||||
diff -Nru openswan-2.4.7.orig/lib/libcrypto/libdes/asm/perlasm/x86unix.pl openswan-2.4.7/lib/libcrypto/libdes/asm/perlasm/x86unix.pl
|
||||
--- openswan-2.4.7.orig/lib/libcrypto/libdes/asm/perlasm/x86unix.pl 2004-07-10 11:07:07.000000000 +0300
|
||||
+++ openswan-2.4.7/lib/libcrypto/libdes/asm/perlasm/x86unix.pl 2006-12-06 22:46:54.736831090 +0200
|
||||
@@ -1,4 +1,4 @@
|
||||
-#!/usr/local/bin/perl
|
||||
+#!/usr/bin/perl
|
||||
|
||||
package x86unix;
|
||||
|
||||
diff -Nru openswan-2.4.7.orig/lib/liblwres/Makefile openswan-2.4.7/lib/liblwres/Makefile
|
||||
--- openswan-2.4.7.orig/lib/liblwres/Makefile 2004-12-18 20:13:34.000000000 +0200
|
||||
+++ openswan-2.4.7/lib/liblwres/Makefile 2006-12-06 22:46:54.736831090 +0200
|
||||
@@ -20,7 +20,7 @@
|
||||
CDEFINES = -g
|
||||
CWARNINGS = -Werror
|
||||
|
||||
-CFLAGS=${CINCLUDES} ${CDEFINES} ${CWARNINGS}
|
||||
+CFLAGS=${CINCLUDES} ${CDEFINES} ${CWARNINGS} $(USERCOMPILE)
|
||||
|
||||
VERSION="@(\#) openswan-hacking-9.3-for-osw2"
|
||||
LIBINTERFACE=2
|
||||
diff -Nru openswan-2.4.7.orig/linux/net/ipsec/des/asm/des-586.pl openswan-2.4.7/linux/net/ipsec/des/asm/des-586.pl
|
||||
--- openswan-2.4.7.orig/linux/net/ipsec/des/asm/des-586.pl 2004-07-10 11:06:50.000000000 +0300
|
||||
+++ openswan-2.4.7/linux/net/ipsec/des/asm/des-586.pl 2006-12-06 22:46:54.736831090 +0200
|
||||
@@ -1,4 +1,4 @@
|
||||
-#!/usr/local/bin/perl
|
||||
+#!/usr/bin/perl
|
||||
#
|
||||
# The inner loop instruction sequence and the IP/FP modifications are from
|
||||
# Svend Olaf Mikkelsen <svolaf@inet.uni-c.dk>
|
||||
diff -Nru openswan-2.4.7.orig/linux/net/ipsec/des/asm/des686.pl openswan-2.4.7/linux/net/ipsec/des/asm/des686.pl
|
||||
--- openswan-2.4.7.orig/linux/net/ipsec/des/asm/des686.pl 2004-07-10 11:06:50.000000000 +0300
|
||||
+++ openswan-2.4.7/linux/net/ipsec/des/asm/des686.pl 2006-12-06 22:46:54.740831340 +0200
|
||||
@@ -1,4 +1,4 @@
|
||||
-#!/usr/local/bin/perl
|
||||
+#!/usr/bin/perl
|
||||
|
||||
$prog="des686.pl";
|
||||
|
||||
diff -Nru openswan-2.4.7.orig/linux/net/ipsec/des/asm/desboth.pl openswan-2.4.7/linux/net/ipsec/des/asm/desboth.pl
|
||||
--- openswan-2.4.7.orig/linux/net/ipsec/des/asm/desboth.pl 2004-07-10 11:06:50.000000000 +0300
|
||||
+++ openswan-2.4.7/linux/net/ipsec/des/asm/desboth.pl 2006-12-06 22:46:54.740831340 +0200
|
||||
@@ -1,4 +1,4 @@
|
||||
-#!/usr/local/bin/perl
|
||||
+#!/usr/bin/perl
|
||||
|
||||
$L="edi";
|
||||
$R="esi";
|
||||
diff -Nru openswan-2.4.7.orig/Makefile.inc openswan-2.4.7/Makefile.inc
|
||||
--- openswan-2.4.7.orig/Makefile.inc 2006-11-14 19:56:09.000000000 +0200
|
||||
+++ openswan-2.4.7/Makefile.inc 2006-12-06 22:48:32.534943089 +0200
|
||||
@@ -46,7 +46,7 @@
|
||||
DESTDIR?=
|
||||
|
||||
# "local" part of tree, used in building other pathnames
|
||||
-INC_USRLOCAL=/usr/local
|
||||
+INC_USRLOCAL?=/usr
|
||||
|
||||
# PUBDIR is where the "ipsec" command goes; beware, many things define PATH
|
||||
# settings which are assumed to include it (or at least, to include *some*
|
||||
@@ -80,7 +80,7 @@
|
||||
MANPLACES=man3 man5 man8
|
||||
|
||||
# where configuration files go
|
||||
-FINALCONFFILE?=/etc/ipsec.conf
|
||||
+FINALCONFFILE?=/etc/ipsec/ipsec.conf
|
||||
CONFFILE=$(DESTDIR)$(FINALCONFFILE)
|
||||
|
||||
FINALCONFDIR?=/etc
|
||||
@@ -91,7 +91,7 @@
|
||||
|
||||
# sample configuration files go into
|
||||
INC_DOCDIR?=share/doc
|
||||
-FINALEXAMPLECONFDIR=${INC_USRLOCAL}/${INC_DOCDIR}/openswan
|
||||
+FINALEXAMPLECONFDIR?=${INC_USRLOCAL}/${INC_DOCDIR}/openswan
|
||||
EXAMPLECONFDIR=${DESTDIR}${FINALEXAMPLECONFDIR}
|
||||
|
||||
FINALDOCDIR?=${INC_USRLOCAL}/${INC_DOCDIR}/openswan
|
||||
@@ -239,7 +239,7 @@
|
||||
# installed one in RH 7.2, won't work - you wind up depending upon
|
||||
# openssl.
|
||||
|
||||
-BIND9STATICLIBDIR?=/usr/local/lib
|
||||
+BIND9STATICLIBDIR?=/usr/lib
|
||||
|
||||
# if you install elsewere, you may need to point the include files to it.
|
||||
#BIND9STATICLIBDIR?=/sandel/lib
|
||||
diff -Nru openswan-2.4.7.orig/programs/barf/barf.in openswan-2.4.7/programs/barf/barf.in
|
||||
--- openswan-2.4.7.orig/programs/barf/barf.in 2006-11-07 05:49:18.000000000 +0200
|
||||
+++ openswan-2.4.7/programs/barf/barf.in 2006-12-06 22:46:54.740831340 +0200
|
||||
@@ -16,7 +16,7 @@
|
||||
|
||||
LOGS=${LOGS-/var/log}
|
||||
CONFS=${IPSEC_CONFS-/etc}
|
||||
-CONFDDIR=${IPSEC_CONFDDIR-/etc/ipsec.d}
|
||||
+CONFDDIR=${IPSEC_CONFDDIR-/etc/ipsec/ipsec.d}
|
||||
me="ipsec barf"
|
||||
# Max lines to use for things like 'route -n'
|
||||
maxlines=100
|
||||
@@ -238,13 +238,13 @@
|
||||
done
|
||||
fi
|
||||
_________________________ ipsec/ls-libdir
|
||||
-ls -l ${IPSEC_LIBDIR-/usr/local/lib/ipsec}
|
||||
+ls -l ${IPSEC_LIBDIR-/usr/lib/ipsec}
|
||||
_________________________ ipsec/ls-execdir
|
||||
-ls -l ${IPSEC_EXECDIR-/usr/local/libexec/ipsec}
|
||||
+ls -l ${IPSEC_EXECDIR-/usr/libexec/ipsec}
|
||||
_________________________ ipsec/updowns
|
||||
-for f in `ls ${IPSEC_EXECDIR-/usr/local/libexec/ipsec} | egrep updown`
|
||||
+for f in `ls ${IPSEC_EXECDIR-/usr/libexec/ipsec} | egrep updown`
|
||||
do
|
||||
- cat ${IPSEC_EXECDIR-/usr/local/libexec/ipsec}/$f
|
||||
+ cat ${IPSEC_EXECDIR-/usr/libexec/ipsec}/$f
|
||||
done
|
||||
_________________________ /proc/net/dev
|
||||
cat /proc/net/dev
|
||||
diff -Nru openswan-2.4.7.orig/programs/eroute/eroute.5 openswan-2.4.7/programs/eroute/eroute.5
|
||||
--- openswan-2.4.7.orig/programs/eroute/eroute.5 2006-10-26 23:40:43.000000000 +0300
|
||||
+++ openswan-2.4.7/programs/eroute/eroute.5 2006-12-06 22:57:19.307864340 +0200
|
||||
@@ -168,7 +168,7 @@
|
||||
.SH "FILES"
|
||||
|
||||
.PP
|
||||
-/proc/net/ipsec_eroute, /usr/local/bin/ipsec
|
||||
+/proc/net/ipsec_eroute, /usr/bin/ipsec
|
||||
|
||||
.SH "SEE ALSO"
|
||||
|
||||
diff -Nru openswan-2.4.7.orig/programs/eroute/eroute.8 openswan-2.4.7/programs/eroute/eroute.8
|
||||
--- openswan-2.4.7.orig/programs/eroute/eroute.8 2003-10-31 04:32:27.000000000 +0200
|
||||
+++ openswan-2.4.7/programs/eroute/eroute.8 2006-12-06 22:46:54.740831340 +0200
|
||||
@@ -308,7 +308,7 @@
|
||||
.br
|
||||
.LP
|
||||
.SH FILES
|
||||
-/proc/net/ipsec_eroute, /usr/local/bin/ipsec
|
||||
+/proc/net/ipsec_eroute, /usr/bin/ipsec
|
||||
.SH "SEE ALSO"
|
||||
ipsec(8), ipsec_manual(8), ipsec_tncfg(8), ipsec_spi(8),
|
||||
ipsec_spigrp(8), ipsec_klipsdebug(8), ipsec_eroute(5)
|
||||
diff -Nru openswan-2.4.7.orig/programs/_include/_include.in openswan-2.4.7/programs/_include/_include.in
|
||||
--- openswan-2.4.7.orig/programs/_include/_include.in 2003-01-06 23:44:04.000000000 +0200
|
||||
+++ openswan-2.4.7/programs/_include/_include.in 2006-12-06 22:46:54.740831340 +0200
|
||||
@@ -47,10 +47,10 @@
|
||||
do
|
||||
if test ! -r "$f"
|
||||
then
|
||||
- if test ! "$f" = "/etc/ipsec.conf"
|
||||
+ if test ! "$f" = "/etc/ipsec/ipsec.conf"
|
||||
then
|
||||
echo "#:cannot open configuration file \'$f\'"
|
||||
- if test "$f" = "/etc/ipsec.secrets"
|
||||
+ if test "$f" = "/etc/ipsec/ipsec.secrets"
|
||||
then
|
||||
echo "#:Your secrets file will be created when you start FreeS/WAN for the first time."
|
||||
fi
|
||||
diff -Nru openswan-2.4.7.orig/programs/ipsec/ipsec.8 openswan-2.4.7/programs/ipsec/ipsec.8
|
||||
--- openswan-2.4.7.orig/programs/ipsec/ipsec.8 2003-02-27 18:51:54.000000000 +0200
|
||||
+++ openswan-2.4.7/programs/ipsec/ipsec.8 2006-12-06 22:46:54.744831590 +0200
|
||||
@@ -81,7 +81,7 @@
|
||||
.I ipsec
|
||||
thinks the IPsec configuration files are stored.
|
||||
.SH FILES
|
||||
-/usr/local/lib/ipsec usual utilities directory
|
||||
+/usr/lib/ipsec usual utilities directory
|
||||
.SH ENVIRONMENT
|
||||
.PP
|
||||
The following environment variables control where FreeS/WAN finds its
|
||||
diff -Nru openswan-2.4.7.orig/programs/klipsdebug/klipsdebug.5 openswan-2.4.7/programs/klipsdebug/klipsdebug.5
|
||||
--- openswan-2.4.7.orig/programs/klipsdebug/klipsdebug.5 2006-10-27 01:21:25.000000000 +0300
|
||||
+++ openswan-2.4.7/programs/klipsdebug/klipsdebug.5 2006-12-06 22:58:04.150666840 +0200
|
||||
@@ -114,7 +114,7 @@
|
||||
.SH "FILES"
|
||||
|
||||
.PP
|
||||
-/proc/net/ipsec_klipsdebug, /usr/local/bin/ipsec
|
||||
+/proc/net/ipsec_klipsdebug, /usr/bin/ipsec
|
||||
|
||||
.SH "SEE ALSO"
|
||||
|
||||
diff -Nru openswan-2.4.7.orig/programs/klipsdebug/klipsdebug.8 openswan-2.4.7/programs/klipsdebug/klipsdebug.8
|
||||
--- openswan-2.4.7.orig/programs/klipsdebug/klipsdebug.8 2006-10-27 01:21:25.000000000 +0300
|
||||
+++ openswan-2.4.7/programs/klipsdebug/klipsdebug.8 2006-12-06 22:58:22.295800840 +0200
|
||||
@@ -111,7 +111,7 @@
|
||||
.SH "FILES"
|
||||
|
||||
.PP
|
||||
-/proc/net/ipsec_klipsdebug, /usr/local/bin/ipsec
|
||||
+/proc/net/ipsec_klipsdebug, /usr/bin/ipsec
|
||||
|
||||
.SH "SEE ALSO"
|
||||
|
||||
diff -Nru openswan-2.4.7.orig/programs/mailkey/mailkey.in openswan-2.4.7/programs/mailkey/mailkey.in
|
||||
--- openswan-2.4.7.orig/programs/mailkey/mailkey.in 2006-10-29 02:49:23.000000000 +0300
|
||||
+++ openswan-2.4.7/programs/mailkey/mailkey.in 2006-12-06 22:46:54.828836839 +0200
|
||||
@@ -60,7 +60,7 @@
|
||||
|
||||
"$test1st"
|
||||
|
||||
-Common concerns: This account must be able to read /etc/ipsec.secrets.
|
||||
+Common concerns: This account must be able to read /etc/ipsec/ipsec.secrets.
|
||||
If you haven't generated your key yet, please run 'ipsec newhostkey'."
|
||||
exit 0
|
||||
}
|
||||
diff -Nru openswan-2.4.7.orig/programs/pluto/Makefile openswan-2.4.7/programs/pluto/Makefile
|
||||
--- openswan-2.4.7.orig/programs/pluto/Makefile 2006-11-07 17:55:52.000000000 +0200
|
||||
+++ openswan-2.4.7/programs/pluto/Makefile 2006-12-06 22:46:54.832837088 +0200
|
||||
@@ -256,7 +256,7 @@
|
||||
-DPOLICYGROUPSDIR=\"${FINALCONFDDIR}/policies\" \
|
||||
-DPERPEERLOGDIR=\"${FINALLOGDIR}/pluto/peer\"
|
||||
|
||||
-ALLFLAGS = $(CPPFLAGS) $(CFLAGS)
|
||||
+ALLFLAGS = $(CPPFLAGS) $(CFLAGS) $(USERCOMPILE)
|
||||
|
||||
# libefence is a free memory allocation debugger
|
||||
# Solaris 2 needs -lsocket -lnsl
|
||||
diff -Nru openswan-2.4.7.orig/programs/setup/Makefile openswan-2.4.7/programs/setup/Makefile
|
||||
--- openswan-2.4.7.orig/programs/setup/Makefile 2004-12-18 20:13:43.000000000 +0200
|
||||
+++ openswan-2.4.7/programs/setup/Makefile 2006-12-06 22:46:54.832837088 +0200
|
||||
@@ -33,25 +33,10 @@
|
||||
@rm -f $(BINDIR)/setup
|
||||
@$(INSTALL) $(INSTBINFLAGS) setup $(RCDIR)/ipsec
|
||||
@ln -s $(FINALRCDIR)/ipsec $(BINDIR)/setup
|
||||
- -@for i in 0 1 2 3 4 5 6; do mkdir -p $(RCDIR)/../rc$$i.d; done
|
||||
- -@cd $(RCDIR)/../rc0.d && ln -f -s ../init.d/ipsec K76ipsec
|
||||
- -@cd $(RCDIR)/../rc1.d && ln -f -s ../init.d/ipsec K76ipsec
|
||||
- -@cd $(RCDIR)/../rc2.d && ln -f -s ../init.d/ipsec S47ipsec
|
||||
- -@cd $(RCDIR)/../rc3.d && ln -f -s ../init.d/ipsec S47ipsec
|
||||
- -@cd $(RCDIR)/../rc4.d && ln -f -s ../init.d/ipsec S47ipsec
|
||||
- -@cd $(RCDIR)/../rc5.d && ln -f -s ../init.d/ipsec S47ipsec
|
||||
- -@cd $(RCDIR)/../rc6.d && ln -f -s ../init.d/ipsec K76ipsec
|
||||
|
||||
install_file_list::
|
||||
@echo $(RCDIR)/ipsec
|
||||
@echo $(BINDIR)/setup
|
||||
- @echo $(RCDIR)/../rc0.d/K76ipsec
|
||||
- @echo $(RCDIR)/../rc1.d/K76ipsec
|
||||
- @echo $(RCDIR)/../rc2.d/S47ipsec
|
||||
- @echo $(RCDIR)/../rc3.d/S47ipsec
|
||||
- @echo $(RCDIR)/../rc4.d/S47ipsec
|
||||
- @echo $(RCDIR)/../rc5.d/S47ipsec
|
||||
- @echo $(RCDIR)/../rc6.d/K76ipsec
|
||||
|
||||
clean::
|
||||
@rm -f setup
|
||||
diff -Nru openswan-2.4.7.orig/programs/showhostkey/showhostkey.in openswan-2.4.7/programs/showhostkey/showhostkey.in
|
||||
--- openswan-2.4.7.orig/programs/showhostkey/showhostkey.in 2004-11-14 15:40:41.000000000 +0200
|
||||
+++ openswan-2.4.7/programs/showhostkey/showhostkey.in 2006-12-06 22:46:54.844837840 +0200
|
||||
@@ -18,7 +18,7 @@
|
||||
usage="Usage: $me [--file secrets] [--left] [--right] [--txt gateway] [--id id]
|
||||
[--dhclient] [--ipseckey]"
|
||||
|
||||
-file=/etc/ipsec.secrets
|
||||
+file=/etc/ipsec/ipsec.secrets
|
||||
fmt=""
|
||||
gw=
|
||||
id=
|
||||
diff -Nru openswan-2.4.7.orig/programs/spi/spi.5 openswan-2.4.7/programs/spi/spi.5
|
||||
--- openswan-2.4.7.orig/programs/spi/spi.5 2006-10-26 23:53:59.000000000 +0300
|
||||
+++ openswan-2.4.7/programs/spi/spi.5 2006-12-06 23:00:11.910340779 +0200
|
||||
@@ -157,7 +157,7 @@
|
||||
.SH "FILES"
|
||||
|
||||
.PP
|
||||
-/proc/net/ipsec_spi, /usr/local/bin/ipsec
|
||||
+/proc/net/ipsec_spi, /usr/bin/ipsec
|
||||
|
||||
.SH "SEE ALSO"
|
||||
|
||||
diff -Nru openswan-2.4.7.orig/programs/spi/spi.8 openswan-2.4.7/programs/spi/spi.8
|
||||
--- openswan-2.4.7.orig/programs/spi/spi.8 2006-10-30 22:00:04.000000000 +0200
|
||||
+++ openswan-2.4.7/programs/spi/spi.8 2006-12-06 23:00:27.043286530 +0200
|
||||
@@ -215,7 +215,7 @@
|
||||
.SH "FILES"
|
||||
|
||||
.PP
|
||||
-/proc/net/ipsec_spi, /usr/local/bin/ipsec
|
||||
+/proc/net/ipsec_spi, /usr/bin/ipsec
|
||||
|
||||
.SH "SEE ALSO"
|
||||
|
||||
diff -Nru openswan-2.4.7.orig/programs/spigrp/spigrp.5 openswan-2.4.7/programs/spigrp/spigrp.5
|
||||
--- openswan-2.4.7.orig/programs/spigrp/spigrp.5 2006-10-26 23:50:29.000000000 +0300
|
||||
+++ openswan-2.4.7/programs/spigrp/spigrp.5 2006-12-06 23:01:25.650949280 +0200
|
||||
@@ -67,7 +67,7 @@
|
||||
.SH "FILES"
|
||||
|
||||
.PP
|
||||
-/proc/net/ipsec_spigrp, /usr/local/bin/ipsec
|
||||
+/proc/net/ipsec_spigrp, /usr/bin/ipsec
|
||||
|
||||
.SH "SEE ALSO"
|
||||
|
||||
diff -Nru openswan-2.4.7.orig/programs/spigrp/spigrp.8 openswan-2.4.7/programs/spigrp/spigrp.8
|
||||
--- openswan-2.4.7.orig/programs/spigrp/spigrp.8 2006-10-26 23:50:29.000000000 +0300
|
||||
+++ openswan-2.4.7/programs/spigrp/spigrp.8 2006-12-06 23:01:39.079788532 +0200
|
||||
@@ -87,7 +87,7 @@
|
||||
.SH "FILES"
|
||||
|
||||
.PP
|
||||
-/proc/net/ipsec_spigrp, /usr/local/bin/ipsec
|
||||
+/proc/net/ipsec_spigrp, /usr/bin/ipsec
|
||||
|
||||
.SH "SEE ALSO"
|
||||
|
||||
diff -Nru openswan-2.4.7.orig/programs/tncfg/tncfg.5 openswan-2.4.7/programs/tncfg/tncfg.5
|
||||
--- openswan-2.4.7.orig/programs/tncfg/tncfg.5 2006-10-26 23:58:11.000000000 +0300
|
||||
+++ openswan-2.4.7/programs/tncfg/tncfg.5 2006-12-06 23:01:59.385057530 +0200
|
||||
@@ -101,7 +101,7 @@
|
||||
.SH "FILES"
|
||||
|
||||
.PP
|
||||
-/proc/net/ipsec_tncfg, /usr/local/bin/ipsec
|
||||
+/proc/net/ipsec_tncfg, /usr/bin/ipsec
|
||||
|
||||
.SH "SEE ALSO"
|
||||
|
||||
diff -Nru openswan-2.4.7.orig/programs/tncfg/tncfg.8 openswan-2.4.7/programs/tncfg/tncfg.8
|
||||
--- openswan-2.4.7.orig/programs/tncfg/tncfg.8 2006-10-26 23:58:11.000000000 +0300
|
||||
+++ openswan-2.4.7/programs/tncfg/tncfg.8 2006-12-06 23:02:09.245673780 +0200
|
||||
@@ -63,7 +63,7 @@
|
||||
.SH "FILES"
|
||||
|
||||
.PP
|
||||
-/proc/net/ipsec_tncfg, /usr/local/bin/ipsec
|
||||
+/proc/net/ipsec_tncfg, /usr/bin/ipsec
|
||||
|
||||
.SH "SEE ALSO"
|
||||
|
||||
@@ -0,0 +1,36 @@
|
||||
SECTION = "console/network"
|
||||
SUMMARY = "IPsec implementation"
|
||||
DESCRIPTION = "Openswan is an Open Source implementation of IPsec for the \
|
||||
Linux operating system."
|
||||
HOMEPAGE = "http://www.openswan.org"
|
||||
LICENSE = "GPLv2"
|
||||
DEPENDS = "gmp flex-native"
|
||||
RRECOMMENDS_${PN} = "kernel-module-ipsec"
|
||||
PR = "r2"
|
||||
|
||||
SRC_URI = "http://www.openswan.org/download/old/openswan-${PV}.tar.gz \
|
||||
file://openswan-2.4.7-gentoo.patch;patch=1 \
|
||||
file://installflags.patch;patch=1 \
|
||||
file://ld-library-path-breakage.patch;patch=1"
|
||||
S = "${WORKDIR}/openswan-${PV}"
|
||||
|
||||
PARALLEL_MAKE = ""
|
||||
EXTRA_OEMAKE = "DESTDIR=${D} \
|
||||
USERCOMPILE="${CFLAGS}" \
|
||||
FINALCONFDIR=${sysconfdir}/ipsec \
|
||||
INC_RCDEFAULT=${sysconfdir}/init.d \
|
||||
INC_USRLOCAL=${prefix} \
|
||||
INC_MANDIR=share/man WERROR=''"
|
||||
|
||||
do_compile () {
|
||||
oe_runmake programs
|
||||
}
|
||||
|
||||
do_install () {
|
||||
oe_runmake install
|
||||
}
|
||||
|
||||
FILES_${PN} = "${sysconfdir} ${libdir}/ipsec/* ${sbindir}/* ${libexecdir}/ipsec/*"
|
||||
FILES_${PN}-dbg += "${libdir}/ipsec/.debug ${libexecdir}/ipsec/.debug"
|
||||
|
||||
CONFFILES_${PN} = "${sysconfdir}/ipsec/ipsec.conf"
|
||||
@@ -0,0 +1,20 @@
|
||||
Upstream-Status: Inappropriate [configuration]
|
||||
|
||||
---
|
||||
cmake/OpenSyncInternal.cmake.in | 1 -
|
||||
1 file changed, 1 deletion(-)
|
||||
|
||||
--- libopensync-0.36.orig/cmake/OpenSyncInternal.cmake.in
|
||||
+++ libopensync-0.36/cmake/OpenSyncInternal.cmake.in
|
||||
@@ -5,11 +5,10 @@ INCLUDE( OpenSyncTesting )
|
||||
INCLUDE( OpenSyncPackaging )
|
||||
INCLUDE( OpenSyncPlugin )
|
||||
INCLUDE( OpenSyncPlatforms )
|
||||
INCLUDE( MacroEnsureOutOfSourceBuild )
|
||||
|
||||
-MACRO_ENSURE_OUT_OF_SOURCE_BUILD("${CMAKE_PROJECT_NAME} doesn't allow to build within the source directory. Please, create a seperate build directory and run 'cmake ${PROJECT_SOURCE_DIR} [options]'!")
|
||||
|
||||
|
||||
SET( OPENSYNC_PLUGINDIR "@OPENSYNC_PLUGINDIR@" )
|
||||
SET( OPENSYNC_FORMATSDIR "@OPENSYNC_FORMATSDIR@" )
|
||||
SET( OPENSYNC_PYTHON_PLUGINDIR "@OPENSYNC_PYTHON_PLUGINDIR@" )
|
||||
@@ -0,0 +1,19 @@
|
||||
Upstream-Status: Inappropriate [configuration]
|
||||
|
||||
---
|
||||
opensync/CMakeLists.txt | 1 -
|
||||
1 file changed, 1 deletion(-)
|
||||
|
||||
Index: libopensync-0.37/opensync/CMakeLists.txt
|
||||
===================================================================
|
||||
--- libopensync-0.37.orig/opensync/CMakeLists.txt 2008-10-15 13:56:45.000000000 +0100
|
||||
+++ libopensync-0.37/opensync/CMakeLists.txt 2008-10-15 13:59:03.000000000 +0100
|
||||
@@ -67,8 +67,6 @@
|
||||
SET_TARGET_PROPERTIES( opensync PROPERTIES VERSION ${OPENSYNC_LIBVERSION_VERSION} )
|
||||
SET_TARGET_PROPERTIES( opensync PROPERTIES SOVERSION ${OPENSYNC_LIBVERSION_SOVERSION} )
|
||||
|
||||
-SET_TARGET_PROPERTIES( opensync PROPERTIES COMPILE_FLAGS ${SYMBOLS_VISIBILITY} )
|
||||
-
|
||||
IF ( OPENSYNC_UNITTESTS )
|
||||
ADD_LIBRARY( opensync-testing SHARED ${libopensync_LIB_SRCS} )
|
||||
TARGET_LINK_LIBRARIES( opensync-testing ${GLIB2_LIBRARIES} ${GTHREAD2_LIBRARIES} ${GMODULE2_LIBRARIES} ${LIBXML2_LIBRARIES} ${LIBXSLT_LIBRARIES} ${LIBEXSLT_LIBRARIES} ${SQLITE3_LIBRARIES} )
|
||||
@@ -0,0 +1,18 @@
|
||||
Upstream-Status: Inappropriate [configuration]
|
||||
|
||||
---
|
||||
CMakeLists.txt | 1 -
|
||||
1 file changed, 1 deletion(-)
|
||||
|
||||
Index: libopensync-0.37/CMakeLists.txt
|
||||
===================================================================
|
||||
--- libopensync-0.37.orig/CMakeLists.txt 2008-08-18 16:14:24.000000000 +0100
|
||||
+++ libopensync-0.37/CMakeLists.txt 2008-10-15 14:00:12.000000000 +0100
|
||||
@@ -65,7 +65,6 @@
|
||||
FIND_PACKAGE( LibXslt REQUIRED )
|
||||
FIND_PACKAGE( LibExslt REQUIRED )
|
||||
FIND_PACKAGE( SWIG )
|
||||
-FIND_PACKAGE( PythonLibs )
|
||||
FIND_PACKAGE( Check )
|
||||
|
||||
ADD_SUBDIRECTORY( opensync )
|
||||
@@ -0,0 +1,25 @@
|
||||
Upstream-Status: Inappropriate [configuration]
|
||||
|
||||
Index: libopensync-plugin-evolution2-0.36/cmake/modules/FindOpenSync.cmake
|
||||
===================================================================
|
||||
--- libopensync-plugin-evolution2-0.36.orig/cmake/modules/FindOpenSync.cmake 2008-10-20 13:07:14.000000000 +0100
|
||||
+++ libopensync-plugin-evolution2-0.36/cmake/modules/FindOpenSync.cmake 2008-10-20 13:08:09.000000000 +0100
|
||||
@@ -23,15 +23,15 @@
|
||||
ENDIF ( OpenSync_FIND_REQUIRED )
|
||||
|
||||
IF ( OPENSYNC_MIN_VERSION )
|
||||
- PKG_SEARCH_MODULE( OPENSYNC ${_pkgconfig_REQUIRED} opensync-1.0>=${OPENSYNC_MIN_VERSION} )
|
||||
+ PKG_SEARCH_MODULE( OPENSYNC ${_pkgconfig_REQUIRED} libopensync>=${OPENSYNC_MIN_VERSION} )
|
||||
ELSE ( OPENSYNC_MIN_VERSION )
|
||||
- PKG_SEARCH_MODULE( OPENSYNC ${_pkgconfig_REQUIRED} opensync-1.0 )
|
||||
+ PKG_SEARCH_MODULE( OPENSYNC ${_pkgconfig_REQUIRED} libopensync )
|
||||
ENDIF ( OPENSYNC_MIN_VERSION )
|
||||
|
||||
FIND_PROGRAM( PKGCONFIG_EXECUTABLE NAMES pkg-config )
|
||||
|
||||
IF ( PKGCONFIG_EXECUTABLE )
|
||||
- EXEC_PROGRAM( ${PKGCONFIG_EXECUTABLE} ARGS opensync-1.0 --variable=datadir OUTPUT_VARIABLE _opensync_data_DIR )
|
||||
+ EXEC_PROGRAM( ${PKGCONFIG_EXECUTABLE} ARGS libopensync --variable=datadir OUTPUT_VARIABLE _opensync_data_DIR )
|
||||
STRING( REGEX REPLACE "[\r\n]" " " _opensync_data_DIR "${_opensync_data_DIR}" )
|
||||
ENDIF ( PKGCONFIG_EXECUTABLE )
|
||||
|
||||
@@ -0,0 +1,8 @@
|
||||
require libopensync-plugin_0.36.inc
|
||||
LICENSE = "LGPL"
|
||||
|
||||
DEPENDS += " eds-dbus"
|
||||
|
||||
SRC_URI += "file://0.37-fixes.patch;patch=1"
|
||||
|
||||
PR = "r1"
|
||||
@@ -0,0 +1,2 @@
|
||||
require libopensync-plugin_0.36.inc
|
||||
LICENSE = "LGPL"
|
||||
@@ -0,0 +1,2 @@
|
||||
require libopensync-plugin_0.36.inc
|
||||
LICENSE = "GPLv2"
|
||||
@@ -0,0 +1,4 @@
|
||||
require libopensync-plugin_0.36.inc
|
||||
LICENSE = "GPLv2"
|
||||
|
||||
DEPENDS += "openobex bluez4"
|
||||
@@ -0,0 +1,15 @@
|
||||
Upstream-Status: Inappropriate [others]
|
||||
|
||||
Index: libopensync-plugin-syncml-0.38/src/syncml_callbacks.c
|
||||
===================================================================
|
||||
--- libopensync-plugin-syncml-0.38.orig/src/syncml_callbacks.c 2009-07-31 10:30:33.000000000 +0100
|
||||
+++ libopensync-plugin-syncml-0.38/src/syncml_callbacks.c 2009-07-31 10:30:39.000000000 +0100
|
||||
@@ -405,7 +405,7 @@
|
||||
} else {
|
||||
/* This problem should be fixed with the next SLOW-SYNC. */
|
||||
osync_trace(TRACE_EXIT_ERROR, "%s - unexpected Add or Replace command", __func__);
|
||||
- smlErrorSet(error, SML_ERROR_TEMPORARY, "Unwanted Add or Replace command on second OMA DS session.");
|
||||
+ smlErrorSet(error, SML_ERROR_GENERIC, "Unwanted Add or Replace command on second OMA DS session.");
|
||||
return FALSE;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,8 @@
|
||||
require libopensync-plugin_0.36.inc
|
||||
LICENSE = "LGPL"
|
||||
|
||||
DEPENDS += " libsyncml (>= 0.4.7)"
|
||||
|
||||
SRC_URI += "file://fixerror.patch;patch=1"
|
||||
|
||||
PR = "r1"
|
||||
@@ -0,0 +1,4 @@
|
||||
require libopensync-plugin_0.36.inc
|
||||
LICENSE = "LGPL"
|
||||
|
||||
DEPENDS += "libcheck"
|
||||
@@ -0,0 +1,11 @@
|
||||
DEPENDS = "libopensync (>= 0.36)"
|
||||
|
||||
DESCRIPTION ?= "OpenSync plugin"
|
||||
|
||||
SRC_URI = "http://opensync.org/download/releases/${PV}/${BPN}-${PV}.tar.bz2"
|
||||
|
||||
inherit cmake
|
||||
|
||||
FILES_${PN} += "${libdir}/opensync*/plugins/*.so \
|
||||
${libdir}/opensync*/formats/*.so \
|
||||
${datadir}/opensync*/defaults/"
|
||||
@@ -0,0 +1,26 @@
|
||||
LICENSE = "LGPL"
|
||||
HOMEPAGE = "http://www.opensync.org/"
|
||||
SUMMARY = "Synchronization framwork"
|
||||
DESCRIPTION = "The OpenSync project is an ongoing effort to create a synchronization framework that will be a platform independent, general purpose synchronization engine utilizing modular plugins for content formats and different kind of connection types. OpenSync's modularity should allow it to be extended easily to new devices and purposes without radically changing the architecture itself, allowing it to support wide variety of devices used today and in the future."
|
||||
DEPENDS = "sqlite3 libxml2 glib-2.0 libcheck zlib libxslt"
|
||||
|
||||
PR = "r1"
|
||||
|
||||
SRC_URI = "http://opensync.org/download/releases/${PV}/libopensync-${PV}.tar.bz2\
|
||||
file://cmake.patch;patch=1 \
|
||||
file://build-in-src.patch;patch=1 \
|
||||
file://no-python-check.patch;patch=1"
|
||||
|
||||
inherit cmake pkgconfig
|
||||
|
||||
LEAD_SONAME = "libopensync.so"
|
||||
|
||||
FILES_${PN} += " ${libdir}/opensync*/formats/*.so \
|
||||
${libdir}/opensync*/osplugin \
|
||||
${datadir}/opensync*/schemas \
|
||||
${datadir}/opensync*/capabilities \
|
||||
${datadir}/opensync*/descriptions \
|
||||
"
|
||||
FILES_${PN}-dbg += " ${libdir}/opensync*/formats/.debug/*.so \
|
||||
${libdir}/opensync*/.debug/osplugin "
|
||||
|
||||
22
meta-demoapps/recipes-connectivity/opensync/libsync_git.bb
Normal file
22
meta-demoapps/recipes-connectivity/opensync/libsync_git.bb
Normal file
@@ -0,0 +1,22 @@
|
||||
SUMMARY = "GObject-based sync library"
|
||||
DESCRIPTION = "LibSync is a GObject-based framework for more convenient use of \
|
||||
OpenSync in GLib applications."
|
||||
LICENSE = "LGPLv2"
|
||||
SECTION = "x11"
|
||||
DEPENDS = "glib-2.0 gtk+ libglade libopensync avahi"
|
||||
RRECOMMENDS_${PN} = "\
|
||||
libopensync-plugin-file \
|
||||
"
|
||||
SRCREV = "3f375969d56028505db97cd25ef1679a167cfc59"
|
||||
PV = "0.0+gitr${SRCPV}"
|
||||
PR = "r2"
|
||||
|
||||
SRC_URI = "git://git.yoctoproject.org/sync;protocol=git"
|
||||
|
||||
inherit autotools pkgconfig
|
||||
|
||||
S = "${WORKDIR}/sync"
|
||||
|
||||
PACKAGES += "synctool"
|
||||
FILES_${PN} = "${libdir}/lib*.so.*"
|
||||
FILES_synctool = "${bindir} ${datadir}"
|
||||
@@ -0,0 +1,21 @@
|
||||
Upstream-Status: Inappropriate [configuration]
|
||||
|
||||
---
|
||||
CMakeLists.txt | 4 ----
|
||||
1 file changed, 4 deletions(-)
|
||||
|
||||
Index: libsyncml-0.5.4/CMakeLists.txt
|
||||
===================================================================
|
||||
--- libsyncml-0.5.4.orig/CMakeLists.txt 2009-07-31 09:56:16.000000000 +0100
|
||||
+++ libsyncml-0.5.4/CMakeLists.txt 2009-07-31 09:57:33.000000000 +0100
|
||||
@@ -72,10 +72,6 @@
|
||||
CHECK_TYPE_SIZE( uint32_t UINT32_T )
|
||||
CHECK_TYPE_SIZE( uint8_t UINT8_T )
|
||||
|
||||
-# ensure out od source build
|
||||
-INCLUDE( MacroEnsureOutOfSourceBuild )
|
||||
-MACRO_ENSURE_OUT_OF_SOURCE_BUILD("${CMAKE_PROJECT_NAME} doesn't allow to build within the source directory. Please, create a seperate build directory and run 'cmake ${PROJECT_SOURCE_DIR} [options]'!")
|
||||
-
|
||||
# find requirements
|
||||
SET ( GLIB2_MIN_VERSION "2.12" )
|
||||
SET ( OPENOBEX_MIN_VERSION "1.1" )
|
||||
@@ -0,0 +1,17 @@
|
||||
DESCRIPTION = "Libsyncml is a implementation of the SyncML protocol."
|
||||
HOMEPAGE = "http://libsyncml.opensync.org/"
|
||||
LICENSE = "LGPL"
|
||||
DEPENDS = "sed-native wbxml2 libsoup libxml2 bluez4 openobex libcheck"
|
||||
PR = "r1"
|
||||
|
||||
SRC_URI = "${SOURCEFORGE_MIRROR}/libsyncml/0.5.4/libsyncml-0.5.4.tar.bz2 \
|
||||
file://build-in-src.patch;patch=1"
|
||||
|
||||
inherit cmake pkgconfig
|
||||
|
||||
PACKAGES += "${PN}-tools"
|
||||
|
||||
FILES_${PN}-tools = "${bindir}"
|
||||
FILES_${PN} = "${libdir}/*.so.*"
|
||||
|
||||
export VERBOSE="1"
|
||||
@@ -0,0 +1,20 @@
|
||||
SUMMARY = "Calendar (and other PIM data) synchronization program"
|
||||
DESCRIPTION = "msynctool is a program to synchronize calendars, \
|
||||
addressbooks and other PIM data between programs on your computer and \
|
||||
other computers, mobile devices, PDAs or cell phones. It uses the \
|
||||
OpenSync plugins when synchronizing data."
|
||||
HOMEPAGE = "http://www.opensync.org/"
|
||||
LICENSE = "GPLv2+"
|
||||
LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f \
|
||||
file://tools/msynctool.c;beginline=1;endline=20;md5=0b71ef245b75c74bff7d7ec58b9b4527"
|
||||
|
||||
DEPENDS = "libopensync glib-2.0"
|
||||
|
||||
SRC_URI = "http://www.opensync.org/download/releases/${PV}/msynctool-${PV}.tar.bz2"
|
||||
|
||||
inherit cmake pkgconfig
|
||||
|
||||
PR = "r0"
|
||||
|
||||
SRC_URI[md5sum] = "495c45d6f12d3523a736864b0ced6ce5"
|
||||
SRC_URI[sha256sum] = "4a903d4db05bf2f677a675ec47e9791da9b1752c9feda0026157e82aa97e372b"
|
||||
@@ -0,0 +1,41 @@
|
||||
Upstream-Status: Inappropriate [disable feature]
|
||||
|
||||
---
|
||||
Makefile.am | 2 +-
|
||||
configure.ac | 1 -
|
||||
help/Makefile.am | 1 -
|
||||
3 files changed, 1 insertion(+), 3 deletions(-)
|
||||
|
||||
--- empathy-0.23.4.orig/Makefile.am
|
||||
+++ empathy-0.23.4/Makefile.am
|
||||
@@ -1,6 +1,6 @@
|
||||
-SUBDIRS = tools extensions po data libempathy libempathy-gtk src docs help
|
||||
+SUBDIRS = tools extensions po data libempathy libempathy-gtk src
|
||||
|
||||
if HAVE_MEGAPHONE
|
||||
SUBDIRS += megaphone
|
||||
endif
|
||||
|
||||
--- empathy-0.23.4.orig/configure.ac
|
||||
+++ empathy-0.23.4/configure.ac
|
||||
@@ -55,11 +55,10 @@ AM_PROG_LIBTOOL
|
||||
AM_PROG_MKDIR_P
|
||||
AM_PATH_GLIB_2_0
|
||||
AC_PATH_XTRA
|
||||
IT_PROG_INTLTOOL([0.35.0])
|
||||
GTK_DOC_CHECK([1.3])
|
||||
-GNOME_DOC_INIT
|
||||
IDT_COMPILE_WARNINGS
|
||||
AC_PATH_PROG(DBUS_BINDING_TOOL, dbus-binding-tool)
|
||||
AC_PATH_PROG(GCONFTOOL, gconftool-2)
|
||||
AM_GCONF_SOURCE_2
|
||||
GLIB_GENMARSHAL=`$PKG_CONFIG glib-2.0 --variable=glib_genmarshal`
|
||||
--- empathy-0.23.4.orig/help/Makefile.am
|
||||
+++ empathy-0.23.4/help/Makefile.am
|
||||
@@ -1,6 +1,5 @@
|
||||
-include $(top_srcdir)/gnome-doc-utils.make
|
||||
|
||||
DOC_MODULE = empathy
|
||||
DOC_ENTITIES = legal.xml
|
||||
DOC_FIGURES = \
|
||||
figures/empathy-main-window.png \
|
||||
@@ -0,0 +1,29 @@
|
||||
SUMMARY = "a Telepathy based IM client"
|
||||
DESCRIPTION = "Instant messaging program supporting text, voice, video, file \
|
||||
transfers and interapplication communication over many different protocols, \
|
||||
include: AIM, MSN, Google Talk (Jabber/XMPP), Facebook, Yahoo!, Salut, \
|
||||
Gadu-Gadu, Groupwise, ICQ and QQ."
|
||||
HOMEPAGE = "http://blogs.gnome.org/view/xclaesse/2007/04/26/0"
|
||||
LICENSE = "GPL"
|
||||
DEPENDS = "libcanberra telepathy-farsight gnome-doc-utils python-native telepathy-python telepathy-mission-control libtelepathy telepathy-glib gtk+ gconf libglade eds-dbus"
|
||||
RDEPENDS_${PN} = "telepathy-mission-control"
|
||||
RRECOMMENDS_${PN} = "telepathy-gabble"
|
||||
PR = "r3"
|
||||
|
||||
inherit gnome
|
||||
|
||||
PARALLEL_MAKE = ""
|
||||
|
||||
EXTRA_OECONF += "--disable-scrollkeeper"
|
||||
|
||||
PACKAGES =+ "empathy-scrollkeeper-junk"
|
||||
FILES_empathy-scrollkeeper-junk = "/var/lib/scrollkeeper"
|
||||
|
||||
FILES_${PN} += "${datadir}/mission-control/profiles/*.profile \
|
||||
${datadir}/dbus-1/services/*.service \
|
||||
${datadir}/telepathy/managers/*.chandler \
|
||||
${datadir}/icons \
|
||||
${libdir}/python*"
|
||||
|
||||
FILES_${PN}-dbg += "${libdir}/python*/*/.debug"
|
||||
|
||||
@@ -0,0 +1,16 @@
|
||||
SUMMARY = "Telepathy fasrsight"
|
||||
DESCRIPTION = "Glue library for telepathy media signalling and the media \
|
||||
streaming capabilities of Farsight2."
|
||||
HOMEPAGE = "http://telepathy.freedesktop.org/wiki/"
|
||||
DEPENDS = "glib-2.0 dbus telepathy-glib farsight2"
|
||||
LICENSE = "LGPLv2"
|
||||
|
||||
SRC_URI = "http://telepathy.freedesktop.org/releases/telepathy-farsight/${BPN}-${PV}.tar.gz \
|
||||
"
|
||||
|
||||
inherit autotools
|
||||
|
||||
EXTRA_OECONF = "--disable-python"
|
||||
|
||||
FILES_${PN} += "${datadir}/telepathy \
|
||||
${datadir}/dbus-1"
|
||||
@@ -0,0 +1,33 @@
|
||||
|
||||
[ConnectionManager]
|
||||
BusName=org.freedesktop.Telepathy.ConnectionManager.gabble
|
||||
ObjectPath=/org/freedesktop/Telepathy/ConnectionManager/gabble
|
||||
|
||||
[Protocol jabber]
|
||||
param-account=s required register
|
||||
param-password=s required register
|
||||
param-server=s
|
||||
param-resource=s
|
||||
param-priority=n
|
||||
param-port=q
|
||||
param-old-ssl=b
|
||||
param-register=b
|
||||
param-low-bandwidth=b
|
||||
param-https-proxy-server=s
|
||||
param-https-proxy-port=q
|
||||
param-fallback-conference-server=s
|
||||
param-stun-server=s
|
||||
param-stun-port=q
|
||||
param-ignore-ssl-errors=b
|
||||
param-alias=s
|
||||
param-mac=s
|
||||
param-btid=s
|
||||
default-resource=Telepathy
|
||||
default-priority=0
|
||||
default-port=5222
|
||||
default-old-ssl=false
|
||||
default-register=false
|
||||
default-low-bandwidth=false
|
||||
default-https-proxy-port=443
|
||||
default-stun-port=3478
|
||||
default-ignore-ssl-errors=false
|
||||
@@ -0,0 +1,18 @@
|
||||
SUMMARY = "Telepathy Jabber/XMPP connection manager"
|
||||
DESCRIPTION = "Telepathy implementation of the Jabber/XMPP protocols."
|
||||
HOMEPAGE = "http://telepathy.freedesktop.org/wiki/"
|
||||
DEPENDS = "glib-2.0 dbus loudmouth telepathy-glib dbus-glib"
|
||||
LICENSE = "LGPL"
|
||||
|
||||
# gabble.manager needs to get regenerated every release, so please don't copy it over blindly
|
||||
SRC_URI = "http://telepathy.freedesktop.org/releases/telepathy-gabble/${BPN}-${PV}.tar.gz \
|
||||
file://gabble.manager"
|
||||
|
||||
inherit autotools pkgconfig
|
||||
|
||||
do_compile_prepend() {
|
||||
cp ${WORKDIR}/gabble.manager ${S}/data/
|
||||
}
|
||||
|
||||
FILES_${PN} += "${datadir}/telepathy \
|
||||
${datadir}/dbus-1"
|
||||
@@ -0,0 +1,17 @@
|
||||
|
||||
Upstream-Status: Inappropriate [configuration]
|
||||
--- wbxml2-0.9.2/Makefile.am.old 2007-01-03 19:50:24.000000000 +0000
|
||||
+++ wbxml2-0.9.2/Makefile.am 2007-01-03 19:50:39.000000000 +0000
|
||||
@@ -24,9 +24,9 @@
|
||||
TODO\
|
||||
doxygen.h
|
||||
|
||||
-install-data-local:
|
||||
- $(mkinstalldirs) $(DESTDIR)$(wbxmldocdir)/manual
|
||||
- cp -Rp doc/* $(DESTDIR)$(wbxmldocdir)/manual
|
||||
+#install-data-local:
|
||||
+# $(mkinstalldirs) $(DESTDIR)$(wbxmldocdir)/manual
|
||||
+# cp -Rp doc/* $(DESTDIR)$(wbxmldocdir)/manual
|
||||
|
||||
dist-bz2: distdir
|
||||
sed -e "s/tar.gz/tar.bz2/g" $(distdir)/wbxml2.spec > $(distdir)/wbxml2.spec.aux
|
||||
24
meta-demoapps/recipes-connectivity/wbxml/wbxml2_0.9.2.bb
Normal file
24
meta-demoapps/recipes-connectivity/wbxml/wbxml2_0.9.2.bb
Normal file
@@ -0,0 +1,24 @@
|
||||
SUMMARY = "Parsing and Encoding of WBXML documents"
|
||||
DESCRIPTION = "The WBXML Library (aka libwbxml) contains a library and \
|
||||
its associated tools to Parse, Encode and Handle WBXML documents. The \
|
||||
WBXML format is a binary representation of XML, defined by the Wap \
|
||||
Forum, and used to reduce bandwidth in mobile communications."
|
||||
LICENSE = "GPLv2"
|
||||
|
||||
DEPENDS = "libxml2 sed-native expat"
|
||||
|
||||
SRC_URI = "${SOURCEFORGE_MIRROR}/wbxmllib/${BPN}-${PV}.tar.gz \
|
||||
file://no-doc-install.patch;patch=1"
|
||||
|
||||
inherit autotools pkgconfig
|
||||
|
||||
do_configure_append() {
|
||||
sed -i s:-I/usr/include::g Makefile
|
||||
sed -i s:-I/usr/include::g */Makefile
|
||||
}
|
||||
|
||||
PACKAGES += "${PN}-tools"
|
||||
|
||||
FILES_${PN}-tools = "${bindir}"
|
||||
FILES_${PN} = "${libdir}/*.so.*"
|
||||
|
||||
64
meta-demoapps/recipes-gnome/abiword/abiword-2.5.inc
Normal file
64
meta-demoapps/recipes-gnome/abiword/abiword-2.5.inc
Normal file
@@ -0,0 +1,64 @@
|
||||
DESCRIPTION = "AbiWord is free word processing program similar to Microsoft(r) Word"
|
||||
HOMEPAGE = "http://www.abiword.org"
|
||||
SECTION = "x11/office"
|
||||
LICENSE = "GPLv2"
|
||||
DEPENDS = "perl-native wv libgsf libglade libfribidi jpeg libpng libxml2"
|
||||
RDEPENDS_${PN} = "glibc-gconv-ibm850 glibc-gconv-cp1252 \
|
||||
glibc-gconv-iso8859-15 glibc-gconv-iso8859-1"
|
||||
|
||||
SRC_URI = "http://www.abiword.org/downloads/abiword/${PV}/source/abiword-${PV}.tar.gz"
|
||||
|
||||
#want 2.x from 2.x.y for the installation directory
|
||||
SHRT_VER = "${@d.getVar('PV',1).split('.')[0]}.${@d.getVar('PV',1).split('.')[1]}"
|
||||
|
||||
FILES_${PN} += " \
|
||||
${datadir}/icons/* \
|
||||
${datadir}/abiword-${SHRT_VER}/glade \
|
||||
${datadir}/abiword-${SHRT_VER}/scripts \
|
||||
${datadir}/abiword-${SHRT_VER}/system.profile-en \
|
||||
${datadir}/abiword-${SHRT_VER}/system.profile-en_GB \
|
||||
${datadir}/abiword-${SHRT_VER}/templates/normal.awt \
|
||||
${datadir}/abiword-${SHRT_VER}/templates/normal.awt-en_GB \
|
||||
${datadir}/abiword-${SHRT_VER}/templates/Employee-Directory.awt \
|
||||
${datadir}/abiword-${SHRT_VER}/templates/Business-Report.awt \
|
||||
${datadir}/abiword-${SHRT_VER}/templates/Fax-Coversheet.awt \
|
||||
${datadir}/abiword-${SHRT_VER}/templates/Resume.awt \
|
||||
${datadir}/abiword-${SHRT_VER}/templates/Two-Columns.awt \
|
||||
${datadir}/abiword-${SHRT_VER}/templates/Memo.awt \
|
||||
${datadir}/abiword-${SHRT_VER}/templates/Press-Release.awt "
|
||||
|
||||
# ${datadir}/abiword-${SHRT_VER}/templates/A4.awt \
|
||||
# ${datadir}/abiword-${SHRT_VER}/templates/US-Letter.awt \
|
||||
|
||||
inherit autotools pkgconfig
|
||||
|
||||
PARALLEL_MAKE = ""
|
||||
|
||||
EXTRA_OECONF = " --disable-pspell \
|
||||
--disable-spellcheck \
|
||||
--disable-printing \
|
||||
--disable-exports \
|
||||
--with-sys-wv"
|
||||
|
||||
# AbiWord configure.ac does not play nicely with autoreconf
|
||||
# so use the autogen.sh script that comes with AbiWord
|
||||
#
|
||||
do_configure() {
|
||||
cd ${S}
|
||||
export NOCONFIGURE="no"; ./autogen.sh
|
||||
oe_runconf
|
||||
}
|
||||
|
||||
|
||||
do_install_append() {
|
||||
install -d ${D}${datadir}/pixmaps/
|
||||
mv ${D}${datadir}/icons/* ${D}${datadir}/pixmaps/
|
||||
}
|
||||
|
||||
PACKAGES += " abiword-clipart abiword-icons abiword-strings abiword-systemprofiles abiword-templates "
|
||||
|
||||
FILES_abiword-clipart += "${datadir}/abiword-${SHRT_VER}/clipart"
|
||||
FILES_abiword-icons += "${datadir}/abiword-${SHRT_VER}/icons"
|
||||
FILES_abiword-strings += "${datadir}/abiword-${SHRT_VER}/AbiWord/strings"
|
||||
FILES_abiword-systemprofiles += "${datadir}/abiword-${SHRT_VER}/AbiWord/system.profile*"
|
||||
FILES_abiword-templates += "${datadir}/abiword-${SHRT_VER}/templates"
|
||||
@@ -0,0 +1,12 @@
|
||||
require abiword-2.5.inc
|
||||
|
||||
PR = "r1"
|
||||
|
||||
EXTRA_OECONF += "--enable-embedded"
|
||||
|
||||
S = "${WORKDIR}/abiword-${PV}"
|
||||
|
||||
RCONFLICTS = "abiword"
|
||||
RPROVIDES += "abiword"
|
||||
|
||||
|
||||
64
meta-demoapps/recipes-gnome/abiword/abiword.inc
Normal file
64
meta-demoapps/recipes-gnome/abiword/abiword.inc
Normal file
@@ -0,0 +1,64 @@
|
||||
DESCRIPTION = "AbiWord is free word processing program similar to Microsoft(r) Word"
|
||||
SECTION = "gnome/office"
|
||||
HOMEPAGE = "http://www.abiword.org""
|
||||
LICENSE = "GPLv2"
|
||||
|
||||
DEPENDS = "libgsf libglade libfribidi libxml2 wv"
|
||||
RRECOMMENDS_${PN} = "glibc-gconv-ibm850 glibc-gconv-cp1252 \
|
||||
glibc-gconv-iso8859-15 glibc-gconv-iso8859-1"
|
||||
|
||||
# Share this file between recipes that use cvs and that pull tarballs
|
||||
# (one day change this to differentiate automatically based on PV and set the
|
||||
# SRC_URI and S var directly)
|
||||
RELURI = "http://www.abiword.org/downloads/abiword/${PV}/source/abiword-${PV}.tar.gz"
|
||||
RELSRC = "${WORKDIR}/abiword-${PV}/abi"
|
||||
|
||||
SVNURI = "svn://svn.abisource.com/abiword/trunk;module=abiword;proto=http"
|
||||
SVNSRC = "${WORKDIR}/abi"
|
||||
|
||||
#want 2.x from 2.x.y for the installation directory
|
||||
SHRT_VER = "${@d.getVar('PV',1).split('.')[0]}.${@d.getVar('PV',1).split('.')[1]}"
|
||||
|
||||
FILES_${PN} += " \
|
||||
${datadir}/icons/* \
|
||||
${datadir}/mime-info \
|
||||
${datadir}/abiword-${SHRT_VER}/glade \
|
||||
${datadir}/abiword-${SHRT_VER}/system.profile-en \
|
||||
${datadir}/abiword-${SHRT_VER}/system.profile-en-GB \
|
||||
"
|
||||
|
||||
# ${datadir}/abiword-${SHRT_VER}/templates/normal.awt \
|
||||
# ${datadir}/abiword-${SHRT_VER}/templates/normal.awt-en_GB \
|
||||
|
||||
inherit autotools pkgconfig
|
||||
|
||||
PARALLEL_MAKE = ""
|
||||
|
||||
# This is a minimalistic AbiWord build
|
||||
#
|
||||
# if you are planning to build plugins, make sure to comment out the
|
||||
# --disable-exports options
|
||||
|
||||
EXTRA_OECONF = " --enable-embedded=poky \
|
||||
--disable-printing \
|
||||
--with-sys-wv \
|
||||
--disable-exports \
|
||||
--disable-spellcheck"
|
||||
|
||||
# TODO -- refactor this, so that instead of two strings and profiles
|
||||
# packages we have lots of locale packages, each with a set of strings and
|
||||
# a profile
|
||||
#
|
||||
PACKAGES += " abiword-strings abiword-systemprofiles"
|
||||
|
||||
FILES_abiword-strings += "${datadir}/abiword-${SHRT_VER}/strings"
|
||||
FILES_abiword-systemprofiles += "${datadir}/abiword-${SHRT_VER}/system.profile*"
|
||||
|
||||
# AbiWord configure.ac does not play nicely with autoreconf
|
||||
# so use the autogen.sh script that comes with AbiWord
|
||||
#
|
||||
do_configure() {
|
||||
cd ${S}
|
||||
export NOCONFIGURE="no"; ./autogen.sh
|
||||
oe_runconf
|
||||
}
|
||||
5
meta-demoapps/recipes-gnome/abiword/abiword_2.5.2.bb
Normal file
5
meta-demoapps/recipes-gnome/abiword/abiword_2.5.2.bb
Normal file
@@ -0,0 +1,5 @@
|
||||
require abiword-2.5.inc
|
||||
|
||||
PR = "r2"
|
||||
|
||||
RCONFLICTS_${PN} = "abiword-embedded"
|
||||
10
meta-demoapps/recipes-gnome/abiword/abiword_svn.bb
Normal file
10
meta-demoapps/recipes-gnome/abiword/abiword_svn.bb
Normal file
@@ -0,0 +1,10 @@
|
||||
require abiword.inc
|
||||
|
||||
SRCREV = "21818"
|
||||
PV="2.5.2+svnr${SRCPV}"
|
||||
PR = "r0"
|
||||
|
||||
SRC_URI = "${SVNURI}"
|
||||
|
||||
S = "${SVNSRC}"
|
||||
|
||||
@@ -0,0 +1,33 @@
|
||||
---
|
||||
Makefile.am | 2 +-
|
||||
gcalctool/Makefile.am | 2 --
|
||||
2 files changed, 1 insertion(+), 3 deletions(-)
|
||||
|
||||
Upstream-Status: Inappropriate [configuration]
|
||||
|
||||
Index: gcalctool-5.8.17/gcalctool/Makefile.am
|
||||
===================================================================
|
||||
--- gcalctool-5.8.17.orig/gcalctool/Makefile.am 2005-12-19 15:46:57.000000000 +0000
|
||||
+++ gcalctool-5.8.17/gcalctool/Makefile.am 2007-05-16 16:03:26.000000000 +0100
|
||||
@@ -1,8 +1,6 @@
|
||||
## Process this file with automake to produce Makefile.in
|
||||
|
||||
INCLUDES = \
|
||||
- -I$(includedir) \
|
||||
- -DG_DISABLE_DEPRECATED \
|
||||
-DGDK_PIXBUF_DISABLE_DEPRECATED \
|
||||
-DGDK_DISABLE_DEPRECATED \
|
||||
-DGNOME_DISABLE_DEPRECATED \
|
||||
Index: gcalctool-5.8.17/Makefile.am
|
||||
===================================================================
|
||||
--- gcalctool-5.8.17.orig/Makefile.am 2007-05-16 15:35:44.000000000 +0100
|
||||
+++ gcalctool-5.8.17/Makefile.am 2007-05-16 16:06:46.000000000 +0100
|
||||
@@ -29,7 +29,7 @@ DISTCLEANFILES = \
|
||||
gnome-doc-utils.make \
|
||||
gcalctool.desktop
|
||||
|
||||
-SUBDIRS = po gcalctool help
|
||||
+SUBDIRS = po gcalctool
|
||||
|
||||
SCHEMAS_FILE = gcalctool.schemas
|
||||
|
||||
13
meta-demoapps/recipes-gnome/gcalctool/gcalctool_5.7.32.bb
Normal file
13
meta-demoapps/recipes-gnome/gcalctool/gcalctool_5.7.32.bb
Normal file
@@ -0,0 +1,13 @@
|
||||
LICENSE = "GPL"
|
||||
SECTION = "x11"
|
||||
DEPENDS = "gtk+"
|
||||
DESCRIPTION = "gcalctool is a powerful calculator"
|
||||
PR = "r2"
|
||||
|
||||
SRC_URI = "http://download.gnome.org/sources/${BPN}/5.7/${BPN}-${PV}.tar.gz \
|
||||
file://makefile-fix.diff;patch=1\
|
||||
file://fix-includedir.patch;patch=1"
|
||||
|
||||
inherit autotools pkgconfig
|
||||
|
||||
EXTRA_OECONF = "--disable-gnome"
|
||||
12
meta-demoapps/recipes-gnome/gcalctool/gcalctool_5.8.17.bb
Normal file
12
meta-demoapps/recipes-gnome/gcalctool/gcalctool_5.8.17.bb
Normal file
@@ -0,0 +1,12 @@
|
||||
LICENSE = "GPL"
|
||||
SECTION = "x11"
|
||||
DEPENDS = "gtk+ gnome-doc-utils"
|
||||
DESCRIPTION = "gcalctool is a powerful calculator"
|
||||
PR = "r0"
|
||||
|
||||
SRC_URI = "http://download.gnome.org/sources/${BPN}/5.8/${BPN}-${PV}.tar.gz \
|
||||
file://fix-includedir.patch;patch=1"
|
||||
|
||||
inherit autotools pkgconfig
|
||||
|
||||
EXTRA_OECONF = "--disable-gnome"
|
||||
@@ -0,0 +1,32 @@
|
||||
Upstream-Status: Inappropriate [configuration]
|
||||
|
||||
Index: gnome-settings-daemon-2.26.1/configure.ac
|
||||
===================================================================
|
||||
--- gnome-settings-daemon-2.26.1.orig/configure.ac 2009-09-16 22:57:31.000000000 +0100
|
||||
+++ gnome-settings-daemon-2.26.1/configure.ac 2009-09-16 22:58:45.000000000 +0100
|
||||
@@ -133,9 +133,6 @@
|
||||
#
|
||||
AC_DEFUN([AC_CHECK_X_HEADERS], [
|
||||
ac_save_CPPFLAGS="$CPPFLAGS"
|
||||
- if test \! -z "$includedir" ; then
|
||||
- CPPFLAGS="$CPPFLAGS -I$includedir"
|
||||
- fi
|
||||
CPPFLAGS="$CPPFLAGS $X_CFLAGS"
|
||||
AC_CHECK_HEADERS([$1],[$2],[$3],[$4])
|
||||
CPPFLAGS="$ac_save_CPPFLAGS"])
|
||||
@@ -148,15 +145,9 @@
|
||||
ac_save_LDFLAGS="$LDFLAGS"
|
||||
# ac_save_LIBS="$LIBS"
|
||||
|
||||
- if test \! -z "$includedir" ; then
|
||||
- CPPFLAGS="$CPPFLAGS -I$includedir"
|
||||
- fi
|
||||
# note: $X_CFLAGS includes $x_includes
|
||||
CPPFLAGS="$CPPFLAGS $X_CFLAGS"
|
||||
|
||||
- if test \! -z "$libdir" ; then
|
||||
- LDFLAGS="$LDFLAGS -L$libdir"
|
||||
- fi
|
||||
# note: $X_LIBS includes $x_libraries
|
||||
LDFLAGS="$LDFLAGS $ALL_X_LIBS"
|
||||
|
||||
@@ -0,0 +1,13 @@
|
||||
Upstream-Status: Inappropriate [configuration]
|
||||
|
||||
--- gnome-settings-daemon-2.26.1/data/gnome-settings-daemon.desktop.in.in~ 2009-04-24 20:59:51.000000000 -0700
|
||||
+++ gnome-settings-daemon-2.26.1/data/gnome-settings-daemon.desktop.in.in 2009-04-24 20:59:51.000000000 -0700
|
||||
@@ -2,7 +2,7 @@
|
||||
Type=Application
|
||||
_Name=GNOME Settings Daemon
|
||||
Exec=@LIBEXECDIR@/gnome-settings-daemon
|
||||
-OnlyShowIn=GNOME;
|
||||
+OnlyShowIn=GNOME;MOBLIN;
|
||||
X-GNOME-Autostart-Phase=Initialization
|
||||
X-GNOME-Autostart-Notify=true
|
||||
X-GNOME-AutoRestart=true
|
||||
@@ -0,0 +1,21 @@
|
||||
Upstream-Status: Pending
|
||||
|
||||
============================================================
|
||||
Listen for DeviceAdded in addition to DeviceEnabled
|
||||
|
||||
This should help address problems like left-handed
|
||||
mouse, and acceleration settings getting lost after
|
||||
resume, or when new devices gets plugged in.
|
||||
|
||||
diff --git a/plugins/mouse/gsd-mouse-manager.c b/plugins/mouse/gsd-mouse-manager.c
|
||||
--- a/plugins/mouse/gsd-mouse-manager.c
|
||||
+++ b/plugins/mouse/gsd-mouse-manager.c
|
||||
@@ -320,7 +320,7 @@ devicepresence_filter (GdkXEvent *xevent,
|
||||
if (xev->type == xi_presence)
|
||||
{
|
||||
XDevicePresenceNotifyEvent *dpn = (XDevicePresenceNotifyEvent *) xev;
|
||||
- if (dpn->devchange == DeviceEnabled)
|
||||
+ if (dpn->devchange == DeviceEnabled || dpn->devchange == DeviceAdded)
|
||||
set_mouse_settings ((GsdMouseManager *) data);
|
||||
}
|
||||
return GDK_FILTER_CONTINUE;
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user