Compare commits
1 commit

| Author | SHA1 | Date |
|---|---|---|
|  | fae1c7a5fd |  |

.gitignore (vendored): 13 changes
@@ -1,17 +1,20 @@
*.pyc
*.pyo
/*.patch
build*/
build*/conf/local.conf
build*/conf/bblayers.conf
build*/downloads
build*/tmp/
build*/sstate-cache
pyshtables.py
pstage/
scripts/oe-git-proxy-socks
sources/
meta-*
!meta-skeleton
!meta-hob
!meta-demoapps
*.swp
*.orig
*.rej
*~
!meta-yocto
!meta-yocto-bsp

README: 22 changes
@@ -18,7 +18,7 @@ e.g. for the hardware support. Poky is in turn a component of the Yocto Project.
The Yocto Project has extensive documentation about the system including a
reference manual which can be found at:
    http://yoctoproject.org/documentation
    http://yoctoproject.org/community/documentation

OpenEmbedded-Core is a layer containing the core metadata for current versions
of OpenEmbedded. It is distro-less (can build a functional image with
@@ -27,23 +27,3 @@ DISTRO = "") and contains only emulated machine support.
For information about OpenEmbedded, see the OpenEmbedded website:
    http://www.openembedded.org/

Where to Send Patches
=====================

As Poky is an integration repository, patches against the various components
should be sent to their respective upstreams.

bitbake:
    bitbake-devel@lists.openembedded.org

meta-yocto:
    poky@yoctoproject.org

Most everything else should be sent to the OpenEmbedded Core mailing list. If
in doubt, check the oe-core git repository for the content you intend to modify.
Before sending, be sure the patches apply cleanly to the current oe-core git
repository.
    openembedded-core@lists.openembedded.org

Note: The scripts directory should be treated with extra care as it is a mix
of oe-core and poky-specific files.

@@ -40,17 +40,9 @@ from bb import cooker
from bb import ui
from bb import server

__version__ = "1.16.0"
__version__ = "1.15.1"

logger = logging.getLogger("BitBake")

# Unbuffer stdout to avoid log truncation in the event
# of a disorderly exit as well as to provide timely
# updates to log files for use with tail
try:
    if sys.stdout.name == '<stdout>':
        sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
except:
    pass

class BBConfiguration(object):
    """
@@ -64,11 +56,10 @@ class BBConfiguration(object):

def get_ui(config):
    if not config.ui:
        # modify 'ui' attribute because it is also read by cooker
        config.ui = os.environ.get('BITBAKE_UI', 'knotty')

    interface = config.ui
    if config.ui:
        interface = config.ui
    else:
        interface = 'knotty'

    try:
        # Dynamically load the UI based on the ui name. Although we
@@ -78,7 +69,7 @@ def get_ui(config):
        return getattr(module, interface).main
    except AttributeError:
        sys.exit("FATAL: Invalid user interface '%s' specified.\n"
                 "Valid interfaces: depexp, goggle, ncurses, hob, knotty [default]." % interface)
                 "Valid interfaces: depexp, goggle, ncurses, hob, knotty [default], knotty2." % interface)

# Display bitbake/OE warnings via the BitBake.Warnings logger, ignoring others"""
@@ -126,9 +117,6 @@ Default BBFILES are the .bb files in the current directory.""")
    parser.add_option("-c", "--cmd", help = "Specify task to execute. Note that this only executes the specified task for the providee and the packages it depends on, i.e. 'compile' does not implicitly call stage for the dependencies (IOW: use only if you know what you are doing). Depending on the base.bbclass a listtasks task is defined and will show available tasks",
               action = "store", dest = "cmd")

    parser.add_option("-C", "--clear-stamp", help = "Invalidate the stamp for the specified cmd such as 'compile' and run the default task for the specified target(s)",
               action = "store", dest = "invalidate_stamp")

    parser.add_option("-r", "--read", help = "read the specified file before bitbake.conf",
               action = "append", dest = "prefile", default = [])

@@ -150,13 +138,13 @@ Default BBFILES are the .bb files in the current directory.""")
    parser.add_option("-p", "--parse-only", help = "quit after parsing the BB files (developers only)",
               action = "store_true", dest = "parse_only", default = False)

    parser.add_option("-s", "--show-versions", help = "show current and preferred versions of all recipes",
    parser.add_option("-s", "--show-versions", help = "show current and preferred versions of all packages",
               action = "store_true", dest = "show_versions", default = False)

    parser.add_option("-e", "--environment", help = "show the global or per-package environment (this is what used to be bbread)",
               action = "store_true", dest = "show_environment", default = False)

    parser.add_option("-g", "--graphviz", help = "emit the dependency trees of the specified packages in the dot syntax, and the pn-buildlist to show the build list",
    parser.add_option("-g", "--graphviz", help = "emit the dependency trees of the specified packages in the dot syntax",
               action = "store_true", dest = "dot_graph", default = False)

    parser.add_option("-I", "--ignore-deps", help = """Assume these dependencies don't exist and are already provided (equivalent to ASSUME_PROVIDED). Useful to make dependency graphs more appealing""",
@@ -182,8 +170,6 @@ Default BBFILES are the .bb files in the current directory.""")

    parser.add_option("-B", "--bind", help = "The name/address for the bitbake server to bind to",
               action = "store", dest = "bind", default = False)

    parser.add_option("", "--no-setscene", help = "Do not run any setscene tasks, forces builds",
               action = "store_true", dest = "nosetscene", default = False)

    options, args = parser.parse_args(sys.argv)

    configuration = BBConfiguration(options)

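The get_ui() hunks above boil down to a name-based dynamic import: resolve a UI name from the configuration or the BITBAKE_UI environment variable, then look up that module's main() callable. A minimal standalone sketch of the same pattern (load_ui is an illustrative name, not BitBake API, and it assumes the bb package is importable):

    import importlib
    import os

    def load_ui(config_ui=None, default='knotty'):
        """Resolve a UI name to its main() callable, as get_ui() does."""
        name = config_ui or os.environ.get('BITBAKE_UI', default)
        module = importlib.import_module('bb.ui')   # package holding the UIs
        try:
            return getattr(module, name).main       # e.g. bb.ui.knotty.main
        except AttributeError:
            raise SystemExit("FATAL: Invalid user interface '%s' specified." % name)
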
@@ -1,102 +1,12 @@
#!/usr/bin/env python

# bitbake-diffsigs
# BitBake task signature data comparison utility
#
# Copyright (C) 2012 Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os
import sys
import warnings
import fnmatch
import optparse
import logging

sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))

import bb.tinfoil
import bb.siggen

logger = logging.getLogger('BitBake')

def find_compare_task(bbhandler, pn, taskname):
    """ Find the most recent signature files for the specified PN/task and compare them """

    if not hasattr(bb.siggen, 'find_siginfo'):
        logger.error('Metadata does not support finding signature data files')
        sys.exit(1)

    filedates = bb.siggen.find_siginfo(pn, taskname, None, bbhandler.config_data)
    latestfiles = sorted(filedates.keys(), key=lambda f: filedates[f])[-2:]
    if not latestfiles:
        logger.error('No sigdata files found matching %s %s' % (pn, taskname))
        sys.exit(1)
    elif len(latestfiles) < 2:
        logger.error('Only one matching sigdata file found for the specified task (%s %s)' % (pn, taskname))
        sys.exit(1)
    else:
        # Define recursion callback
        def recursecb(key, hash1, hash2):
            hashes = [hash1, hash2]
            hashfiles = bb.siggen.find_siginfo(key, None, hashes, bbhandler.config_data)

            recout = []
            if len(hashfiles) == 2:
                out2 = bb.siggen.compare_sigfiles(hashfiles[hash1], hashfiles[hash2], recursecb)
                recout.extend(list(' ' + l for l in out2))
            else:
                recout.append("Unable to find matching sigdata for %s with hashes %s or %s" % (key, hash1, hash2))

            return recout

        # Recurse into signature comparison
        output = bb.siggen.compare_sigfiles(latestfiles[0], latestfiles[1], recursecb)
        if output:
            print '\n'.join(output)
        sys.exit(0)


parser = optparse.OptionParser(
    usage = """
  %prog -t recipename taskname
  %prog sigdatafile1 sigdatafile2
  %prog sigdatafile1""")

parser.add_option("-t", "--task",
        help = "find the signature data files for the last two runs of the specified task and compare them",
        action="store_true", dest="taskmode")

options, args = parser.parse_args(sys.argv)

if len(args) == 1:
    parser.print_help()
if len(sys.argv) > 2:
    bb.siggen.compare_sigfiles(sys.argv[1], sys.argv[2])
else:
    tinfoil = bb.tinfoil.Tinfoil()
    if options.taskmode:
        if len(args) < 3:
            logger.error("Please specify a recipe and task name")
            sys.exit(1)
        tinfoil.prepare(config_only = True)
        find_compare_task(tinfoil, args[1], args[2])
    else:
        if len(args) == 2:
            output = bb.siggen.dump_sigfile(sys.argv[1])
        else:
            output = bb.siggen.compare_sigfiles(sys.argv[1], sys.argv[2])

        if output:
            print '\n'.join(output)
    bb.siggen.dump_sigfile(sys.argv[1])

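The selection step in find_compare_task() above reduces to: sort the candidate signature files by their timestamps and keep the newest two. A minimal sketch of just that step (latest_two is an illustrative helper, not part of bitbake-diffsigs):

    def latest_two(filedates):
        # filedates maps path -> timestamp; return the two most recent paths
        return sorted(filedates, key=lambda f: filedates[f])[-2:]

    # e.g. latest_two({'a.siginfo': 10, 'b.siginfo': 30, 'c.siginfo': 20})
    # -> ['c.siginfo', 'b.siginfo']
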
@@ -6,6 +6,4 @@ sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))

import bb.siggen

output = bb.siggen.dump_sigfile(sys.argv[1])
if output:
    print '\n'.join(output)
bb.siggen.dump_sigfile(sys.argv[1])

@@ -6,19 +6,6 @@

# Copyright (C) 2011 Mentor Graphics Corporation
# Copyright (C) 2012 Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import cmd
import logging
@@ -35,14 +22,25 @@ import bb.cache
import bb.cooker
import bb.providers
import bb.utils
import bb.tinfoil
from bb.cooker import state
import bb.fetch2


logger = logging.getLogger('BitBake')


def main(args):
    cmds = Commands()
    # Set up logging
    console = logging.StreamHandler(sys.stdout)
    format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
    bb.msg.addDefaultlogFilter(console)
    console.setFormatter(format)
    logger.addHandler(console)

    initialenv = os.environ.copy()
    bb.utils.clean_environment()

    cmds = Commands(initialenv)
    if args:
        # Allow user to specify e.g. show-layers instead of show_layers
        args = [args[0].replace('-', '_')] + args[1:]
@@ -53,11 +51,42 @@ def main(args):


class Commands(cmd.Cmd):
    def __init__(self):
    def __init__(self, initialenv):
        cmd.Cmd.__init__(self)
        self.bbhandler = bb.tinfoil.Tinfoil()
        self.returncode = 0
        self.bblayers = (self.bbhandler.config_data.getVar('BBLAYERS', True) or "").split()
        self.config = Config(parse_only=True)
        self.cooker = bb.cooker.BBCooker(self.config,
                                         self.register_idle_function,
                                         initialenv)
        self.config_data = self.cooker.configuration.data
        bb.providers.logger.setLevel(logging.ERROR)
        self.cooker_data = None
        self.bblayers = (self.config_data.getVar('BBLAYERS', True) or "").split()

    def register_idle_function(self, function, data):
        pass

    def prepare_cooker(self):
        sys.stderr.write("Parsing recipes..")
        logger.setLevel(logging.WARNING)

        try:
            while self.cooker.state in (state.initial, state.parsing):
                self.cooker.updateCache()
        except KeyboardInterrupt:
            self.cooker.shutdown()
            self.cooker.updateCache()
            sys.exit(2)

        logger.setLevel(logging.INFO)
        sys.stderr.write("done.\n")

        self.cooker_data = self.cooker.status
        self.cooker_data.appends = self.cooker.appendlist

    def check_prepare_cooker(self):
        if not self.cooker_data:
            self.prepare_cooker()

    def default(self, line):
        """Handle unrecognised commands"""
@@ -82,13 +111,14 @@ class Commands(cmd.Cmd):

    def do_show_layers(self, args):
        """show current configured layers"""
        self.bbhandler.prepare(config_only = True)
        self.check_prepare_cooker()
        logger.plain('')
        logger.plain("%s %s %s" % ("layer".ljust(20), "path".ljust(40), "priority"))
        logger.plain('=' * 74)
        for layerdir in self.bblayers:
            layername = self.get_layer_name(layerdir)
            layerpri = 0
            for layer, _, regex, pri in self.bbhandler.cooker.status.bbfile_config_priorities:
            for layer, _, regex, pri in self.cooker.status.bbfile_config_priorities:
                if regex.match(os.path.join(layerdir, 'test')):
                    layerpri = pri
                    break
@@ -106,7 +136,7 @@ class Commands(cmd.Cmd):

    def do_show_overlayed(self, args):
        """list overlayed recipes (where the same recipe exists in another layer)
        """list overlayed recipes (where the same recipe exists in another layer that has a higher layer priority)

usage: show-overlayed [-f] [-s]

@@ -119,7 +149,7 @@ Options:
      recipes with the ones they overlay indented underneath
  -s  only list overlayed recipes where the version is the same
"""
        self.bbhandler.prepare()
        self.check_prepare_cooker()

        show_filenames = False
        show_same_ver_only = False
@@ -151,7 +181,7 @@ Options:
        # factor - however, each layer.conf is free to either prepend or append to
        # BBPATH (or indeed do crazy stuff with it). Thus the order in BBPATH might
        # not be exactly the order present in bblayers.conf either.
        bbpath = str(self.bbhandler.config_data.getVar('BBPATH', True))
        bbpath = str(self.config_data.getVar('BBPATH', True))
        overlayed_class_found = False
        for (classfile, classdirs) in classes.items():
            if len(classdirs) > 1:
@@ -202,7 +232,7 @@ Options:
  -m  only list where multiple recipes (in the same layer or different
      layers) exist for the same recipe name
"""
        self.bbhandler.prepare()
        self.check_prepare_cooker()

        show_filenames = False
        show_multi_provider_only = False
@@ -224,15 +254,15 @@ Options:


    def list_recipes(self, title, pnspec, show_overlayed_only, show_same_ver_only, show_filenames, show_multi_provider_only):
        pkg_pn = self.bbhandler.cooker.status.pkg_pn
        (latest_versions, preferred_versions) = bb.providers.findProviders(self.bbhandler.cooker.configuration.data, self.bbhandler.cooker.status, pkg_pn)
        allproviders = bb.providers.allProviders(self.bbhandler.cooker.status)
        pkg_pn = self.cooker.status.pkg_pn
        (latest_versions, preferred_versions) = bb.providers.findProviders(self.cooker.configuration.data, self.cooker.status, pkg_pn)
        allproviders = bb.providers.allProviders(self.cooker.status)

        # Ensure we list skipped recipes
        # We are largely guessing about PN, PV and the preferred version here,
        # but we have no choice since skipped recipes are not fully parsed
        skiplist = self.bbhandler.cooker.skiplist.keys()
        skiplist.sort( key=lambda fileitem: self.bbhandler.cooker.calc_bbfile_priority(fileitem) )
        skiplist = self.cooker.skiplist.keys()
        skiplist.sort( key=lambda fileitem: self.cooker.calc_bbfile_priority(fileitem) )
        skiplist.reverse()
        for fn in skiplist:
            recipe_parts = os.path.splitext(os.path.basename(fn))[0].split('_')
@@ -340,7 +370,7 @@ build results (as the layer priority order has effectively changed).
            logger.error('Directory %s exists and is non-empty, please clear it out first' % outputdir)
            return

        self.bbhandler.prepare()
        self.check_prepare_cooker()
        layers = self.bblayers
        if len(arglist) > 2:
            layernames = arglist[:-1]
@@ -370,8 +400,8 @@ build results (as the layer priority order has effectively changed).
        appended_recipes = []
        for layer in layers:
            overlayed = []
            for f in self.bbhandler.cooker.overlayed.iterkeys():
                for of in self.bbhandler.cooker.overlayed[f]:
            for f in self.cooker.overlayed.iterkeys():
                for of in self.cooker.overlayed[f]:
                    if of.startswith(layer):
                        overlayed.append(of)

@@ -395,8 +425,8 @@ build results (as the layer priority order has effectively changed).
                    logger.warn('Overwriting file %s', fdest)
                bb.utils.copyfile(f1full, fdest)
                if ext == '.bb':
                    if f1 in self.bbhandler.cooker.appendlist:
                        appends = self.bbhandler.cooker.appendlist[f1]
                    if f1 in self.cooker_data.appends:
                        appends = self.cooker_data.appends[f1]
                        if appends:
                            logger.plain(' Applying appends to %s' % fdest )
                            for appendname in appends:
@@ -405,9 +435,9 @@ build results (as the layer priority order has effectively changed).
                            appended_recipes.append(f1)

        # Take care of when some layers are excluded and yet we have included bbappends for those recipes
        for recipename in self.bbhandler.cooker.appendlist.iterkeys():
        for recipename in self.cooker_data.appends.iterkeys():
            if recipename not in appended_recipes:
                appends = self.bbhandler.cooker.appendlist[recipename]
                appends = self.cooker_data.appends[recipename]
                first_append = None
                for appendname in appends:
                    layer = layer_path_match(appendname)
@@ -425,14 +455,14 @@ build results (as the layer priority order has effectively changed).
        # have come from)
        first_regex = None
        layerdir = layers[0]
        for layername, pattern, regex, _ in self.bbhandler.cooker.status.bbfile_config_priorities:
        for layername, pattern, regex, _ in self.cooker.status.bbfile_config_priorities:
            if regex.match(os.path.join(layerdir, 'test')):
                first_regex = regex
                break

        if first_regex:
            # Find the BBFILES entries that match (which will have come from this conf/layer.conf file)
            bbfiles = str(self.bbhandler.config_data.getVar('BBFILES', True)).split()
            bbfiles = str(self.config_data.getVar('BBFILES', True)).split()
            bbfiles_layer = []
            for item in bbfiles:
                if first_regex.match(item):
@@ -455,7 +485,7 @@ build results (as the layer priority order has effectively changed).
                    logger.warning("File %s does not match the flattened layer's BBFILES setting, you may need to edit conf/layer.conf or move the file elsewhere" % f1full)

    def get_file_layer(self, filename):
        for layer, _, regex, _ in self.bbhandler.cooker.status.bbfile_config_priorities:
        for layer, _, regex, _ in self.cooker.status.bbfile_config_priorities:
            if regex.match(filename):
                for layerdir in self.bblayers:
                    if regex.match(os.path.join(layerdir, 'test')):
@@ -481,14 +511,14 @@ usage: show-appends

Recipes are listed with the bbappends that apply to them as subitems.
"""
        self.bbhandler.prepare()
        if not self.bbhandler.cooker.appendlist:
        self.check_prepare_cooker()
        if not self.cooker_data.appends:
            logger.plain('No append files found')
            return

        logger.plain('=== Appended recipes ===')
        logger.plain('State of append files:')

        pnlist = list(self.bbhandler.cooker_data.pkg_pn.keys())
        pnlist = list(self.cooker_data.pkg_pn.keys())
        pnlist.sort()
        for pn in pnlist:
            self.show_appends_for_pn(pn)
@@ -496,19 +526,19 @@ Recipes are listed with the bbappends that apply to them as subitems.
        self.show_appends_for_skipped()

    def show_appends_for_pn(self, pn):
        filenames = self.bbhandler.cooker_data.pkg_pn[pn]
        filenames = self.cooker_data.pkg_pn[pn]

        best = bb.providers.findBestProvider(pn,
                                             self.bbhandler.cooker.configuration.data,
                                             self.bbhandler.cooker_data,
                                             self.bbhandler.cooker_data.pkg_pn)
                                             self.cooker.configuration.data,
                                             self.cooker_data,
                                             self.cooker_data.pkg_pn)
        best_filename = os.path.basename(best[3])

        self.show_appends_output(filenames, best_filename)

    def show_appends_for_skipped(self):
        filenames = [os.path.basename(f)
                    for f in self.bbhandler.cooker.skiplist.iterkeys()]
                    for f in self.cooker.skiplist.iterkeys()]
        self.show_appends_output(filenames, None, " (skipped)")

    def show_appends_output(self, filenames, best_filename, name_suffix = ''):
@@ -534,7 +564,7 @@ Recipes are listed with the bbappends that apply to them as subitems.
                continue

            basename = os.path.basename(filename)
            appends = self.bbhandler.cooker.appendlist.get(basename)
            appends = self.cooker_data.appends.get(basename)
            if appends:
                appended.append((basename, list(appends)))
            else:
@@ -542,5 +572,22 @@ Recipes are listed with the bbappends that apply to them as subitems.
        return appended, notappended


class Config(object):
    def __init__(self, **options):
        self.pkgs_to_build = []
        self.debug_domains = []
        self.extra_assume_provided = []
        self.prefile = []
        self.postfile = []
        self.debug = 0
        self.__dict__.update(options)

    def __getattr__(self, attribute):
        try:
            return super(Config, self).__getattribute__(attribute)
        except AttributeError:
            return None


if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]) or 0)

@@ -1,38 +0,0 @@
#!/usr/bin/env python
#
# Copyright (C) 2012 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os
import sys, logging
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), 'lib'))

import unittest
try:
    import bb
except RuntimeError as exc:
    sys.exit(str(exc))

tests = ["bb.tests.codeparser",
         "bb.tests.cow",
         "bb.tests.data",
         "bb.tests.fetch",
         "bb.tests.utils"]

for t in tests:
    __import__(t)

unittest.main(argv=["bitbake-selftest"] + tests)

@@ -1,120 +0,0 @@
#!/usr/bin/env python

# Copyright (c) 2012 Wind River Systems, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname( \
    os.path.abspath(__file__))), 'lib'))
try:
    import bb
except RuntimeError as exc:
    sys.exit(str(exc))

import gtk
import optparse
import pygtk

from bb.ui.crumbs.hig import DeployImageDialog, ImageSelectionDialog, CrumbsMessageDialog
from bb.ui.crumbs.hobwidget import HobAltButton, HobButton

# All the filesystem image types bitbake supports; needs more testing.
DEPLOYABLE_IMAGE_TYPES = ["jffs2", "cramfs", "ext2", "ext3", "btrfs", "squashfs", "ubi", "vmdk"]
Title = "USB Image Writer"

class DeployWindow(gtk.Window):
    def __init__(self, image_path=''):
        super(DeployWindow, self).__init__()

        if len(image_path) > 0:
            valid = True
            if not os.path.exists(image_path):
                valid = False
                lbl = "<b>Invalid image file path: %s.</b>\nPress <b>Select Image</b> to select an image." % image_path
            else:
                image_path = os.path.abspath(image_path)
                extend_name = os.path.splitext(image_path)[1][1:]
                if extend_name not in DEPLOYABLE_IMAGE_TYPES:
                    valid = False
                    lbl = "<b>Undeployable image type: %s</b>\nPress <b>Select Image</b> to select an image." % extend_name

            if not valid:
                image_path = ''
                crumbs_dialog = CrumbsMessageDialog(self, lbl, gtk.STOCK_DIALOG_INFO)
                button = crumbs_dialog.add_button("Close", gtk.RESPONSE_OK)
                HobButton.style_button(button)
                crumbs_dialog.run()
                crumbs_dialog.destroy()

        self.deploy_dialog = DeployImageDialog(Title, image_path, self,
                                               gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT
                                               | gtk.DIALOG_NO_SEPARATOR, None, standalone=True)
        close_button = self.deploy_dialog.add_button("Close", gtk.RESPONSE_NO)
        HobAltButton.style_button(close_button)
        close_button.connect('clicked', gtk.main_quit)

        write_button = self.deploy_dialog.add_button("Write USB image", gtk.RESPONSE_YES)
        HobAltButton.style_button(write_button)

        self.deploy_dialog.connect('select_image_clicked', self.select_image_clicked_cb)
        self.deploy_dialog.connect('destroy', gtk.main_quit)
        response = self.deploy_dialog.show()

    def select_image_clicked_cb(self, dialog):
        cwd = os.getcwd()
        dialog = ImageSelectionDialog(cwd, DEPLOYABLE_IMAGE_TYPES, Title, self, gtk.FILE_CHOOSER_ACTION_SAVE )
        button = dialog.add_button("Cancel", gtk.RESPONSE_NO)
        HobAltButton.style_button(button)
        button = dialog.add_button("Open", gtk.RESPONSE_YES)
        HobAltButton.style_button(button)
        response = dialog.run()

        if response == gtk.RESPONSE_YES:
            if not dialog.image_names:
                lbl = "<b>No selections made</b>\nClick the radio button to select an image."
                crumbs_dialog = CrumbsMessageDialog(self, lbl, gtk.STOCK_DIALOG_INFO)
                button = crumbs_dialog.add_button("Close", gtk.RESPONSE_OK)
                HobButton.style_button(button)
                crumbs_dialog.run()
                crumbs_dialog.destroy()
                dialog.destroy()
                return

            # get the full path of image
            image_path = os.path.join(dialog.image_folder, dialog.image_names[0])
            self.deploy_dialog.set_image_text_buffer(image_path)
            self.deploy_dialog.set_image_path(image_path)

        dialog.destroy()

def main():
    parser = optparse.OptionParser(
        usage = """%prog [-h] [image_file]

%prog writes bootable images to USB devices. You can
provide the image file on the command line or select it using the GUI.""")

    options, args = parser.parse_args(sys.argv)
    image_file = args[1] if len(args) > 1 else ''
    dw = DeployWindow(image_file)

if __name__ == '__main__':
    try:
        main()
        gtk.main()
    except Exception:
        import traceback
        traceback.print_exc(3)

@@ -1,68 +0,0 @@
#!/usr/bin/env python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (C) 2012 Wind River Systems, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

#
# This is used for dumping the bb_cache.dat, the output format is:
# recipe_path PN PV PACKAGES
#
import os
import sys
import warnings

# For importing bb.cache
sys.path.insert(0, os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), '../lib'))
from bb.cache import CoreRecipeInfo

import cPickle as pickle

def main(argv=None):
    """
    Get the mapping for the target recipe.
    """
    if len(argv) != 1:
        print >>sys.stderr, "Error, need one argument!"
        return 2

    cachefile = argv[0]

    with open(cachefile, "rb") as cachefile:
        pickled = pickle.Unpickler(cachefile)
        while cachefile:
            try:
                key = pickled.load()
                val = pickled.load()
            except Exception:
                break
            if isinstance(val, CoreRecipeInfo) and (not val.skipped):
                pn = val.pn
                # Filter out the native recipes.
                if key.startswith('virtual:native:') or pn.endswith("-native"):
                    continue

                # 1.0 is the default version for a recipe with no PV.
                if val.__dict__.has_key("pv"):
                    pv = val.pv
                else:
                    pv = "1.0"

                print("%s %s %s %s" % (key, pn, pv, ' '.join(val.packages)))

if __name__ == "__main__":
    sys.exit(main(sys.argv[1:]))

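The main() loop above relies on the cache file holding many pickled key/value pairs back to back, reading until the stream runs out. A minimal Python 3 sketch of that read-until-exhausted pattern (iter_pickled_pairs is an illustrative name, not part of the script above):

    import pickle

    def iter_pickled_pairs(path):
        """Yield (key, value) pairs from a file of concatenated pickles."""
        with open(path, "rb") as f:
            unpickler = pickle.Unpickler(f)
            while True:
                try:
                    key = unpickler.load()
                    val = unpickler.load()
                except EOFError:
                    break
                yield key, val
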
@@ -103,13 +103,7 @@ Show debug logging for the specified logging domains
.TP
.B \-P, \-\-profile
profile the command and print a report

.SH ENVIRONMENT VARIABLES
bitbake uses the following environment variables to control its
operation:
.TP
.B BITBAKE_UI
The bitbake user interface; overridden by the \fB-u\fP commandline option.

.SH AUTHORS
BitBake was written by

@@ -228,7 +228,7 @@ addtask printdate before do_build</screen></para>
<para>'nostamp' - don't generate a stamp file for a task. This means the task is always re-executed.</para>
<para>'fakeroot' - this task needs to be run in a fakeroot environment, obtained by adding the variables in FAKEROOTENV to the environment.</para>
<para>'umask' - the umask to run the task under.</para>
<para>For the 'deptask', 'rdeptask', 'depends', 'rdepends' and 'recrdeptask' flags please see the dependencies section.</para>
<para>For the 'deptask', 'rdeptask', 'recdeptask' and 'recrdeptask' flags please see the dependencies section.</para>
</section>

<section>
@@ -308,35 +308,37 @@ SRC_URI_append_1.0.7+ = "file://some_patch_which_the_new_versions_need.patch;pat
</section>
<section>
<title>Dependency handling</title>
<para>BitBake handles dependencies at the task level to allow for efficient operation with multiple processes executing in parallel. A robust method of specifying task dependencies is therefore needed.</para>
<para>BitBake 1.7.x onwards works with the metadata at the task level since this is optimal when dealing with multiple threads of execution. A robust method of specifying task dependencies is therefore needed.</para>
<section>
<title>Dependencies internal to the .bb file</title>
<para>Where the dependencies are internal to a given .bb file, the dependencies are handled by the previously detailed addtask directive.</para>
</section>

<section>
<title>Build Dependencies</title>
<title>DEPENDS</title>
<para>DEPENDS lists build-time dependencies. The 'deptask' flag for tasks is used to signify the task of each item listed in DEPENDS which must have completed before that task can be executed.</para>
<para><screen>do_configure[deptask] = "do_populate_staging"</screen></para>
<para>means the do_populate_staging task of each item in DEPENDS must have completed before do_configure can execute.</para>
</section>
<section>
<title>Runtime Dependencies</title>
<para>The PACKAGES variable lists runtime packages and each of these can have RDEPENDS and RRECOMMENDS runtime dependencies. The 'rdeptask' flag for tasks is used to signify the task of each runtime dependency which must have completed before that task can be executed.</para>
<title>RDEPENDS</title>
<para>RDEPENDS lists runtime dependencies. The 'rdeptask' flag for tasks is used to signify the task of each item listed in RDEPENDS which must have completed before that task can be executed.</para>
<para><screen>do_package_write[rdeptask] = "do_package"</screen></para>
<para>means the do_package task of each item in RDEPENDS must have completed before do_package_write can execute.</para>
</section>
<section>
<title>Recursive Dependencies</title>
<para>These are specified with the 'recrdeptask' flag, which is used to signify the task(s) of dependencies which must have completed before that task can be executed. It works by looking through the build and runtime dependencies of the current recipe as well as any inter-task dependencies the task has, then adding a dependency on the listed task. It will then recurse through the dependencies of those tasks and so on.</para>
<para>It may be desirable to recurse not just through the dependencies of those tasks but through the build and runtime dependencies of dependent tasks too. If that is the case, the taskname itself should be referenced in the task list, e.g. do_a[recrdeptask] = "do_a do_b".</para>
<title>Recursive DEPENDS</title>
<para>These are specified with the 'recdeptask' flag, which is used to signify the task(s) of each DEPENDS which must have completed before that task can be executed. It applies recursively, so the DEPENDS of each item in the original DEPENDS must be met and so on.</para>
</section>
<section>
<title>Recursive RDEPENDS</title>
<para>These are specified with the 'recrdeptask' flag, which is used to signify the task(s) of each RDEPENDS which must have completed before that task can be executed. It applies recursively, so the RDEPENDS of each item in the original RDEPENDS must be met and so on. It also runs all DEPENDS first.</para>
</section>
<section>
<title>Inter task</title>
<para>The 'depends' flag for tasks is a more generic form which allows an interdependency on specific tasks rather than specifying the data in DEPENDS.</para>
<para>The 'depends' flag for tasks is a more generic form which allows an interdependency on specific tasks rather than specifying the data in DEPENDS or RDEPENDS.</para>
<para><screen>do_patch[depends] = "quilt-native:do_populate_staging"</screen></para>
<para>means the do_populate_staging task of the target quilt-native must have completed before the do_patch task can execute.</para>
<para>The 'rdepends' flag works in a similar way but takes targets in the runtime namespace instead of the build-time dependency namespace.</para>
</section>
</section>

@@ -21,7 +21,7 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

__version__ = "1.16.0"
__version__ = "1.15.1"

import sys
if sys.version_info < (2, 6, 0):

@@ -29,7 +29,6 @@ import os
import sys
import logging
import shlex
import glob
import bb
import bb.msg
import bb.process
@@ -73,7 +72,7 @@ class TaskBase(event.Event):
        self._task = t
        self._package = d.getVar("PF", True)
        event.Event.__init__(self)
        self._message = "recipe %s: task %s: %s" % (d.getVar("PF", True), t, self.getDisplayName())
        self._message = "package %s: task %s: %s" % (d.getVar("PF", True), t, self.getDisplayName())

    def getTask(self):
        return self._task
@@ -136,8 +135,7 @@ class LogTee(object):

    def __repr__(self):
        return '<LogTee {0}>'.format(self.name)

    def flush(self):
        self.outfile.flush()


def exec_func(func, d, dirs = None):
    """Execute a BB 'function'"""
@@ -176,19 +174,8 @@ def exec_func(func, d, dirs = None):
        lockfiles = None

    tempdir = data.getVar('T', d, 1)

    # or func allows items to be executed outside of the normal
    # task set, such as buildhistory
    task = data.getVar('BB_RUNTASK', d, 1) or func
    if task == func:
        taskfunc = task
    else:
        taskfunc = "%s.%s" % (task, func)

    runfmt = data.getVar('BB_RUNFMT', d, 1) or "run.{func}.{pid}"
    runfn = runfmt.format(taskfunc=taskfunc, task=task, func=func, pid=os.getpid())
    runfile = os.path.join(tempdir, runfn)
    bb.utils.mkdirhier(os.path.dirname(runfile))
    bb.utils.mkdirhier(tempdir)
    runfile = os.path.join(tempdir, 'run.{0}.{1}'.format(func, os.getpid()))

    with bb.utils.fileslocked(lockfiles):
        if ispython:
@@ -219,8 +206,6 @@ def exec_func_python(func, d, runfile, cwd=None):
        olddir = None
    os.chdir(cwd)

    bb.debug(2, "Executing python function %s" % func)

    try:
        comp = utils.better_compile(code, func, bbfile)
        utils.better_exec(comp, {"d": d}, code, bbfile)
@@ -230,15 +215,13 @@ def exec_func_python(func, d, runfile, cwd=None):

        raise FuncFailed(func, None)
    finally:
        bb.debug(2, "Python function %s finished" % func)

        if cwd and olddir:
            try:
                os.chdir(olddir)
            except OSError:
                pass

def exec_func_shell(func, d, runfile, cwd=None):
def exec_func_shell(function, d, runfile, cwd=None):
    """Execute a shell function from the metadata

    Note on directory behavior. The 'dirs' varflag should contain a list
@@ -251,18 +234,18 @@ def exec_func_shell(func, d, runfile, cwd=None):

    with open(runfile, 'w') as script:
        script.write('#!/bin/sh -e\n')
        data.emit_func(func, script, d)
        data.emit_func(function, script, d)

        if bb.msg.loggerVerboseLogs:
            script.write("set -x\n")
        if cwd:
            script.write("cd %s\n" % cwd)
        script.write("%s\n" % func)
        script.write("%s\n" % function)

    os.chmod(runfile, 0775)

    cmd = runfile
    if d.getVarFlag(func, 'fakeroot'):
    if d.getVarFlag(function, 'fakeroot'):
        fakerootcmd = d.getVar('FAKEROOT', True)
        if fakerootcmd:
            cmd = [fakerootcmd, runfile]
@@ -272,15 +255,11 @@ def exec_func_shell(func, d, runfile, cwd=None):
    else:
        logfile = sys.stdout

    bb.debug(2, "Executing shell function %s" % func)

    try:
        bb.process.run(cmd, shell=False, stdin=NULL, log=logfile)
    except bb.process.CmdError:
        logfn = d.getVar('BB_LOGFILE', True)
        raise FuncFailed(func, logfn)

    bb.debug(2, "Shell function %s finished" % func)
        raise FuncFailed(function, logfn)

def _task_data(fn, task, d):
    localdata = data.createCopy(d)
@@ -311,23 +290,8 @@ def _exec_task(fn, task, d, quieterr):
        bb.fatal("T variable not set, unable to build")

    bb.utils.mkdirhier(tempdir)

    # Determine the logfile to generate
    logfmt = localdata.getVar('BB_LOGFMT', True) or 'log.{task}.{pid}'
    logbase = logfmt.format(task=task, pid=os.getpid())

    # Document the order of the tasks...
    logorder = os.path.join(tempdir, 'log.task_order')
    try:
        logorderfile = file(logorder, 'a')
    except OSError:
        logger.exception("Opening log file '%s'", logorder)
        pass
    logorderfile.write('{0} ({1}): {2}\n'.format(task, os.getpid(), logbase))
    logorderfile.close()

    # Setup the courtesy link to the logfn
    loglink = os.path.join(tempdir, 'log.{0}'.format(task))
    logbase = 'log.{0}.{1}'.format(task, os.getpid())
    logfn = os.path.join(tempdir, logbase)
    if loglink:
        bb.utils.remove(loglink)
@@ -350,7 +314,6 @@ def _exec_task(fn, task, d, quieterr):
    # Handle logfiles
    si = file('/dev/null', 'r')
    try:
        bb.utils.mkdirhier(os.path.dirname(logfn))
        logfile = file(logfn, 'w')
    except OSError:
        logger.exception("Opening log file '%s'", logfn)
@@ -377,7 +340,6 @@ def _exec_task(fn, task, d, quieterr):
        bblogger.addHandler(errchk)

        localdata.setVar('BB_LOGFILE', logfn)
        localdata.setVar('BB_RUNTASK', task)

        event.fire(TaskStarted(task, localdata), localdata)
        try:
@@ -465,55 +427,15 @@ def stamp_internal(taskname, d, file_name):

    stamp = bb.parse.siggen.stampfile(stamp, file_name, taskname, extrainfo)

    stampdir = os.path.dirname(stamp)
    if bb.parse.cached_mtime_noerror(stampdir) == 0:
        bb.utils.mkdirhier(stampdir)
    bb.utils.mkdirhier(os.path.dirname(stamp))

    return stamp

def stamp_cleanmask_internal(taskname, d, file_name):
    """
    Internal stamp helper function to generate stamp cleaning mask
    Returns the stamp path+filename

    In the bitbake core, d can be a CacheData and file_name will be set.
    When called in task context, d will be a data store, file_name will not be set
    """
    taskflagname = taskname
    if taskname.endswith("_setscene") and taskname != "do_setscene":
        taskflagname = taskname.replace("_setscene", "")

    if file_name:
        stamp = d.stamp_base_clean[file_name].get(taskflagname) or d.stampclean[file_name]
        extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
    else:
        stamp = d.getVarFlag(taskflagname, 'stamp-base-clean', True) or d.getVar('STAMPCLEAN', True)
        file_name = d.getVar('BB_FILENAME', True)
        extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info', True) or ""

    if not stamp:
        return []

    cleanmask = bb.parse.siggen.stampcleanmask(stamp, file_name, taskname, extrainfo)

    return [cleanmask, cleanmask.replace(taskflagname, taskflagname + "_setscene")]

def make_stamp(task, d, file_name = None):
    """
    Creates/updates a stamp for a given task
    (d can be a data dict or dataCache)
    """
    cleanmask = stamp_cleanmask_internal(task, d, file_name)
    for mask in cleanmask:
        for name in glob.glob(mask):
            # Preserve sigdata files in the stamps directory
            if "sigdata" in name:
                continue
            # Preserve taint files in the stamps directory
            if name.endswith('.taint'):
                continue
            os.unlink(name)

    stamp = stamp_internal(task, d, file_name)
    # Remove the file and recreate to force timestamp
    # change on broken NFS filesystems
@@ -536,24 +458,6 @@ def del_stamp(task, d, file_name = None):
    stamp = stamp_internal(task, d, file_name)
    bb.utils.remove(stamp)

def write_taint(task, d, file_name = None):
    """
    Creates a "taint" file which will force the specified task and its
    dependents to be re-run the next time by influencing the value of its
    taskhash.
    (d can be a data dict or dataCache)
    """
    import uuid
    if file_name:
        taintfn = d.stamp[file_name] + '.' + task + '.taint'
    else:
        taintfn = d.getVar('STAMP', True) + '.' + task + '.taint'
    bb.utils.mkdirhier(os.path.dirname(taintfn))
    # The specific content of the taint file is not really important,
    # we just need it to be random, so a random UUID is used
    with open(taintfn, 'w') as taintf:
        taintf.write(str(uuid.uuid4()))

def stampfile(taskname, d, file_name = None):
    """
    Return the stamp for a given task
@@ -585,7 +489,6 @@ def add_tasks(tasklist, d):
            deptask = data.expand(flags[name], d)
            task_deps[name][task] = deptask
    getTask('depends')
    getTask('rdepends')
    getTask('deptask')
    getTask('rdeptask')
    getTask('recrdeptask')

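The write_taint() docstring above says the taint file forces a re-run by influencing the task hash. A conceptual sketch of how random taint content can perturb a hash (task_hash is an illustrative helper, not BitBake's actual signature code, which lives in bb.siggen):

    import hashlib
    import os

    def task_hash(base_signature, taintfn):
        # The base signature covers the task's real inputs; if a .taint
        # file exists, its random UUID is mixed in, changing the hash and
        # therefore marking the task as out of date.
        h = hashlib.md5(base_signature.encode())
        if os.path.exists(taintfn):
            with open(taintfn) as f:
                h.update(f.read().encode())
        return h.hexdigest()
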
@@ -1,12 +1,11 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# BitBake Cache implementation
# BitBake 'Event' implementation
#
# Caching of bitbake variables before task execution

# Copyright (C) 2006 Richard Purdie
# Copyright (C) 2012 Intel Corporation

# but small sections based on code from bin/bitbake:
# Copyright (C) 2003, 2004 Chris Larson
@@ -43,7 +42,7 @@ except ImportError:
    logger.info("Importing cPickle failed. "
                "Falling back to a very slow implementation.")

__cache_version__ = "145"
__cache_version__ = "143"

def getCacheFile(path, filename, data_hash):
    return os.path.join(path, filename + "." + data_hash)
@@ -76,13 +75,9 @@ class RecipeInfoCommon(object):
                    for task in tasks)

    @classmethod
    def flaglist(cls, flag, varlist, metadata, squash=False):
        out_dict = dict((var, metadata.getVarFlag(var, flag, True))
    def flaglist(cls, flag, varlist, metadata):
        return dict((var, metadata.getVarFlag(var, flag, True))
                    for var in varlist)
        if squash:
            return dict((k,v) for (k,v) in out_dict.iteritems() if v)
        else:
            return out_dict

    @classmethod
    def getvar(cls, var, metadata):
@@ -130,11 +125,8 @@ class CoreRecipeInfo(RecipeInfoCommon):
        self.broken = self.getvar('BROKEN', metadata)
        self.not_world = self.getvar('EXCLUDE_FROM_WORLD', metadata)
        self.stamp = self.getvar('STAMP', metadata)
        self.stampclean = self.getvar('STAMPCLEAN', metadata)
        self.stamp_base = self.flaglist('stamp-base', self.tasks, metadata)
        self.stamp_base_clean = self.flaglist('stamp-base-clean', self.tasks, metadata)
        self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
        self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
        self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
        self.depends = self.depvar('DEPENDS', metadata)
        self.provides = self.depvar('PROVIDES', metadata)
@@ -159,11 +151,8 @@ class CoreRecipeInfo(RecipeInfoCommon):
        cachedata.pkg_dp = {}

        cachedata.stamp = {}
        cachedata.stampclean = {}
        cachedata.stamp_base = {}
        cachedata.stamp_base_clean = {}
        cachedata.stamp_extrainfo = {}
        cachedata.file_checksums = {}
        cachedata.fn_provides = {}
        cachedata.pn_provides = defaultdict(list)
        cachedata.all_depends = []
@@ -193,11 +182,8 @@ class CoreRecipeInfo(RecipeInfoCommon):
        cachedata.pkg_pepvpr[fn] = (self.pe, self.pv, self.pr)
        cachedata.pkg_dp[fn] = self.defaultpref
        cachedata.stamp[fn] = self.stamp
        cachedata.stampclean[fn] = self.stampclean
        cachedata.stamp_base[fn] = self.stamp_base
        cachedata.stamp_base_clean[fn] = self.stamp_base_clean
        cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
        cachedata.file_checksums[fn] = self.file_checksums

        provides = [self.pn]
        for provide in self.provides:
@@ -717,115 +703,4 @@ class CacheData(object):
        for info in info_array:
            info.add_cacheData(self, fn)


class MultiProcessCache(object):
    """
    BitBake multi-process cache implementation

    Used by the codeparser & file checksum caches
    """

    def __init__(self):
        self.cachefile = None
        self.cachedata = self.create_cachedata()
        self.cachedata_extras = self.create_cachedata()

    def init_cache(self, d):
        cachedir = (d.getVar("PERSISTENT_DIR", True) or
                    d.getVar("CACHE", True))
        if cachedir in [None, '']:
            return
        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir, self.__class__.cache_file_name)
        logger.debug(1, "Using cache in '%s'", self.cachefile)

        try:
            p = pickle.Unpickler(file(self.cachefile, "rb"))
            data, version = p.load()
        except:
            return

        if version != self.__class__.CACHE_VERSION:
            return

        self.cachedata = data

    def internSet(self, items):
        new = set()
        for i in items:
            new.add(intern(i))
        return new

    def compress_keys(self, data):
        # Override in subclasses if desired
        return

    def create_cachedata(self):
        data = [{}]
        return data

    def save_extras(self, d):
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)

        i = os.getpid()
        lf = None
        while not lf:
            lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
            if not lf or os.path.exists(self.cachefile + "-" + str(i)):
                if lf:
                    bb.utils.unlockfile(lf)
                    lf = None
                i = i + 1
                continue

        p = pickle.Pickler(file(self.cachefile + "-" + str(i), "wb"), -1)
        p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(lf)
        bb.utils.unlockfile(glf)

    def merge_data(self, source, dest):
        for j in range(0,len(dest)):
            for h in source[j]:
                if h not in dest[j]:
                    dest[j][h] = source[j][h]

    def save_merge(self, d):
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            p = pickle.Unpickler(file(self.cachefile, "rb"))
            data, version = p.load()
        except (IOError, EOFError):
            data, version = None, None

        if version != self.__class__.CACHE_VERSION:
            data = self.create_cachedata()

        for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
            f = os.path.join(os.path.dirname(self.cachefile), f)
            try:
                p = pickle.Unpickler(file(f, "rb"))
                extradata, version = p.load()
            except (IOError, EOFError):
                extradata, version = self.create_cachedata(), None

            if version != self.__class__.CACHE_VERSION:
                continue

            self.merge_data(extradata, data)
            os.unlink(f)

        self.compress_keys(data)

        p = pickle.Pickler(file(self.cachefile, "wb"), -1)
        p.dump([data, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(glf)

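MultiProcessCache above has each process record only its own additions ("extras") and then folds them into the shared cache in merge_data(), never overwriting an entry that already exists. A minimal sketch of that merge rule (merge_extras is an illustrative name; file locking and pickling are omitted):

    def merge_extras(shared, extras):
        """Fold per-process extras into the shared cache layers."""
        for layer_shared, layer_extra in zip(shared, extras):
            for key, value in layer_extra.items():
                layer_shared.setdefault(key, value)  # keep existing entries
        return shared

    # e.g. merge_extras([{'a': 1}], [{'a': 9, 'b': 2}]) -> [{'a': 1, 'b': 2}]
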
@@ -1,90 +0,0 @@
# Local file checksum cache implementation
#
# Copyright (C) 2012 Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os
import stat
import bb.utils
import logging
from bb.cache import MultiProcessCache

logger = logging.getLogger("BitBake.Cache")

try:
    import cPickle as pickle
except ImportError:
    import pickle
    logger.info("Importing cPickle failed. "
                "Falling back to a very slow implementation.")


# mtime cache (non-persistent)
# based upon the assumption that files do not change during bitbake run
class FileMtimeCache(object):
    cache = {}

    def cached_mtime(self, f):
        if f not in self.cache:
            self.cache[f] = os.stat(f)[stat.ST_MTIME]
        return self.cache[f]

    def cached_mtime_noerror(self, f):
        if f not in self.cache:
            try:
                self.cache[f] = os.stat(f)[stat.ST_MTIME]
            except OSError:
                return 0
        return self.cache[f]

    def update_mtime(self, f):
        self.cache[f] = os.stat(f)[stat.ST_MTIME]
        return self.cache[f]

    def clear(self):
        self.cache.clear()

# Checksum + mtime cache (persistent)
class FileChecksumCache(MultiProcessCache):
    cache_file_name = "local_file_checksum_cache.dat"
    CACHE_VERSION = 1

    def __init__(self):
        self.mtime_cache = FileMtimeCache()
        MultiProcessCache.__init__(self)

    def get_checksum(self, f):
        entry = self.cachedata[0].get(f)
        cmtime = self.mtime_cache.cached_mtime(f)
        if entry:
            (mtime, hashval) = entry
            if cmtime == mtime:
                return hashval
            else:
                bb.debug(2, "file %s changed mtime, recompute checksum" % f)

        hashval = bb.utils.md5_file(f)
        self.cachedata_extras[0][f] = (cmtime, hashval)
        return hashval

    def merge_data(self, source, dest):
        for h in source[0]:
            if h in dest:
                (smtime, _) = source[0][h]
                (dmtime, _) = dest[0][h]
                if smtime > dmtime:
                    dest[0][h] = source[0][h]
            else:
                dest[0][h] = source[0][h]
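
The two classes above divide the work: FileMtimeCache memoises os.stat() results for the lifetime of one run, while FileChecksumCache trusts a stored checksum only as long as the file's mtime is unchanged, recording fresh results in cachedata_extras for a later merge. A minimal sketch of the intended flow (the path is hypothetical, and persistence additionally needs init_cache()/save_extras() wired up by the caller):

    cache = FileChecksumCache()
    csum = cache.get_checksum("/srv/files/local.patch")
    # the (mtime, checksum) pair lands in cachedata_extras; once the per-process
    # files are merged by save_merge(), a later run that sees an unchanged mtime
    # gets the stored checksum back without re-reading the file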
@@ -5,10 +5,10 @@ import os.path
import bb.utils, bb.data
from itertools import chain
from pysh import pyshyacc, pyshlex, sherrors
from bb.cache import MultiProcessCache


logger = logging.getLogger('BitBake.CodeParser')
PARSERCACHE_VERSION = 2

try:
    import cPickle as pickle
@@ -32,56 +32,133 @@ def check_indent(codestr):

    return codestr

pythonparsecache = {}
shellparsecache = {}
pythonparsecacheextras = {}
shellparsecacheextras = {}

class CodeParserCache(MultiProcessCache):
    cache_file_name = "bb_codeparser.dat"
    CACHE_VERSION = 3

    def __init__(self):
        MultiProcessCache.__init__(self)
        self.pythoncache = self.cachedata[0]
        self.shellcache = self.cachedata[1]
        self.pythoncacheextras = self.cachedata_extras[0]
        self.shellcacheextras = self.cachedata_extras[1]

    def init_cache(self, d):
        MultiProcessCache.init_cache(self, d)

        # cachedata gets re-assigned in the parent
        self.pythoncache = self.cachedata[0]
        self.shellcache = self.cachedata[1]

    def compress_keys(self, data):
        # When the dicts are originally created, python calls intern() on the set keys
        # which significantly improves memory usage. Sadly the pickle/unpickle process
        # doesn't call intern() on the keys and results in the same strings being duplicated
        # in memory. This also means pickle will save the same string multiple times in
        # the cache file. By interning the data here, the cache file shrinks dramatically
        # meaning faster load times and the reloaded cache files also consume much less
        # memory. This is worth any performance hit from this loops and the use of the
        # intern() data storage.
        # Python 3.x may behave better in this area
        for h in data[0]:
            data[0][h]["refs"] = self.internSet(data[0][h]["refs"])
            data[0][h]["execs"] = self.internSet(data[0][h]["execs"])
        for h in data[1]:
            data[1][h]["execs"] = self.internSet(data[1][h]["execs"])
        return

    def create_cachedata(self):
        data = [{}, {}]
        return data

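The intern() effect that compress_keys() relies on is easy to demonstrate in isolation; a small CPython 2 sketch (no BitBake involved):

    prefix = "do_"
    a = intern(prefix + "compile")
    b = intern(prefix + "compile")
    # without intern() the two concatenations build equal but distinct string
    # objects; intern() maps both onto a single shared copy, which is what
    # keeps the unpickled caches and the cache file small
    assert a is b
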
codeparsercache = CodeParserCache()
def parser_cachefile(d):
    cachedir = (d.getVar("PERSISTENT_DIR", True) or
                d.getVar("CACHE", True))
    if cachedir in [None, '']:
        return None
    bb.utils.mkdirhier(cachedir)
    cachefile = os.path.join(cachedir, "bb_codeparser.dat")
    logger.debug(1, "Using cache in '%s' for codeparser cache", cachefile)
    return cachefile

def parser_cache_init(d):
    codeparsercache.init_cache(d)
    global pythonparsecache
    global shellparsecache

    cachefile = parser_cachefile(d)
    if not cachefile:
        return

    try:
        p = pickle.Unpickler(file(cachefile, "rb"))
        data, version = p.load()
    except:
        return

    if version != PARSERCACHE_VERSION:
        return

    pythonparsecache = data[0]
    shellparsecache = data[1]

def parser_cache_save(d):
    codeparsercache.save_extras(d)
    cachefile = parser_cachefile(d)
    if not cachefile:
        return

    glf = bb.utils.lockfile(cachefile + ".lock", shared=True)

    i = os.getpid()
    lf = None
    while not lf:
        shellcache = {}
        pythoncache = {}

        lf = bb.utils.lockfile(cachefile + ".lock." + str(i), retry=False)
        if not lf or os.path.exists(cachefile + "-" + str(i)):
            if lf:
                bb.utils.unlockfile(lf)
                lf = None
            i = i + 1
            continue

        shellcache = shellparsecacheextras
        pythoncache = pythonparsecacheextras

        p = pickle.Pickler(file(cachefile + "-" + str(i), "wb"), -1)
        p.dump([[pythoncache, shellcache], PARSERCACHE_VERSION])

    bb.utils.unlockfile(lf)
    bb.utils.unlockfile(glf)

def internSet(items):
    new = set()
    for i in items:
        new.add(intern(i))
    return new

def parser_cache_savemerge(d):
    codeparsercache.save_merge(d)
    cachefile = parser_cachefile(d)
    if not cachefile:
        return

    glf = bb.utils.lockfile(cachefile + ".lock")

    try:
        p = pickle.Unpickler(file(cachefile, "rb"))
        data, version = p.load()
    except (IOError, EOFError):
        data, version = None, None

    if version != PARSERCACHE_VERSION:
        data = [{}, {}]

    for f in [y for y in os.listdir(os.path.dirname(cachefile)) if y.startswith(os.path.basename(cachefile) + '-')]:
        f = os.path.join(os.path.dirname(cachefile), f)
        try:
            p = pickle.Unpickler(file(f, "rb"))
            extradata, version = p.load()
        except (IOError, EOFError):
            extradata, version = [{}, {}], None

        if version != PARSERCACHE_VERSION:
            continue

        for h in extradata[0]:
            if h not in data[0]:
                data[0][h] = extradata[0][h]
        for h in extradata[1]:
            if h not in data[1]:
                data[1][h] = extradata[1][h]
        os.unlink(f)

    # When the dicts are originally created, python calls intern() on the set keys
    # which significantly improves memory usage. Sadly the pickle/unpickle process
    # doesn't call intern() on the keys and results in the same strings being duplicated
    # in memory. This also means pickle will save the same string multiple times in
    # the cache file. By interning the data here, the cache file shrinks dramatically
    # meaning faster load times and the reloaded cache files also consume much less
    # memory. This is worth any performance hit from this loops and the use of the
    # intern() data storage.
    # Python 3.x may behave better in this area
    for h in data[0]:
        data[0][h]["refs"] = internSet(data[0][h]["refs"])
        data[0][h]["execs"] = internSet(data[0][h]["execs"])
    for h in data[1]:
        data[1][h]["execs"] = internSet(data[1][h]["execs"])

    p = pickle.Pickler(file(cachefile, "wb"), -1)
    p.dump([data, PARSERCACHE_VERSION])

    bb.utils.unlockfile(glf)


Logger = logging.getLoggerClass()
class BufferedLogger(Logger):
@@ -100,8 +177,7 @@ class BufferedLogger(Logger):
        self.buffer = []

class PythonParser():
    getvars = ("d.getVar", "bb.data.getVar", "data.getVar", "d.appendVar", "d.prependVar")
    containsfuncs = ("bb.utils.contains", "base_contains", "oe.utils.contains")
    getvars = ("d.getVar", "bb.data.getVar", "data.getVar")
    execfuncs = ("bb.build.exec_func", "bb.build.exec_task")

    def warn(self, func, arg):
@@ -120,7 +196,7 @@ class PythonParser():

    def visit_Call(self, node):
        name = self.called_node_name(node.func)
        if name in self.getvars or name in self.containsfuncs:
        if name in self.getvars:
            if isinstance(node.args[0], ast.Str):
                self.var_references.add(node.args[0].s)
            else:
@@ -159,14 +235,14 @@ class PythonParser():
    def parse_python(self, node):
        h = hash(str(node))

        if h in codeparsercache.pythoncache:
            self.references = codeparsercache.pythoncache[h]["refs"]
            self.execs = codeparsercache.pythoncache[h]["execs"]
        if h in pythonparsecache:
            self.references = pythonparsecache[h]["refs"]
            self.execs = pythonparsecache[h]["execs"]
            return

        if h in codeparsercache.pythoncacheextras:
            self.references = codeparsercache.pythoncacheextras[h]["refs"]
            self.execs = codeparsercache.pythoncacheextras[h]["execs"]
        if h in pythonparsecacheextras:
            self.references = pythonparsecacheextras[h]["refs"]
            self.execs = pythonparsecacheextras[h]["execs"]
            return


@@ -180,9 +256,9 @@ class PythonParser():
        self.references.update(self.var_references)
        self.references.update(self.var_execs)

        codeparsercache.pythoncacheextras[h] = {}
        codeparsercache.pythoncacheextras[h]["refs"] = self.references
        codeparsercache.pythoncacheextras[h]["execs"] = self.execs
        pythonparsecacheextras[h] = {}
        pythonparsecacheextras[h]["refs"] = self.references
        pythonparsecacheextras[h]["execs"] = self.execs

class ShellParser():
    def __init__(self, name, log):
@@ -200,12 +276,12 @@ class ShellParser():

        h = hash(str(value))

        if h in codeparsercache.shellcache:
            self.execs = codeparsercache.shellcache[h]["execs"]
        if h in shellparsecache:
            self.execs = shellparsecache[h]["execs"]
            return self.execs

        if h in codeparsercache.shellcacheextras:
            self.execs = codeparsercache.shellcacheextras[h]["execs"]
        if h in shellparsecacheextras:
            self.execs = shellparsecacheextras[h]["execs"]
            return self.execs

        try:
@@ -217,8 +293,8 @@ class ShellParser():
            self.process_tokens(token)
        self.execs = set(cmd for cmd in self.allexecs if cmd not in self.funcdefs)

        codeparsercache.shellcacheextras[h] = {}
        codeparsercache.shellcacheextras[h]["execs"] = self.execs
        shellparsecacheextras[h] = {}
        shellparsecacheextras[h]["execs"] = self.execs

        return self.execs

@@ -44,9 +44,6 @@ class CommandFailed(CommandExit):
        self.error = message
        CommandExit.__init__(self, 1)

class CommandError(Exception):
    pass

class Command:
    """
    A queue of asynchronous commands for bitbake
@@ -60,25 +57,21 @@ class Command:
        self.currentAsyncCommand = None

    def runCommand(self, commandline):
        command = commandline.pop(0)
        if hasattr(CommandsSync, command):
            # Can run synchronous commands straight away
            command_method = getattr(self.cmds_sync, command)
            try:
                result = command_method(self, commandline)
            except CommandError as exc:
                return None, exc.args[0]
            except Exception:
                return None, traceback.format_exc()
            else:
                return result, None
        if self.currentAsyncCommand is not None:
            return None, "Busy (%s in progress)" % self.currentAsyncCommand[0]
        if command not in CommandsAsync.__dict__:
            return None, "No such command"
        self.currentAsyncCommand = (command, commandline)
        self.cooker.server_registration_cb(self.cooker.runCommands, self.cooker)
        return True, None
        try:
            command = commandline.pop(0)
            if command in CommandsSync.__dict__:
                # Can run synchronous commands straight away
                return getattr(CommandsSync, command)(self.cmds_sync, self, commandline)
            if self.currentAsyncCommand is not None:
                return "Busy (%s in progress)" % self.currentAsyncCommand[0]
            if command not in CommandsAsync.__dict__:
                return "No such command"
            self.currentAsyncCommand = (command, commandline)
            self.cooker.server_registration_cb(self.cooker.runCommands, self.cooker)
            return True
        except:
            import traceback
            return traceback.format_exc()

    def runAsyncCommand(self):
        try:
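
The two runCommand() bodies above differ in their contract: one always hands back a (result, error) pair with error set to None on success, the other returns a bare result, an error string, or a formatted traceback. A sketch of how a caller would consume each (getVariable is a real synchronous command below; the surrounding server setup is assumed):

    # pair-returning variant
    result, error = command.runCommand(["getVariable", "MACHINE"])

    # single-value variant; the caller has to recognise error strings itself
    result = command.runCommand(["getVariable", "MACHINE"])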
@@ -146,11 +139,7 @@ class CommandsSync:
        """
        Get any command parsed from the commandline
        """
        cmd_action = command.cooker.commandlineAction
        if cmd_action['msg']:
            raise CommandError(msg)
        else:
            return cmd_action['action']
        return command.cooker.commandlineAction

    def getVariable(self, command, params):
        """
@@ -168,7 +157,7 @@ class CommandsSync:
        Set the value of variable in configuration.data
        """
        varname = params[0]
        value = str(params[1])
        value = params[1]
        command.cooker.configuration.data.setVar(varname, value)

    def initCooker(self, command, params):
@@ -190,13 +179,6 @@ class CommandsSync:
        """
        return bb.utils.cpu_count()

    def setConfFilter(self, command, params):
        """
        Set the configuration file parsing filter
        """
        filterfunc = params[0]
        bb.parse.parse_py.ConfHandler.confFilters.append(filterfunc)

class CommandsAsync:
    """
    A class of asynchronous commands

@@ -1,11 +1,5 @@
"""Code pulled from future python versions, here for compatibility"""

from collections import MutableMapping, KeysView, ValuesView, ItemsView
try:
    from thread import get_ident as _get_ident
except ImportError:
    from dummy_thread import get_ident as _get_ident

def total_ordering(cls):
    """Class decorator that fills in missing ordering methods"""
    convert = {
@@ -32,210 +26,3 @@ def total_ordering(cls):
        opfunc.__doc__ = getattr(int, opname).__doc__
        setattr(cls, opname, opfunc)
    return cls

class OrderedDict(dict):
    'Dictionary that remembers insertion order'
    # An inherited dict maps keys to values.
    # The inherited dict provides __getitem__, __len__, __contains__, and get.
    # The remaining methods are order-aware.
    # Big-O running times for all methods are the same as regular dictionaries.

    # The internal self.__map dict maps keys to links in a doubly linked list.
    # The circular doubly linked list starts and ends with a sentinel element.
    # The sentinel element never gets deleted (this simplifies the algorithm).
    # Each link is stored as a list of length three: [PREV, NEXT, KEY].

    def __init__(self, *args, **kwds):
        '''Initialize an ordered dictionary. The signature is the same as
        regular dictionaries, but keyword arguments are not recommended because
        their insertion order is arbitrary.

        '''
        if len(args) > 1:
            raise TypeError('expected at most 1 arguments, got %d' % len(args))
        try:
            self.__root
        except AttributeError:
            self.__root = root = []  # sentinel node
            root[:] = [root, root, None]
            self.__map = {}
        self.__update(*args, **kwds)

    def __setitem__(self, key, value, PREV=0, NEXT=1, dict_setitem=dict.__setitem__):
        'od.__setitem__(i, y) <==> od[i]=y'
        # Setting a new item creates a new link at the end of the linked list,
        # and the inherited dictionary is updated with the new key/value pair.
        if key not in self:
            root = self.__root
            last = root[PREV]
            last[NEXT] = root[PREV] = self.__map[key] = [last, root, key]
        dict_setitem(self, key, value)

    def __delitem__(self, key, PREV=0, NEXT=1, dict_delitem=dict.__delitem__):
        'od.__delitem__(y) <==> del od[y]'
        # Deleting an existing item uses self.__map to find the link which gets
        # removed by updating the links in the predecessor and successor nodes.
        dict_delitem(self, key)
        link_prev, link_next, key = self.__map.pop(key)
        link_prev[NEXT] = link_next
        link_next[PREV] = link_prev

    def __iter__(self):
        'od.__iter__() <==> iter(od)'
        # Traverse the linked list in order.
        NEXT, KEY = 1, 2
        root = self.__root
        curr = root[NEXT]
        while curr is not root:
            yield curr[KEY]
            curr = curr[NEXT]

    def __reversed__(self):
        'od.__reversed__() <==> reversed(od)'
        # Traverse the linked list in reverse order.
        PREV, KEY = 0, 2
        root = self.__root
        curr = root[PREV]
        while curr is not root:
            yield curr[KEY]
            curr = curr[PREV]

    def clear(self):
        'od.clear() -> None. Remove all items from od.'
        for node in self.__map.itervalues():
            del node[:]
        root = self.__root
        root[:] = [root, root, None]
        self.__map.clear()
        dict.clear(self)

    # -- the following methods do not depend on the internal structure --

    def keys(self):
        'od.keys() -> list of keys in od'
        return list(self)

    def values(self):
        'od.values() -> list of values in od'
        return [self[key] for key in self]

    def items(self):
        'od.items() -> list of (key, value) pairs in od'
        return [(key, self[key]) for key in self]

    def iterkeys(self):
        'od.iterkeys() -> an iterator over the keys in od'
        return iter(self)

    def itervalues(self):
        'od.itervalues -> an iterator over the values in od'
        for k in self:
            yield self[k]

    def iteritems(self):
        'od.iteritems -> an iterator over the (key, value) pairs in od'
        for k in self:
            yield (k, self[k])

    update = MutableMapping.update

    __update = update # let subclasses override update without breaking __init__

    __marker = object()

    def pop(self, key, default=__marker):
        '''od.pop(k[,d]) -> v, remove specified key and return the corresponding
        value. If key is not found, d is returned if given, otherwise KeyError
        is raised.

        '''
        if key in self:
            result = self[key]
            del self[key]
            return result
        if default is self.__marker:
            raise KeyError(key)
        return default

    def setdefault(self, key, default=None):
        'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
        if key in self:
            return self[key]
        self[key] = default
        return default

    def popitem(self, last=True):
        '''od.popitem() -> (k, v), return and remove a (key, value) pair.
        Pairs are returned in LIFO order if last is true or FIFO order if false.

        '''
        if not self:
            raise KeyError('dictionary is empty')
        key = next(reversed(self) if last else iter(self))
        value = self.pop(key)
        return key, value

    def __repr__(self, _repr_running={}):
        'od.__repr__() <==> repr(od)'
        call_key = id(self), _get_ident()
        if call_key in _repr_running:
            return '...'
        _repr_running[call_key] = 1
        try:
            if not self:
                return '%s()' % (self.__class__.__name__,)
            return '%s(%r)' % (self.__class__.__name__, self.items())
        finally:
            del _repr_running[call_key]

    def __reduce__(self):
        'Return state information for pickling'
        items = [[k, self[k]] for k in self]
        inst_dict = vars(self).copy()
        for k in vars(OrderedDict()):
            inst_dict.pop(k, None)
        if inst_dict:
            return (self.__class__, (items,), inst_dict)
        return self.__class__, (items,)

    def copy(self):
        'od.copy() -> a shallow copy of od'
        return self.__class__(self)

    @classmethod
    def fromkeys(cls, iterable, value=None):
        '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S.
        If not specified, the value defaults to None.

        '''
        self = cls()
        for key in iterable:
            self[key] = value
        return self

    def __eq__(self, other):
        '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
        while comparison to a regular mapping is order-insensitive.

        '''
        if isinstance(other, OrderedDict):
            return len(self)==len(other) and self.items() == other.items()
        return dict.__eq__(self, other)

    def __ne__(self, other):
        'od.__ne__(y) <==> od!=y'
        return not self == other

    # -- the following methods support python 3.x style dictionary views --

    def viewkeys(self):
        "od.viewkeys() -> a set-like object providing a view on od's keys"
        return KeysView(self)

    def viewvalues(self):
        "od.viewvalues() -> an object providing a view on od's values"
        return ValuesView(self)

    def viewitems(self):
        "od.viewitems() -> a set-like object providing a view on od's items"
        return ItemsView(self)

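A short usage sketch of the compatibility class above; the behaviour mirrors the stdlib collections.OrderedDict this code was pulled from:

    od = OrderedDict()
    od['z'] = 1
    od['a'] = 2
    od['m'] = 3
    # iteration follows insertion order, not key order
    assert od.keys() == ['z', 'a', 'm']
    assert od.popitem() == ('m', 3)            # LIFO by default
    assert od.popitem(last=False) == ('z', 1)  # FIFO when last=False
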
@@ -158,7 +158,6 @@ class BBCooker:
        #
        self.configuration.event_data = bb.data.createCopy(self.configuration.data)
        bb.data.update_data(self.configuration.event_data)
        bb.parse.init_parser(self.configuration.event_data)

        # TOSTOP must not be set or our children will hang when they output
        fd = sys.stdout.fileno()
@@ -219,12 +218,6 @@ class BBCooker:
            nice = int(nice) - curnice
            buildlog.verbose("Renice to %s " % os.nice(nice))

        if self.status:
            del self.status
        self.status = bb.cache.CacheData(self.caches_array)

        self.handleCollections( self.configuration.data.getVar("BBFILE_COLLECTIONS", True) )

    def parseCommandLine(self):
        # Parse any commandline into actions
        self.commandlineAction = {'action':None, 'msg':None}
@@ -278,8 +271,8 @@ class BBCooker:
        pkg_pn = self.status.pkg_pn
        (latest_versions, preferred_versions) = bb.providers.findProviders(self.configuration.data, self.status, pkg_pn)

        logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version")
        logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================")
        logger.plain("%-35s %25s %25s", "Package Name", "Latest Version", "Preferred Version")
        logger.plain("%-35s %25s %25s\n", "============", "==============", "=================")

        for p in sorted(pkg_pn):
            pref = preferred_versions[p]
@@ -304,6 +297,8 @@ class BBCooker:
        # Parse the configuration here. We need to do it explicitly here since
        # this showEnvironment() code path doesn't use the cache
        self.parseConfiguration()
        self.status = bb.cache.CacheData(self.caches_array)
        self.handleCollections( self.configuration.data.getVar("BBFILE_COLLECTIONS", True) )

        fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile)
        fn = self.matchFile(fn)
@@ -539,15 +534,11 @@ class BBCooker:

        # Prints a flattened form of package-depends below where subpackages of a package are merged into the main pn
        depends_file = file('pn-depends.dot', 'w' )
        buildlist_file = file('pn-buildlist', 'w' )
        print("digraph depends {", file=depends_file)
        for pn in depgraph["pn"]:
            fn = depgraph["pn"][pn]["filename"]
            version = depgraph["pn"][pn]["version"]
            print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
            print("%s" % pn, file=buildlist_file)
        buildlist_file.close()
        logger.info("PN build list saved to 'pn-buildlist'")
        for pn in depgraph["depends"]:
            for depend in depgraph["depends"][pn]:
                print('"%s" -> "%s"' % (pn, depend), file=depends_file)
@@ -642,8 +633,7 @@ class BBCooker:
        # Calculate priorities for each file
        matched = set()
        for p in self.status.pkg_fn:
            realfn, cls = bb.cache.Cache.virtualfn2realfn(p)
            self.status.bbfile_priority[p] = self.calc_bbfile_priority(realfn, matched)
            self.status.bbfile_priority[p] = self.calc_bbfile_priority(p, matched)

        # Don't show the warning if the BBFILE_PATTERN did match .bbappend files
        unmatched = set()
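
The realfn/virtualfn distinction in the hunk above matters because pkg_fn can contain "virtual" filenames. A sketch of what virtualfn2realfn does, with a hypothetical recipe path:

    fn, cls = bb.cache.Cache.virtualfn2realfn("virtual:native:/meta/recipes/foo_1.0.bb")
    # fn  == "/meta/recipes/foo_1.0.bb"
    # cls == "native"

so one variant keys the priority calculation on the underlying file rather than on the virtual name.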
@@ -939,13 +929,13 @@ class BBCooker:
                            errors = True
                            continue
                        if lver <> depver:
                            parselog.error("Layer '%s' depends on version %d of layer '%s', but version %d is enabled in your configuration", c, depver, dep, lver)
                            parselog.error("Layer dependency %s of layer %s is at version %d, expected %d", dep, c, lver, depver)
                            errors = True
                    else:
                        parselog.error("Layer '%s' depends on version %d of layer '%s', which exists in your configuration but does not specify a version", c, depver, dep)
                        parselog.error("Layer dependency %s of layer %s has no version, expected %d", dep, c, depver)
                        errors = True
                else:
                    parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
                    parselog.error("Layer dependency %s of layer %s not found", dep, c)
                    errors = True
            collection_depends[c] = depnamelist
        else:
@@ -995,12 +985,12 @@ class BBCooker:
        """
        Find the .bb files which match the expression in 'buildfile'.
        """

        if bf.startswith("/") or bf.startswith("../"):
            bf = os.path.abspath(bf)
        filelist, masked = self.collect_bbfiles()
        try:
            os.stat(bf)
            bf = os.path.abspath(bf)
            return [bf]
        except OSError:
            regexp = re.compile(bf)
@@ -1040,6 +1030,8 @@ class BBCooker:
        # Parse the configuration here. We need to do it explicitly here since
        # buildFile() doesn't use the cache
        self.parseConfiguration()
        self.status = bb.cache.CacheData(self.caches_array)
        self.handleCollections( self.configuration.data.getVar("BBFILE_COLLECTIONS", True) )

        # If we are told to do the None task then query the default task
        if (task == None):
@@ -1061,10 +1053,6 @@ class BBCooker:
            info_array = infos[fn]
        except KeyError:
            bb.fatal("%s does not exist" % fn)

        if info_array[0].skipped:
            bb.fatal("%s was skipped: %s" % (fn, info_array[0].skipreason))

        self.status.add_from_recipeinfo(fn, info_array)

        # Tweak some variables
@@ -1078,10 +1066,10 @@ class BBCooker:
        self.status.rundeps[fn] = []
        self.status.runrecs[fn] = []

        # Invalidate task for target if force mode active
        # Remove stamp for target if force mode active
        if self.configuration.force:
            logger.verbose("Invalidate task %s, %s", task, fn)
            bb.parse.siggen.invalidate_task('do_%s' % task, self.status, fn)
            logger.verbose("Remove stamp %s, %s", task, fn)
            bb.build.del_stamp('do_%s' % task, self.status, fn)

        # Setup taskdata structure
        taskdata = bb.taskdata.TaskData(self.configuration.abort)
@@ -1187,18 +1175,24 @@ class BBCooker:
            return

        if self.state in (state.shutdown, state.stop):
            self.parser.shutdown(clean=False, force = True)
            self.parser.shutdown(clean=False)
            sys.exit(1)

        if self.state != state.parsing:
            self.parseConfiguration ()

            if self.status:
                del self.status
            self.status = bb.cache.CacheData(self.caches_array)

            ignore = self.configuration.data.getVar("ASSUME_PROVIDED", True) or ""
            self.status.ignored_dependencies = set(ignore.split())

            for dep in self.configuration.extra_assume_provided:
                self.status.ignored_dependencies.add(dep)

            self.handleCollections( self.configuration.data.getVar("BBFILE_COLLECTIONS", True) )

            (filelist, masked) = self.collect_bbfiles()
            self.configuration.data.renameVar("__depends", "__base_depends")
@@ -1207,8 +1201,6 @@ class BBCooker:

        if not self.parser.parse_next():
            collectlog.debug(1, "parsing complete")
            if self.parser.error:
                sys.exit(1)
            self.show_appends_with_no_recipes()
            self.buildDepgraph()
            self.state = state.running
@@ -1353,10 +1345,7 @@ class BBCooker:
        # Empty the environment. The environment will be populated as
        # necessary from the data store.
        #bb.utils.empty_environment()
        try:
            prserv.serv.auto_start(self.configuration.data)
        except prserv.serv.PRServiceConfigError:
            bb.event.fire(CookerExit(), self.configuration.event_data)
        prserv.serv.auto_start(self.configuration.data)
            return

    def post_serve(self):
@@ -1581,7 +1570,6 @@ class CookerParser(object):
        def init():
            Parser.cfg = self.cfgdata
            multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, args=(self.cfgdata,), exitpriority=1)
            multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, args=(self.cfgdata,), exitpriority=1)

        self.feeder_quit = multiprocessing.Queue(maxsize=1)
        self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
@@ -1608,7 +1596,6 @@ class CookerParser(object):
                                               self.skipped, self.masked,
                                               self.virtuals, self.error,
                                               self.total)

            bb.event.fire(event, self.cfgdata)
            self.feeder_quit.put(None)
            for process in self.processes:
@@ -1621,20 +1608,16 @@ class CookerParser(object):
                self.parser_quit.put(None)

            self.jobs.cancel_join_thread()
            sys.exit(1)

        for process in self.processes:
            if force:
                process.join(.1)
                process.terminate()
            else:
                process.join()
            process.join()
        self.feeder.join()

        sync = threading.Thread(target=self.bb_cache.sync)
        sync.start()
        multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
        bb.codeparser.parser_cache_savemerge(self.cooker.configuration.data)
        bb.fetch.fetcher_parse_done(self.cooker.configuration.data)

    def load_cached(self):
        for filename, appends in self.fromcache:
@@ -1658,52 +1641,26 @@ class CookerParser(object):
            yield result

    def parse_next(self):
        result = []
        parsed = None
        try:
            parsed, result = self.results.next()
        except StopIteration:
            self.shutdown()
            return False
        except bb.BBHandledException as exc:
            self.error += 1
            logger.error('Failed to parse recipe: %s' % exc.recipe)
            self.shutdown(clean=False)
            return False
        except ParsingFailure as exc:
            self.error += 1
            logger.error('Unable to parse %s: %s' %
                         (exc.recipe, bb.exceptions.to_string(exc.realexception)))
            self.shutdown(clean=False)
            return False
        except bb.parse.ParseError as exc:
            self.error += 1
        except (bb.parse.ParseError, bb.data_smart.ExpansionError) as exc:
            logger.error(str(exc))
            self.shutdown(clean=False)
            return False
        except bb.data_smart.ExpansionError as exc:
            self.error += 1
            _, value, _ = sys.exc_info()
            logger.error('ExpansionError during parsing %s: %s', value.recipe, str(exc))
            self.shutdown(clean=False)
            return False
        except SyntaxError as exc:
            self.error += 1
            logger.error('Unable to parse %s', exc.recipe)
            self.shutdown(clean=False)
            return False
        except Exception as exc:
            self.error += 1
            etype, value, tb = sys.exc_info()
            if hasattr(value, "recipe"):
                logger.error('Unable to parse %s', value.recipe,
                             exc_info=(etype, value, exc.traceback))
            else:
                # Most likely, an exception occurred during raising an exception
                import traceback
                logger.error('Exception during parse: %s' % traceback.format_exc())
            logger.error('Unable to parse %s', value.recipe,
                         exc_info=(etype, value, exc.traceback))
            self.shutdown(clean=False)
            return False

        self.current += 1
        self.virtuals += len(result)
@@ -279,20 +279,13 @@ def build_dependencies(key, keys, shelldeps, vardepvals, d):
    deps = set()
    vardeps = d.getVarFlag(key, "vardeps", True)
    try:
        if key[-1] == ']':
            vf = key[:-1].split('[')
            value = d.getVarFlag(vf[0], vf[1], False)
        else:
            value = d.getVar(key, False)

        value = d.getVar(key, False)
        if key in vardepvals:
            value = d.getVarFlag(key, "vardepvalue", True)
        elif d.getVarFlag(key, "func"):
            if d.getVarFlag(key, "python"):
                parsedvar = d.expandWithRefs(value, key)
                parser = bb.codeparser.PythonParser(key, logger)
                if parsedvar.value and "\t" in parsedvar.value:
                    logger.warn("Variable %s contains tabs, please remove these (%s)" % (key, d.getVar("FILE", True)))
                parser.parse_python(parsedvar.value)
                deps = deps | parser.references
            else:
@@ -308,23 +301,11 @@ def build_dependencies(key, keys, shelldeps, vardepvals, d):
            parser = d.expandWithRefs(value, key)
            deps |= parser.references
            deps = deps | (keys & parser.execs)

        # Add varflags, assuming an exclusion list is set
        varflagsexcl = d.getVar('BB_SIGNATURE_EXCLUDE_FLAGS', True)
        if varflagsexcl:
            varfdeps = []
            varflags = d.getVarFlags(key)
            if varflags:
                for f in varflags:
                    if f not in varflagsexcl:
                        varfdeps.append('%s[%s]' % (key, f))
            if varfdeps:
                deps |= set(varfdeps)

        deps |= set((vardeps or "").split())
        deps -= set((d.getVarFlag(key, "vardepsexclude", True) or "").split())
    except Exception as e:
        raise bb.data_smart.ExpansionError(key, None, e)
    except:
        bb.note("Error expanding variable %s" % key)
        raise
    return deps, value
    #bb.note("Variable %s references %s and calls %s" % (key, str(deps), str(execs)))
    #d.setVarFlag(key, "vardeps", deps)

@@ -39,7 +39,7 @@ from bb.COW import COWDictBase
logger = logging.getLogger("BitBake.Data")

__setvar_keyword__ = ["_append", "_prepend"]
__setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend)(_(?P<add>.*))?$')
__setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend)(_(?P<add>.*))?')
__expand_var_regexp__ = re.compile(r"\${[^{}]+}")
__expand_python_regexp__ = re.compile(r"\${@.+?}")

@@ -102,13 +102,7 @@ class ExpansionError(Exception):
        self.expression = expression
        self.variablename = varname
        self.exception = exception
        if varname:
            if expression:
                self.msg = "Failure expanding variable %s, expression was %s which triggered exception %s: %s" % (varname, expression, type(exception).__name__, exception)
            else:
                self.msg = "Failure expanding variable %s: %s: %s" % (varname, type(exception).__name__, exception)
        else:
            self.msg = "Failure expanding expression %s which triggered exception %s: %s" % (expression, type(exception).__name__, exception)
        self.msg = "Failure expanding variable %s, expression was %s which triggered exception %s: %s" % (varname, expression, type(exception).__name__, exception)
        Exception.__init__(self, self.msg)
        self.args = (varname, expression, exception)
    def __str__(self):
@@ -201,12 +195,7 @@ class DataSmart(MutableMapping):
        for append in appends:
            keep = []
            for (a, o) in self.getVarFlag(append, op) or []:
                match = True
                if o:
                    for o2 in o.split("_"):
                        if not o2 in overrides:
                            match = False
                if not match:
                if o and not o in overrides:
                    keep.append((a ,o))
                    continue

@@ -283,10 +272,10 @@ class DataSmart(MutableMapping):
                self._seen_overrides[override].add( var )

        # setting var
        self.dict[var]["_content"] = value
        self.dict[var]["content"] = value

    def getVar(self, var, expand=False, noweakdefault=False):
        value = self.getVarFlag(var, "_content", False, noweakdefault)
        value = self.getVarFlag(var, "content", False, noweakdefault)

        # Call expand() separately to make use of the expand cache
        if expand and value:
@@ -343,7 +332,7 @@ class DataSmart(MutableMapping):
        if local_var:
            if flag in local_var:
                value = copy.copy(local_var[flag])
            elif flag == "_content" and "defaultval" in local_var and not noweakdefault:
            elif flag == "content" and "defaultval" in local_var and not noweakdefault:
                value = copy.copy(local_var["defaultval"])
        if expand and value:
            value = self.expand(value, None)
@@ -372,7 +361,7 @@ class DataSmart(MutableMapping):
            self._makeShadowCopy(var)

        for i in flags:
            if i == "_content":
            if i == "content":
                continue
            self.dict[var][i] = flags[i]

@@ -382,7 +371,7 @@ class DataSmart(MutableMapping):

        if local_var:
            for i in local_var:
                if i.startswith("_"):
                if i == "content":
                    continue
                flags[i] = local_var[i]

@@ -399,10 +388,10 @@ class DataSmart(MutableMapping):
            content = None

            # try to save the content
            if "_content" in self.dict[var]:
                content = self.dict[var]["_content"]
            if "content" in self.dict[var]:
                content = self.dict[var]["content"]
                self.dict[var] = {}
                self.dict[var]["_content"] = content
                self.dict[var]["content"] = content
            else:
                del self.dict[var]

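Tying the renamed flag together: a variable's value lives in its flag dictionary under "_content" (plain "content" in the other variant), and getVar() is simply a getVarFlag() lookup on that key. A minimal sketch against the DataSmart API (variable name hypothetical):

    d = DataSmart()
    d.setVar("FOO", "bar")
    assert d.getVar("FOO", False) == "bar"
    # the equivalent lookup through the flag interface used internally
    assert d.getVarFlag("FOO", "_content", False) == "bar"
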
@@ -473,18 +462,13 @@ class DataSmart(MutableMapping):
            self.delVar(var)

    def get_hash(self):
        data = {}
        d = self.createCopy()
        bb.data.expandKeys(d)
        bb.data.update_data(d)

        config_whitelist = set((d.getVar("BB_HASHCONFIG_WHITELIST", True) or "").split())
        keys = set(key for key in iter(d) if not key.startswith("__"))
        data = ""
        config_whitelist = set((self.getVar("BB_HASHCONFIG_WHITELIST", True) or "").split())
        keys = set(key for key in iter(self) if not key.startswith("__"))
        for key in keys:
            if key in config_whitelist:
                continue
            value = d.getVar(key, False) or ""
            data.update({key:value})
            value = self.getVar(key, False) or ""
            data = data + key + ': ' + str(value) + '\n'

        data_str = str([(k, data[k]) for k in sorted(data.keys())])
        return hashlib.md5(data_str).hexdigest()
        return hashlib.md5(data).hexdigest()

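The two get_hash() bodies hash the same information in different shapes: one serialises a sorted list of (key, value) pairs, the other concatenates "key: value" lines. A reduced sketch of the dict-based form, with the datastore stripped out (keys and values hypothetical):

    import hashlib

    data = {"MACHINE": "qemux86", "DISTRO": ""}
    data_str = str([(k, data[k]) for k in sorted(data.keys())])
    digest = hashlib.md5(data_str).hexdigest()
    # sorting the keys makes the digest independent of dict iteration order
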
@@ -32,7 +32,6 @@ import logging
import atexit
import traceback
import bb.utils
import bb.compat

# This is the pid for which we should generate the event. This is set when
# the runqueue forks off.
@@ -54,7 +53,7 @@ Registered = 10
AlreadyRegistered = 14

# Internal
_handlers = bb.compat.OrderedDict()
_handlers = {}
_ui_handlers = {}
_ui_handler_seq = 0

@@ -105,18 +104,6 @@ def print_ui_queue():
        console = logging.StreamHandler(sys.stdout)
        console.setFormatter(BBLogFormatter("%(levelname)s: %(message)s"))
        logger.handlers = [console]

        # First check to see if we have any proper messages
        msgprint = False
        for event in ui_queue:
            if isinstance(event, logging.LogRecord):
                if event.levelno > logging.DEBUG:
                    logger.handle(event)
                    msgprint = True
        if msgprint:
            return

        # Nope, so just print all of the messages we have (including debug messages)
        for event in ui_queue:
            if isinstance(event, logging.LogRecord):
                logger.handle(event)
@@ -188,7 +175,7 @@ def register(name, handler):
            _handlers[name] = noop
            return
        env = {}
        bb.utils.better_exec(code, env)
        bb.utils.simple_exec(code, env)
        func = bb.utils.better_eval(name, env)
        _handlers[name] = func
    else:
@@ -325,14 +312,6 @@ class BuildCompleted(BuildBase, OperationCompleted):
        OperationCompleted.__init__(self, total, "Building Failed")
        BuildBase.__init__(self, n, p, failures)

class DiskFull(Event):
    """Disk full case build aborted"""
    def __init__(self, dev, type, freespace, mountpoint):
        Event.__init__(self)
        self._dev = dev
        self._type = type
        self._free = freespace
        self._mountpoint = mountpoint

class NoProvider(Event):
    """No Provider for an Event"""
@@ -510,15 +489,6 @@ class MsgFatal(MsgBase):
class MsgPlain(MsgBase):
    """General output"""

class LogExecTTY(Event):
    """Send event containing program to spawn on tty of the logger"""
    def __init__(self, msg, prog, sleep_delay, retries):
        Event.__init__(self)
        self.msg = msg
        self.prog = prog
        self.sleep_delay = sleep_delay
        self.retries = retries

class LogHandler(logging.Handler):
    """Dispatch logging messages as bitbake events"""

@@ -547,22 +517,3 @@ class PackageInfo(Event):
    def __init__(self, pkginfolist):
        Event.__init__(self)
        self._pkginfolist = pkginfolist

class SanityCheck(Event):
    """
    Event to issue sanity check
    """

class SanityCheckPassed(Event):
    """
    Event to indicate sanity check is passed
    """

class SanityCheckFailed(Event):
    """
    Event to indicate sanity check has failed
    """
    def __init__(self, msg, network_error=False):
        Event.__init__(self)
        self._msg = msg
        self._network_error = network_error

@@ -32,14 +32,7 @@ class TracebackEntry(namedtuple.abc):
def _get_frame_args(frame):
    """Get the formatted arguments and class (if available) for a frame"""
    arginfo = inspect.getargvalues(frame)

    try:
        if not arginfo.args:
            return '', None
    # There have been reports from the field of python 2.6 which doesn't
    # return a namedtuple here but simply a tuple so fallback gracefully if
    # args isn't present.
    except AttributeError:
        if not arginfo.args:
            return '', None

    firstarg = arginfo.args[0]

@@ -8,7 +8,6 @@ BitBake build tools.
"""

# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2012 Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -29,13 +28,10 @@ from __future__ import absolute_import
from __future__ import print_function
import os, re
import logging
import urllib
import bb.persist_data, bb.utils
import bb.checksum
from bb import data

__version__ = "2"
_checksum_cache = bb.checksum.FileChecksumCache()

logger = logging.getLogger("BitBake.Fetcher")

@@ -54,7 +50,7 @@ class MalformedUrl(BBFetchException):
        msg = "The URL: '%s' is invalid and cannot be interpreted" % url
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (url,)
        self.args = url

class FetchError(BBFetchException):
    """General fetcher exception when something happens incorrectly"""
@@ -67,12 +63,6 @@ class FetchError(BBFetchException):
        BBFetchException.__init__(self, msg)
        self.args = (message, url)

class ChecksumError(FetchError):
    """Exception when mismatched checksum encountered"""

class NoChecksumError(FetchError):
    """Exception when no checksum is specified, but BB_STRICT_CHECKSUM is set"""

class UnpackError(BBFetchException):
    """General fetcher exception when something happens incorrectly when unpacking"""
    def __init__(self, message, url):
@@ -87,7 +77,7 @@ class NoMethodError(BBFetchException):
        msg = "Could not find a fetcher which supports the URL: '%s'" % url
        self.url = url
        BBFetchException.__init__(self, msg)
        self.args = (url,)
        self.args = url

class MissingParameterError(BBFetchException):
    """Exception raised when a fetch method is missing a critical parameter in the url"""
@@ -109,15 +99,12 @@ class ParameterError(BBFetchException):
class NetworkAccess(BBFetchException):
    """Exception raised when network access is disabled but it is required."""
    def __init__(self, url, cmd):
        msg = "Network access disabled through BB_NO_NETWORK but access requested with command %s (for url %s)" % (cmd, url)
        msg = "Network access disabled through BB_NO_NETWORK but access rquested with command %s (for url %s)" % (cmd, url)
        self.url = url
        self.cmd = cmd
        BBFetchException.__init__(self, msg)
        self.args = (url, cmd)

class NonLocalMethod(Exception):
    def __init__(self):
        Exception.__init__(self)

def decodeurl(url):
    """Decodes an URL into the tokens (scheme, network location, path,
@@ -157,14 +144,14 @@ def decodeurl(url):
            s1, s2 = s.split('=')
            p[s1] = s2

    return type, host, urllib.unquote(path), user, pswd, p
    return (type, host, path, user, pswd, p)

def encodeurl(decoded):
    """Encodes a URL from tokens (scheme, network location, path,
    user, password, parameters).
    """

    type, host, path, user, pswd, p = decoded
    (type, host, path, user, pswd, p) = decoded

    if not path:
        raise MissingParameterError('path', "encoded from the data %s" % str(decoded))
@@ -178,67 +165,43 @@ def encodeurl(decoded):
        url += "@"
    if host and type != "file":
        url += "%s" % host
    # Standardise path to ensure comparisons work
    while '//' in path:
        path = path.replace("//", "/")
    url += "%s" % urllib.quote(path)
    url += "%s" % path
    if p:
        for parm in p:
            url += ";%s=%s" % (parm, p[parm])

    return url

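A round-trip sketch of the two helpers above (URL purely illustrative; note the variants differ in whether the path is percent-quoted/unquoted on the way through):

    tokens = decodeurl("http://example.com/releases/foo-1.0.tar.gz;name=tarball")
    # -> ('http', 'example.com', '/releases/foo-1.0.tar.gz', '', '', {'name': 'tarball'})
    url = encodeurl(tokens)
    # -> "http://example.com/releases/foo-1.0.tar.gz;name=tarball"
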
def uri_replace(ud, uri_find, uri_replace, replacements, d):
def uri_replace(ud, uri_find, uri_replace, d):
    if not ud.url or not uri_find or not uri_replace:
        logger.error("uri_replace: passed an undefined value, not replacing")
        return None
        logger.debug(1, "uri_replace: passed an undefined value, not replacing")
    uri_decoded = list(decodeurl(ud.url))
    uri_find_decoded = list(decodeurl(uri_find))
    uri_replace_decoded = list(decodeurl(uri_replace))
    logger.debug(2, "For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
    result_decoded = ['', '', '', '', '', {}]
    for loc, i in enumerate(uri_find_decoded):
    for i in uri_find_decoded:
        loc = uri_find_decoded.index(i)
        result_decoded[loc] = uri_decoded[loc]
        regexp = i
        if loc == 0 and regexp and not regexp.endswith("$"):
            # Leaving the type unanchored can mean "https" matching "file" can become "files"
            # which is clearly undesirable.
            regexp += "$"
        if loc == 5:
            # Handle URL parameters
            if i:
                # Any specified URL parameters must match
                for k in uri_replace_decoded[loc]:
                    if uri_decoded[loc][k] != uri_replace_decoded[loc][k]:
                        return None
            # Overwrite any specified replacement parameters
            for k in uri_replace_decoded[loc]:
                result_decoded[loc][k] = uri_replace_decoded[loc][k]
        elif (re.match(regexp, uri_decoded[loc])):
            if not uri_replace_decoded[loc]:
                result_decoded[loc] = ""
        if isinstance(i, basestring):
            if (re.match(i, uri_decoded[loc])):
                if not uri_replace_decoded[loc]:
                    result_decoded[loc] = ""
                else:
                    result_decoded[loc] = re.sub(i, uri_replace_decoded[loc], uri_decoded[loc])
                if uri_find_decoded.index(i) == 2:
                    basename = None
                    if ud.mirrortarball:
                        basename = os.path.basename(ud.mirrortarball)
                    elif ud.localpath:
                        basename = os.path.basename(ud.localpath)
                    if basename and result_decoded[loc].endswith("/"):
                        result_decoded[loc] = os.path.dirname(result_decoded[loc])
                    if basename and not result_decoded[loc].endswith(basename):
                        result_decoded[loc] = os.path.join(result_decoded[loc], basename)
            else:
                for k in replacements:
                    uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k])
                #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc]))
                result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc])
            if loc == 2:
                # Handle path manipulations
                basename = None
                if uri_decoded[0] != uri_replace_decoded[0] and ud.mirrortarball:
                    # If the source and destination url types differ, must be a mirrortarball mapping
                    basename = os.path.basename(ud.mirrortarball)
                    # Kill parameters, they make no sense for mirror tarballs
                    uri_decoded[5] = {}
                elif ud.localpath and ud.method.supports_checksum(ud):
                    basename = os.path.basename(ud.localpath)
                if basename and not result_decoded[loc].endswith(basename):
                    result_decoded[loc] = os.path.join(result_decoded[loc], basename)
        else:
            return None
    return ud.url
    result = encodeurl(result_decoded)
    if result == ud.url:
        return None
    logger.debug(2, "For url %s returning %s" % (ud.url, result))
    return result

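In practice uri_replace() is driven by PREMIRRORS/MIRRORS-style (find, replace) pairs, and the TYPE/HOST/PATH/BASENAME/MIRRORNAME placeholders come from the replacements dict built in build_mirroruris() further below. A hedged sketch of one mapping and its effect (hostnames hypothetical):

    find    = "http://.*/.*"
    replace = "http://mirror.example.com/sources/BASENAME"
    # uri_replace(ud, find, replace, replacements, ld) would rewrite
    #   http://example.com/releases/foo-1.0.tar.gz
    # into
    #   http://mirror.example.com/sources/foo-1.0.tar.gz
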
@@ -266,18 +229,10 @@ def fetcher_init(d):
    else:
        raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)

    _checksum_cache.init_cache(d)

    for m in methods:
        if hasattr(m, "init"):
            m.init(d)

def fetcher_parse_save(d):
    _checksum_cache.save_extras(d)

def fetcher_parse_done(d):
    _checksum_cache.save_merge(d)

def fetcher_compare_revisions(d):
    """
    Compare the revisions in the persistant cache with current values and
@@ -304,37 +259,39 @@ def verify_checksum(u, ud, d):
    """
    verify the MD5 and SHA256 checksum for downloaded src

    Raises a FetchError if one or both of the SRC_URI checksums do not match
    the downloaded file, or if BB_STRICT_CHECKSUM is set and there are no
    checksums specified.
    return value:
        - True: a checksum matched
        - False: neither checksum matched

    if checksum is missing in recipes file, "BB_STRICT_CHECKSUM" decide the return value.
    if BB_STRICT_CHECKSUM = "1" then return false as unmatched, otherwise return true as
    matched
    """

    if not ud.method.supports_checksum(ud):
    if not ud.type in ["http", "https", "ftp", "ftps"]:
        return

    md5data = bb.utils.md5_file(ud.localpath)
    sha256data = bb.utils.sha256_file(ud.localpath)

    if ud.method.recommends_checksum(ud):
        # If strict checking enabled and neither sum defined, raise error
        strict = d.getVar("BB_STRICT_CHECKSUM", True) or None
        if (strict and ud.md5_expected == None and ud.sha256_expected == None):
            raise NoChecksumError('No checksum specified for %s, please add at least one to the recipe:\n'
                                  'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
                                  (ud.localpath, ud.md5_name, md5data,
                                   ud.sha256_name, sha256data), u)
    # If strict checking enabled and neither sum defined, raise error
    strict = d.getVar("BB_STRICT_CHECKSUM", True) or None
    if (strict and ud.md5_expected == None and ud.sha256_expected == None):
        raise FetchError('No checksum specified for %s, please add at least one to the recipe:\n'
                         'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
                         (ud.localpath, ud.md5_name, md5data,
                          ud.sha256_name, sha256data), u)

        # Log missing sums so user can more easily add them
        if ud.md5_expected == None:
            logger.warn('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n'
                        'SRC_URI[%s] = "%s"',
                        ud.localpath, ud.md5_name, md5data)
    # Log missing sums so user can more easily add them
    if ud.md5_expected == None:
        logger.warn('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n'
                    'SRC_URI[%s] = "%s"',
                    ud.localpath, ud.md5_name, md5data)

        if ud.sha256_expected == None:
            logger.warn('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n'
                        'SRC_URI[%s] = "%s"',
                        ud.localpath, ud.sha256_name, sha256data)
    if ud.sha256_expected == None:
        logger.warn('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n'
                    'SRC_URI[%s] = "%s"',
                    ud.localpath, ud.sha256_name, sha256data)

    md5mismatch = False
    sha256mismatch = False
@@ -348,20 +305,14 @@ def verify_checksum(u, ud, d):
    # We want to alert the user if a checksum is defined in the recipe but
    # it does not match.
    msg = ""
    mismatch = False
    if md5mismatch and ud.md5_expected:
        msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'md5', md5data, ud.md5_expected)
        mismatch = True;

    if sha256mismatch and ud.sha256_expected:
        msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'sha256', sha256data, ud.sha256_expected)
        mismatch = True;

    if mismatch:
        msg = msg + '\nIf this change is expected (e.g. you have upgraded to a new version without updating the checksums) then you can use these lines within the recipe:\nSRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"\nOtherwise you should retry the download and/or check with upstream to determine if the file has become corrupted or otherwise unexpectedly modified.\n' % (ud.md5_name, md5data, ud.sha256_name, sha256data)

    if len(msg):
        raise ChecksumError('Checksum mismatch!%s' % msg, u)
        raise FetchError('Checksum mismatch!%s' % msg, u)

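For reference, the checksum lines verify_checksum() asks users to add take this shape in a recipe (digest values below are placeholders, not real sums):

    SRC_URI[md5sum] = "0123456789abcdef0123456789abcdef"
    SRC_URI[sha256sum] = "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"
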
def update_stamp(u, ud, d):
|
||||
@@ -470,16 +421,11 @@ def runfetchcmd(cmd, d, quiet = False, cleanup = []):
|
||||
success = True
|
||||
except bb.process.NotFoundError as e:
|
||||
error_message = "Fetch command %s" % (e.command)
|
||||
except bb.process.ExecutionError as e:
|
||||
if e.stdout:
|
||||
output = "output:\n%s\n%s" % (e.stdout, e.stderr)
|
||||
elif e.stderr:
|
||||
output = "output:\n%s" % e.stderr
|
||||
else:
|
||||
output = "no output"
|
||||
error_message = "Fetch command failed with exit code %s, %s" % (e.exitcode, output)
|
||||
except bb.process.CmdError as e:
|
||||
error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg)
|
||||
except bb.process.ExecutionError as e:
|
||||
error_message = "Fetch command %s failed with exit code %s, output:\n%s" % (e.command, e.exitcode, e.stderr)
|
||||
|
||||
if not success:
|
||||
for f in cleanup:
|
||||
try:
|
||||
@@ -500,107 +446,6 @@ def check_network_access(d, info = "", url = None):
|
||||
else:
|
||||
logger.debug(1, "Fetcher accessed the network with the command %s" % info)
|
||||
|
||||
def build_mirroruris(origud, mirrors, ld):
|
||||
uris = []
|
||||
uds = []
|
||||
|
||||
replacements = {}
|
||||
replacements["TYPE"] = origud.type
|
||||
replacements["HOST"] = origud.host
|
||||
replacements["PATH"] = origud.path
|
||||
replacements["BASENAME"] = origud.path.split("/")[-1]
|
||||
replacements["MIRRORNAME"] = origud.host.replace(':','.') + origud.path.replace('/', '.').replace('*', '.')
|
||||
|
||||
def adduri(uri, ud, uris, uds):
|
||||
for line in mirrors:
|
||||
try:
|
||||
(find, replace) = line
|
||||
except ValueError:
|
||||
continue
|
||||
newuri = uri_replace(ud, find, replace, replacements, ld)
|
||||
if not newuri or newuri in uris or newuri == origud.url:
|
||||
continue
|
||||
try:
|
||||
newud = FetchData(newuri, ld)
|
||||
newud.setup_localpath(ld)
|
||||
except bb.fetch2.BBFetchException as e:
|
||||
logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
|
||||
logger.debug(1, str(e))
|
||||
try:
|
||||
ud.method.clean(ud, ld)
|
||||
except UnboundLocalError:
|
||||
pass
|
||||
continue
|
||||
uris.append(newuri)
|
||||
uds.append(newud)
|
||||
|
||||
adduri(newuri, newud, uris, uds)
|
||||
|
||||
adduri(None, origud, uris, uds)
|
||||
|
||||
return uris, uds
|
||||
|
def try_mirror_url(newuri, origud, ud, ld, check = False):
# Return of None or a value means we're finished
# False means try another url
try:
if check:
found = ud.method.checkstatus(newuri, ud, ld)
if found:
return found
return False

os.chdir(ld.getVar("DL_DIR", True))

if not os.path.exists(ud.donestamp) or ud.method.need_update(newuri, ud, ld):
ud.method.download(newuri, ud, ld)
if hasattr(ud.method,"build_mirror_data"):
ud.method.build_mirror_data(newuri, ud, ld)

if not ud.localpath or not os.path.exists(ud.localpath):
return False

if ud.localpath == origud.localpath:
return ud.localpath

# We may be obtaining a mirror tarball which needs further processing by the real fetcher
# If that tarball is a local file:// we need to provide a symlink to it
dldir = ld.getVar("DL_DIR", True)
if origud.mirrortarball and os.path.basename(ud.localpath) == os.path.basename(origud.mirrortarball) \
and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
open(ud.donestamp, 'w').close()
dest = os.path.join(dldir, os.path.basename(ud.localpath))
if not os.path.exists(dest):
os.symlink(ud.localpath, dest)
return None
# Otherwise the result is a local file:// and we symlink to it
if not os.path.exists(origud.localpath):
if os.path.islink(origud.localpath):
# Broken symbolic link
os.unlink(origud.localpath)

os.symlink(ud.localpath, origud.localpath)
update_stamp(newuri, origud, ld)
return ud.localpath

except bb.fetch2.NetworkAccess:
raise

except bb.fetch2.BBFetchException as e:
if isinstance(e, ChecksumError):
logger.warn("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (newuri, origud.url))
logger.warn(str(e))
elif isinstance(e, NoChecksumError):
raise
else:
logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
logger.debug(1, str(e))
try:
ud.method.clean(ud, ld)
except UnboundLocalError:
pass
return False

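The tarball handling above reduces to one idiom: when the mirror delivered the file under a different name than the original fetcher expects, expose it in DL_DIR through a symlink instead of copying. A condensed sketch of that idiom (link_into_place is an illustrative name):

import os

def link_into_place(fetched_path, expected_path):
    # Make the fetched file visible at the path the original fetcher expects,
    # without duplicating the data on disk
    if not os.path.exists(expected_path):
        if os.path.islink(expected_path):
            # A path that fails os.path.exists yet is a link is a broken
            # symlink left by an earlier run; clean it up first
            os.unlink(expected_path)
        os.symlink(fetched_path, expected_path)
    return expected_path
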
def try_mirrors(d, origud, mirrors, check = False):
"""
Try to use a mirrored version of the sources.
@@ -611,13 +456,62 @@ def try_mirrors(d, origud, mirrors, check = False):
mirrors is the list of mirrors we're going to try
"""
ld = d.createCopy()
for line in mirrors:
try:
(find, replace) = line
except ValueError:
continue
newuri = uri_replace(origud, find, replace, ld)
if newuri == origud.url:
continue
try:
ud = FetchData(newuri, ld)
ud.setup_localpath(ld)

uris, uds = build_mirroruris(origud, mirrors, ld)
if check:
found = ud.method.checkstatus(newuri, ud, ld)
if found:
return found
continue

for index, uri in enumerate(uris):
ret = try_mirror_url(uri, origud, uds[index], ld, check)
if ret != False:
return ret
if not os.path.exists(ud.donestamp) or ud.method.need_update(newuri, ud, ld):
ud.method.download(newuri, ud, ld)
if os.path.exists(ud.localpath):
open(ud.donestamp, 'w').close()
if hasattr(ud.method,"build_mirror_data"):
ud.method.build_mirror_data(newuri, ud, ld)

if not ud.localpath or not os.path.exists(ud.localpath):
continue

if ud.localpath == origud.localpath:
return ud.localpath

# We may be obtaining a mirror tarball which needs further processing by the real fetcher
# If that tarball is a local file:// we need to provide a symlink to it
dldir = ld.getVar("DL_DIR", True)
if os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
dest = os.path.join(dldir, os.path.basename(ud.localpath))
if not os.path.exists(dest):
os.symlink(ud.localpath, dest)
return None
# Otherwise the result is a local file:// and we symlink to it
if not os.path.exists(origud.localpath):
os.symlink(ud.localpath, origud.localpath)
return ud.localpath

except bb.fetch2.NetworkAccess:
raise

except bb.fetch2.BBFetchException as e:
logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
logger.debug(1, str(e))
try:
if os.path.isfile(ud.localpath):
bb.utils.remove(ud.localpath)
except UnboundLocalError:
pass
continue
return None

def srcrev_internal_helper(ud, d, name):
@@ -651,85 +545,11 @@ def srcrev_internal_helper(ud, d, name):

return rev


def get_checksum_file_list(d):
""" Get a list of files to checksum in SRC_URI

Returns the resolved local paths of all local file entries in
SRC_URI as a space-separated string
"""
fetch = Fetch([], d, cache = False, localonly = True)

dl_dir = d.getVar('DL_DIR', True)
filelist = []
for u in fetch.urls:
ud = fetch.ud[u]

if ud and isinstance(ud.method, local.Local):
ud.setup_localpath(d)
f = ud.localpath
if f.startswith(dl_dir):
# The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
if os.path.exists(f):
bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN', True), os.path.basename(f)))
else:
bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN', True), os.path.basename(f)))
continue
filelist.append(f)

return " ".join(filelist)


def get_file_checksums(filelist, pn):
"""Get a list of the checksums for a list of local files

Returns the checksums for a list of local files, caching the results as
it proceeds

"""

def checksum_file(f):
try:
checksum = _checksum_cache.get_checksum(f)
except OSError as e:
import traceback
bb.warn("Unable to get checksum for %s SRC_URI entry %s: %s" % (pn, os.path.basename(f), e))
return None
return checksum

checksums = []
for pth in filelist.split():
checksum = None
if '*' in pth:
# Handle globs
import glob
for f in glob.glob(pth):
checksum = checksum_file(f)
if checksum:
checksums.append((f, checksum))
elif os.path.isdir(pth):
# Handle directories
for root, dirs, files in os.walk(pth):
for name in files:
fullpth = os.path.join(root, name)
checksum = checksum_file(fullpth)
if checksum:
checksums.append((fullpth, checksum))
else:
checksum = checksum_file(pth)

if checksum:
checksums.append((pth, checksum))

checksums.sort()
return checksums

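get_file_checksums() delegates the hashing itself to a persistent cache (_checksum_cache). For illustration only, a minimal uncached stand-in with the same call shape as checksum_file() above; the choice of md5 here is an assumption for the sketch, the real cache picks its own algorithm:

import hashlib
import os

def checksum_file_uncached(path):
    # Equivalent in spirit to _checksum_cache.get_checksum(), minus the caching
    try:
        h = hashlib.md5()
        with open(path, 'rb') as f:
            for chunk in iter(lambda: f.read(65536), b''):
                h.update(chunk)
        return h.hexdigest()
    except (IOError, OSError):
        # The caller treats None as "no checksum available" and warns
        return None
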
class FetchData(object):
"""
A class which represents the fetcher state for a given URI.
"""
def __init__(self, url, d, localonly = False):
def __init__(self, url, d):
# localpath is the location of a downloaded result. If not set, the file is local.
self.donestamp = None
self.localfile = ""
@@ -754,14 +574,10 @@ class FetchData(object):
self.sha256_name = "sha256sum"
if self.md5_name in self.parm:
self.md5_expected = self.parm[self.md5_name]
elif self.type not in ["http", "https", "ftp", "ftps"]:
self.md5_expected = None
else:
self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name)
if self.sha256_name in self.parm:
self.sha256_expected = self.parm[self.sha256_name]
elif self.type not in ["http", "https", "ftp", "ftps"]:
self.sha256_expected = None
else:
self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)

@@ -776,13 +592,6 @@ class FetchData(object):
if not self.method:
raise NoMethodError(url)

if localonly and not isinstance(self.method, local.Local):
raise NonLocalMethod()

if self.parm.get("proto", None) and "protocol" not in self.parm:
logger.warn('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN', True))
self.parm["protocol"] = self.parm.get("proto", None)

if hasattr(self.method, "urldata_init"):
self.method.urldata_init(self, d)

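The 'proto' handling above is a compatibility shim: the legacy parameter is still accepted, but recipes are nudged toward 'protocol'. In isolation the pattern is simply this (parm standing in for the URI's parameter dictionary, warn for the logger):

def normalize_protocol(parm, warn=None):
    # Accept the deprecated 'proto' key but steer recipes toward 'protocol'
    if parm.get("proto") and "protocol" not in parm:
        if warn:
            warn('Consider using "protocol" rather than "proto" in SRC_URI.')
        parm["protocol"] = parm["proto"]
    return parm
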
@@ -847,26 +656,6 @@ class FetchMethod(object):
"""
return os.path.join(data.getVar("DL_DIR", d, True), urldata.localfile)

def supports_checksum(self, urldata):
"""
Is localpath something that can be represented by a checksum?
"""

# We cannot compute checksums for directories
if os.path.isdir(urldata.localpath) == True:
return False
if urldata.localpath.find("*") != -1:
return False

return True

def recommends_checksum(self, urldata):
"""
Is the backend one where checksumming is recommended (should warnings
be displayed if there is no checksum)?
"""
return False

def _strip_leading_slashes(self, relpath):
"""
Remove leading slash as os.path.join can't cope
@@ -917,7 +706,7 @@ class FetchMethod(object):

dots = file.split(".")
if dots[-1] in ['gz', 'bz2', 'Z']:
efile = os.path.join(rootdir, os.path.basename('.'.join(dots[0:-1])))
efile = os.path.join(data.getVar('WORKDIR', True),os.path.basename('.'.join(dots[0:-1])))
else:
efile = file
cmd = None
@@ -947,16 +736,14 @@ class FetchMethod(object):
if dos:
cmd = '%s -a' % cmd
cmd = "%s '%s'" % (cmd, file)
elif file.endswith('.rpm') or file.endswith('.srpm'):
elif file.endswith('.src.rpm') or file.endswith('.srpm'):
if 'extract' in urldata.parm:
unpack_file = urldata.parm.get('extract')
cmd = 'rpm2cpio.sh %s | cpio -id %s' % (file, unpack_file)
cmd = 'rpm2cpio.sh %s | cpio -i %s' % (file, unpack_file)
iterate = True
iterate_file = unpack_file
else:
cmd = 'rpm2cpio.sh %s | cpio -id' % (file)
elif file.endswith('.deb') or file.endswith('.ipk'):
cmd = 'ar -p %s data.tar.gz | zcat | tar --no-same-owner -xpf -' % file
cmd = 'rpm2cpio.sh %s | cpio -i' % (file)

if not unpack or not cmd:
# If file == dest, then avoid any copies, as we already put the file into dest!
@@ -995,9 +782,7 @@ class FetchMethod(object):
bb.utils.mkdirhier(newdir)
os.chdir(newdir)

path = data.getVar('PATH', True)
if path:
cmd = "PATH=\"%s\" %s" % (path, cmd)
cmd = "PATH=\"%s\" %s" % (data.getVar('PATH', True), cmd)
bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)

@@ -1108,10 +893,7 @@ class FetchMethod(object):
return "%s-%s" % (key, d.getVar("PN", True) or "")

class Fetch(object):
def __init__(self, urls, d, cache = True, localonly = False):
if localonly and cache:
raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")

def __init__(self, urls, d, cache = True):
if len(urls) == 0:
urls = d.getVar("SRC_URI", True).split()
self.urls = urls
@@ -1119,19 +901,14 @@ class Fetch(object):
self.ud = {}

fn = d.getVar('FILE', True)
if cache and fn and fn in urldata_cache:
if cache and fn in urldata_cache:
self.ud = urldata_cache[fn]

for url in urls:
if url not in self.ud:
try:
self.ud[url] = FetchData(url, d, localonly)
except NonLocalMethod:
if localonly:
self.ud[url] = None
pass
self.ud[url] = FetchData(url, d)

if fn and cache:
if cache:
urldata_cache[fn] = self.ud

def localpath(self, url):
@@ -1185,8 +962,6 @@ class Fetch(object):
if premirroronly:
self.d.setVar("BB_NO_NETWORK", "1")

os.chdir(self.d.getVar("DL_DIR", True))

firsterr = None
if not localpath and ((not os.path.exists(ud.donestamp)) or m.need_update(u, ud, self.d)):
try:
@@ -1203,17 +978,12 @@ class Fetch(object):
raise

except BBFetchException as e:
if isinstance(e, ChecksumError):
logger.warn("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
logger.debug(1, str(e))
elif isinstance(e, NoChecksumError):
raise
else:
logger.warn('Failed to fetch URL %s, attempting MIRRORS if available' % u)
logger.debug(1, str(e))
logger.warn('Failed to fetch URL %s' % u)
logger.debug(1, str(e))
firsterr = e
# Remove any incomplete fetch
m.clean(ud, self.d)
if os.path.isfile(ud.localpath):
bb.utils.remove(ud.localpath)
logger.debug(1, "Trying MIRRORS")
mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
localpath = try_mirrors (self.d, ud, mirrors)
@@ -1225,13 +995,6 @@ class Fetch(object):

update_stamp(u, ud, self.d)

except BBFetchException as e:
if isinstance(e, NoChecksumError):
logger.error("%s" % str(e))
elif isinstance(e, ChecksumError):
logger.error("Checksum failure fetching %s" % u)
raise

finally:
bb.utils.unlockfile(lf)

@@ -1258,7 +1021,7 @@ class Fetch(object):
except:
# Finally, try checking uri, u, from MIRRORS
mirrors = mirror_from_string(self.d.getVar('MIRRORS', True))
ret = try_mirrors(self.d, ud, mirrors, True)
ret = try_mirrors (self.d, ud, mirrors, True)

if not ret:
raise FetchError("URL %s doesn't work" % u, u)

@@ -60,7 +60,7 @@ class Bzr(FetchMethod):

basecmd = data.expand('${FETCHCMD_bzr}', d)

proto = ud.parm.get('protocol', 'http')
proto = ud.parm.get('proto', 'http')

bzrroot = ud.host + ud.path

@@ -73,7 +73,7 @@ class Bzr(FetchMethod):
options.append("-r %s" % ud.revision)

if command == "fetch":
bzrcmd = "%s branch %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
bzrcmd = "%s co %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
elif command == "update":
bzrcmd = "%s pull %s --overwrite" % (basecmd, " ".join(options))
else:

@@ -29,6 +29,7 @@ BitBake build tools.
import os
import logging
import bb
from bb import data
from bb.fetch2 import FetchMethod, FetchError, MissingParameterError, logger
from bb.fetch2 import runfetchcmd

@@ -63,7 +64,7 @@ class Cvs(FetchMethod):
if 'fullpath' in ud.parm:
fullpath = '_fullpath'

ud.localfile = bb.data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d)
ud.localfile = data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d)

def need_update(self, url, ud, d):
if (ud.date == "now"):
@@ -87,10 +88,10 @@ class Cvs(FetchMethod):
cvsroot = ud.path
else:
cvsroot = ":" + method
cvsproxyhost = d.getVar('CVS_PROXY_HOST', True)
cvsproxyhost = data.getVar('CVS_PROXY_HOST', d, True)
if cvsproxyhost:
cvsroot += ";proxy=" + cvsproxyhost
cvsproxyport = d.getVar('CVS_PROXY_PORT', True)
cvsproxyport = data.getVar('CVS_PROXY_PORT', d, True)
if cvsproxyport:
cvsroot += ";proxyport=" + cvsproxyport
cvsroot += ":" + ud.user
@@ -110,9 +111,15 @@ class Cvs(FetchMethod):
if ud.tag:
options.append("-r %s" % ud.tag)

cvsbasecmd = d.getVar("FETCHCMD_cvs", True)
cvscmd = cvsbasecmd + " '-d" + cvsroot + "' co " + " ".join(options) + " " + ud.module
cvsupdatecmd = cvsbasecmd + " '-d" + cvsroot + "' update -d -P " + " ".join(options)
localdata = data.createCopy(d)
data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
data.update_data(localdata)

data.setVar('CVSROOT', cvsroot, localdata)
data.setVar('CVSCOOPTS', " ".join(options), localdata)
data.setVar('CVSMODULE', ud.module, localdata)
cvscmd = data.getVar('FETCHCOMMAND', localdata, True)
cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, True)

if cvs_rsh:
cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
@@ -120,8 +127,8 @@ class Cvs(FetchMethod):

# create module directory
logger.debug(2, "Fetch: checking for module directory")
pkg = d.getVar('PN', True)
pkgdir = os.path.join(d.getVar('CVSDIR', True), pkg)
pkg = data.expand('${PN}', d)
pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
moddir = os.path.join(pkgdir, localdir)
if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
logger.info("Update " + loc)
@@ -162,9 +169,12 @@ class Cvs(FetchMethod):

def clean(self, ud, d):
""" Clean CVS Files and tarballs """

pkg = d.getVar('PN', True)
pkgdir = os.path.join(d.getVar("CVSDIR", True), pkg)

pkg = data.expand('${PN}', d)
localdata = data.createCopy(d)
data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
data.update_data(localdata)
pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)

bb.utils.remove(pkgdir, True)
bb.utils.remove(ud.localpath)

@@ -82,9 +82,6 @@ class Git(FetchMethod):
"""
return ud.type in ['git']

def supports_checksum(self, urldata):
return False

def urldata_init(self, ud, d):
"""
init git specific variable within url data
@@ -126,11 +123,10 @@ class Git(FetchMethod):
for name in ud.names:
# Ensure anything that doesn't look like a sha256 checksum/revision is translated into one
if not ud.revisions[name] or len(ud.revisions[name]) != 40 or (False in [c in "abcdef0123456789" for c in ud.revisions[name]]):
if ud.revisions[name]:
ud.branches[name] = ud.revisions[name]
ud.branches[name] = ud.revisions[name]
ud.revisions[name] = self.latest_revision(ud.url, ud, d, name)

gitsrcname = '%s%s' % (ud.host.replace(':','.'), ud.path.replace('/', '.').replace('*', '.'))
gitsrcname = '%s%s' % (ud.host.replace(':','.'), ud.path.replace('/', '.'))
# for rebaseable git repo, it is necessary to keep mirror tar ball
# per revision, so that even the revision disappears from the
# upstream repo in the future, the mirror will remain intact and still
@@ -139,9 +135,8 @@ class Git(FetchMethod):
for name in ud.names:
gitsrcname = gitsrcname + '_' + ud.revisions[name]
ud.mirrortarball = 'git2_%s.tar.gz' % (gitsrcname)
ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball)
gitdir = d.getVar("GITDIR", True) or (d.getVar("DL_DIR", True) + "/git2/")
ud.clonedir = os.path.join(gitdir, gitsrcname)
ud.fullmirror = os.path.join(data.getVar("DL_DIR", d, True), ud.mirrortarball)
ud.clonedir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)

ud.localfile = ud.clonedir

@@ -188,12 +183,8 @@ class Git(FetchMethod):

# If the repo still doesn't exist, fallback to cloning it
if not os.path.exists(ud.clonedir):
# We do this since git will use a "-l" option automatically for local urls where possible
if repourl.startswith("file://"):
repourl = repourl[7:]
clone_cmd = "%s clone --bare --mirror %s %s" % (ud.basecmd, repourl, ud.clonedir)
if ud.proto.lower() != 'file':
bb.fetch2.check_network_access(d, clone_cmd)
bb.fetch2.check_network_access(d, clone_cmd)
runfetchcmd(clone_cmd, d)

os.chdir(ud.clonedir)
@@ -204,14 +195,14 @@ class Git(FetchMethod):
needupdate = True
if needupdate:
try:
runfetchcmd("%s remote prune origin" % ud.basecmd, d)
runfetchcmd("%s remote rm origin" % ud.basecmd, d)
except bb.fetch2.FetchError:
logger.debug(1, "No Origin")

runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, repourl), d)
fetch_cmd = "%s fetch -f --prune %s refs/*:refs/*" % (ud.basecmd, repourl)
if ud.proto.lower() != 'file':
bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
runfetchcmd(fetch_cmd, d)
runfetchcmd("%s prune-packed" % ud.basecmd, d)
runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
@@ -245,23 +236,7 @@ class Git(FetchMethod):
if ud.bareclone:
cloneflags += " --mirror"

# Versions of git prior to 1.7.9.2 have issues where foo.git and foo get confused
# and you end up with some horrible union of the two when you attempt to clone it
# The least invasive workaround seems to be a symlink to the real directory to
# fool git into ignoring any .git version that may also be present.
#
# The issue is fixed in more recent versions of git so we can drop this hack in future
# when that version becomes common enough.
clonedir = ud.clonedir
if not ud.path.endswith(".git"):
indirectiondir = destdir[:-1] + ".indirectionsymlink"
if os.path.exists(indirectiondir):
os.remove(indirectiondir)
bb.utils.mkdirhier(os.path.dirname(indirectiondir))
os.symlink(ud.clonedir, indirectiondir)
clonedir = indirectiondir

runfetchcmd("git clone %s %s/ %s" % (cloneflags, clonedir, destdir), d)
runfetchcmd("git clone %s %s/ %s" % (cloneflags, ud.clonedir, destdir), d)
if not ud.nocheckout:
os.chdir(destdir)
if subdir != "":
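
The workaround being removed in this hunk cloned through an indirection symlink so old git versions (before 1.7.9.2) could not confuse sibling 'foo' and 'foo.git' directories. A sketch of the removed logic, assuming destdir ends with a trailing slash as in the caller:

import os

def indirection_clonedir(clonedir, destdir):
    # Point a uniquely named symlink at the real clone directory so git
    # never sees a '.git'-suffixed sibling it could merge histories with
    indirectiondir = destdir[:-1] + ".indirectionsymlink"
    if os.path.exists(indirectiondir):
        os.remove(indirectiondir)
    os.symlink(clonedir, indirectiondir)
    return indirectiondir
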
@@ -306,8 +281,7 @@ class Git(FetchMethod):
basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
cmd = "%s ls-remote %s://%s%s%s %s" % \
(basecmd, ud.proto, username, ud.host, ud.path, ud.branches[name])
if ud.proto.lower() != 'file':
bb.fetch2.check_network_access(d, cmd)
bb.fetch2.check_network_access(d, cmd)
output = runfetchcmd(cmd, d, True)
if not output:
raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, url)

@@ -82,7 +82,7 @@ class Hg(FetchMethod):

basecmd = data.expand('${FETCHCMD_hg}', d)

proto = ud.parm.get('protocol', 'http')
proto = ud.parm.get('proto', 'http')

host = ud.host
if proto == "file":

@@ -26,12 +26,10 @@ BitBake build tools.
# Based on functions from the base bb module, Copyright 2003 Holger Schurig

import os
import urllib
import bb
import bb.utils
from bb import data
from bb.fetch2 import FetchMethod, FetchError
from bb.fetch2 import logger
from bb.fetch2 import FetchMethod

class Local(FetchMethod):
def supports(self, url, urldata, d):
@@ -42,31 +40,27 @@ class Local(FetchMethod):

def urldata_init(self, ud, d):
# We don't set localfile as for this fetcher the file is already local!
ud.decodedurl = urllib.unquote(ud.url.split("://")[1].split(";")[0])
ud.basename = os.path.basename(ud.decodedurl)
ud.basename = os.path.basename(ud.url.split("://")[1].split(";")[0])
return

def localpath(self, url, urldata, d):
"""
Return the local filename of a given url assuming a successful fetch.
"""
path = urldata.decodedurl
path = url.split("://")[1]
path = path.split(";")[0]
newpath = path
if path[0] != "/":
filespath = data.getVar('FILESPATH', d, True)
if filespath:
logger.debug(2, "Searching for %s in paths: \n%s" % (path, "\n ".join(filespath.split(":"))))
newpath = bb.utils.which(filespath, path)
if not newpath:
filesdir = data.getVar('FILESDIR', d, True)
if filesdir:
logger.debug(2, "Searching for %s in path: %s" % (path, filesdir))
newpath = os.path.join(filesdir, path)
if not os.path.exists(newpath) and path.find("*") == -1:
dldirfile = os.path.join(d.getVar("DL_DIR", True), path)
logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
bb.utils.mkdirhier(os.path.dirname(dldirfile))
return dldirfile
if not os.path.exists(newpath) and path.find("*") == -1:
dldirfile = os.path.join(data.getVar("DL_DIR", d, True), os.path.basename(path))
return dldirfile
return newpath

def need_update(self, url, ud, d):
@@ -79,20 +73,7 @@ class Local(FetchMethod):
def download(self, url, urldata, d):
"""Fetch urls (no-op for Local method)"""
# no need to fetch local files, we'll deal with them in place.
if self.supports_checksum(urldata) and not os.path.exists(urldata.localpath):
locations = []
filespath = data.getVar('FILESPATH', d, True)
if filespath:
locations = filespath.split(":")
filesdir = data.getVar('FILESDIR', d, True)
if filesdir:
locations.append(filesdir)
locations.append(d.getVar("DL_DIR", True))

msg = "Unable to find file " + url + " anywhere. The paths that were searched were:\n " + "\n ".join(locations)
raise FetchError(msg)

return True
return 1

def checkstatus(self, url, urldata, d):
"""

@@ -57,7 +57,7 @@ class Osc(FetchMethod):

basecmd = data.expand('${FETCHCMD_osc}', d)

proto = ud.parm.get('protocol', 'ocs')
proto = ud.parm.get('proto', 'ocs')

options = []


@@ -27,7 +27,6 @@ BitBake build tools.

from future_builtins import zip
import os
import subprocess
import logging
import bb
from bb import data
@@ -91,8 +90,8 @@ class Perforce(FetchMethod):

p4cmd = data.getVar('FETCHCOMMAND_p4', d, True)
logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
p4file, errors = bb.process.run("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
cset = p4file.strip()
p4file = os.popen("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
cset = p4file.readline().strip()
logger.debug(1, "READ %s", cset)
if not cset:
return -1
@@ -155,8 +154,8 @@ class Perforce(FetchMethod):
logger.debug(2, "Fetch: creating temporary directory")
bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata))
data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
tmpfile, errors = bb.process.run(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
tmpfile = tmpfile.strip()
tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
tmpfile = tmppipe.readline().strip()
if not tmpfile:
raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", loc)

@@ -169,8 +168,7 @@ class Perforce(FetchMethod):
os.chdir(tmpfile)
logger.info("Fetch " + loc)
logger.info("%s%s files %s", p4cmd, p4opt, depot)
p4file, errors = bb.process.run("%s%s files %s" % (p4cmd, p4opt, depot))
p4file = p4file.strip()
p4file = os.popen("%s%s files %s" % (p4cmd, p4opt, depot))

if not p4file:
raise FetchError("Fetch: unable to get the P4 files from %s" % depot, loc)
@@ -186,7 +184,7 @@ class Perforce(FetchMethod):
dest = list[0][len(path)+1:]
where = dest.find("#")

subprocess.call("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]), shell=True)
os.system("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]))
count = count + 1

if count == 0:

@@ -10,12 +10,6 @@ IETF secsh internet draft:
Currently does not support the sftp parameters, as this uses scp
Also does not support the 'fingerprint' connection parameter.

Please note that '/' is used as host, path separator not ':' as you may
be used to, also '~' can be used to specify user HOME, but again after '/'

Example SRC_URI:
SRC_URI = "ssh://user@host.example.com/dir/path/file.txt"
SRC_URI = "ssh://user@host.example.com/~/file.txt"
'''

# Copyright (C) 2006 OpenedHand Ltd.
@@ -75,22 +69,15 @@ class SSH(FetchMethod):
def supports(self, url, urldata, d):
return __pattern__.match(url) != None

def supports_checksum(self, urldata):
return False

def urldata_init(self, urldata, d):
if 'protocol' in urldata.parm and urldata.parm['protocol'] == 'git':
raise bb.fetch2.ParameterError(
"Invalid protocol - if you wish to fetch from a git " +
"repository using ssh, you need to use " +
"git:// prefix with protocol=ssh", urldata.url)
def localpath(self, url, urldata, d):
m = __pattern__.match(urldata.url)
path = m.group('path')
host = m.group('host')
urldata.localpath = os.path.join(d.getVar('DL_DIR', True), os.path.basename(path))
lpath = os.path.join(data.getVar('DL_DIR', d, True), host, os.path.basename(path))
return lpath

def download(self, url, urldata, d):
dldir = d.getVar('DL_DIR', True)
dldir = data.getVar('DL_DIR', d, True)

m = __pattern__.match(url)
path = m.group('path')
@@ -99,10 +86,16 @@ class SSH(FetchMethod):
user = m.group('user')
password = m.group('pass')

ldir = os.path.join(dldir, host)
lpath = os.path.join(ldir, os.path.basename(path))

if not os.path.exists(ldir):
os.makedirs(ldir)

if port:
portarg = '-P %s' % port
port = '-P %s' % port
else:
portarg = ''
port = ''

if user:
fr = user
@@ -116,9 +109,9 @@ class SSH(FetchMethod):

import commands
cmd = 'scp -B -r %s %s %s/' % (
portarg,
port,
commands.mkarg(fr),
commands.mkarg(dldir)
commands.mkarg(ldir)
)

bb.fetch2.check_network_access(d, cmd, urldata.url)

@@ -77,8 +77,8 @@ class Svk(FetchMethod):
logger.debug(2, "Fetch: creating temporary directory")
bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata))
data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
tmpfile, errors = bb.process.run(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
tmpfile = tmpfile.strip()
tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
tmpfile = tmppipe.readline().strip()
if not tmpfile:
logger.error()
raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", loc)
@@ -49,8 +49,6 @@ class Svn(FetchMethod):
if not "module" in ud.parm:
raise MissingParameterError('module', ud.url)

ud.basecmd = d.getVar('FETCHCMD_svn', True)

ud.module = ud.parm["module"]

# Create paths to svn checkouts
@@ -71,7 +69,9 @@ class Svn(FetchMethod):
command is "fetch", "update", "info"
"""

proto = ud.parm.get('protocol', 'svn')
basecmd = data.expand('${FETCHCMD_svn}', d)

proto = ud.parm.get('proto', 'svn')

svn_rsh = None
if proto == "svn+ssh" and "rsh" in ud.parm:
@@ -88,7 +88,7 @@ class Svn(FetchMethod):
options.append("--password %s" % ud.pswd)

if command == "info":
svncmd = "%s info %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module)
svncmd = "%s info %s %s://%s/%s/" % (basecmd, " ".join(options), proto, svnroot, ud.module)
else:
suffix = ""
if ud.revision:
@@ -96,9 +96,9 @@ class Svn(FetchMethod):
suffix = "@%s" % (ud.revision)

if command == "fetch":
svncmd = "%s co %s %s://%s/%s%s %s" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module, suffix, ud.module)
svncmd = "%s co %s %s://%s/%s%s %s" % (basecmd, " ".join(options), proto, svnroot, ud.module, suffix, ud.module)
elif command == "update":
svncmd = "%s update %s" % (ud.basecmd, " ".join(options))
svncmd = "%s update %s" % (basecmd, " ".join(options))
else:
raise FetchError("Invalid svn command %s" % command, ud.url)

@@ -117,11 +117,6 @@ class Svn(FetchMethod):
logger.info("Update " + loc)
# update sources there
os.chdir(ud.moddir)
# We need to attempt to run svn upgrade first in case its an older working format
try:
runfetchcmd(ud.basecmd + " upgrade", d)
except FetchError:
pass
logger.debug(1, "Running %s", svnupdatecmd)
bb.fetch2.check_network_access(d, svnupdatecmd, ud.url)
runfetchcmd(svnupdatecmd, d)

@@ -45,55 +45,47 @@ class Wget(FetchMethod):
"""
return ud.type in ['http', 'https', 'ftp']

def recommends_checksum(self, urldata):
return True

def urldata_init(self, ud, d):
if 'protocol' in ud.parm:
if ud.parm['protocol'] == 'git':
raise bb.fetch2.ParameterError("Invalid protocol - if you wish to fetch from a git repository using http, you need to instead use the git:// prefix with protocol=http", ud.url)

if 'downloadfilename' in ud.parm:
ud.basename = ud.parm['downloadfilename']
else:
ud.basename = os.path.basename(ud.path)

ud.basename = os.path.basename(ud.path)
ud.localfile = data.expand(urllib.unquote(ud.basename), d)

def download(self, uri, ud, d, checkonly = False):
"""Fetch urls"""

basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate"
def fetch_uri(uri, ud, d):
if checkonly:
fetchcmd = data.getVar("CHECKCOMMAND", d, True)
elif os.path.exists(ud.localpath):
# file exists, but we didn't complete it.. trying again..
fetchcmd = data.getVar("RESUMECOMMAND", d, True)
else:
fetchcmd = data.getVar("FETCHCOMMAND", d, True)

if 'downloadfilename' in ud.parm:
basecmd += " -O ${DL_DIR}/" + ud.localfile
uri = uri.split(";")[0]
uri_decoded = list(decodeurl(uri))
uri_type = uri_decoded[0]
uri_host = uri_decoded[1]

if checkonly:
fetchcmd = d.getVar("CHECKCOMMAND_wget", True) or d.expand(basecmd + " --spider '${URI}'")
elif os.path.exists(ud.localpath):
# file exists, but we didn't complete it.. trying again..
fetchcmd = d.getVar("RESUMECOMMAND_wget", True) or d.expand(basecmd + " -c -P ${DL_DIR} '${URI}'")
else:
fetchcmd = d.getVar("FETCHCOMMAND_wget", True) or d.expand(basecmd + " -P ${DL_DIR} '${URI}'")
fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
if not checkonly:
logger.info("fetch " + uri)
logger.debug(2, "executing " + fetchcmd)
bb.fetch2.check_network_access(d, fetchcmd)
runfetchcmd(fetchcmd, d, quiet=checkonly)

uri = uri.split(";")[0]
uri_decoded = list(decodeurl(uri))
uri_type = uri_decoded[0]
uri_host = uri_decoded[1]
# Sanity check since wget can pretend it succeeded when it didn't
# Also, this used to happen if sourceforge sent us to the mirror page
if not os.path.exists(ud.localpath) and not checkonly:
raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)

fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
if not checkonly:
logger.info("fetch " + uri)
logger.debug(2, "executing " + fetchcmd)
bb.fetch2.check_network_access(d, fetchcmd)
runfetchcmd(fetchcmd, d, quiet=checkonly)

# Sanity check since wget can pretend it succeeded when it didn't
# Also, this used to happen if sourceforge sent us to the mirror page
if not os.path.exists(ud.localpath) and not checkonly:
raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)
localdata = data.createCopy(d)
data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata)
data.update_data(localdata)

fetch_uri(uri, ud, localdata)

return True

def checkstatus(self, uri, ud, d):

@@ -33,7 +33,9 @@
from bb.utils import better_compile, better_exec
from bb import error

# A dict of function names we have seen
# A dict of modules we have handled
# it is the number of .bbclasses + x in size
_parsed_methods = { }
_parsed_fns = { }

def insert_method(modulename, code, fn):
@@ -50,22 +52,33 @@ def insert_method(modulename, code, fn):
if name in ['None', 'False']:
continue
elif name in _parsed_fns and not _parsed_fns[name] == modulename:
error("The function %s defined in %s was already declared in %s. BitBake has a global python function namespace so shared functions should be declared in a common include file rather than being duplicated, or if the functions are different, please use different function names." % (name, modulename, _parsed_fns[name]))
error( "Error Method already seen: %s in' %s' now in '%s'" % (name, _parsed_fns[name], modulename))
else:
_parsed_fns[name] = modulename

# A dict of modules the parser has finished with
_parsed_methods = {}
def check_insert_method(modulename, code, fn):
"""
Add the code if it wasn't added before. The module
name will be used for that

Variables:
@modulename a short name e.g. base.bbclass
@code The actual python code
@fn The filename from the outer file
"""
if not modulename in _parsed_methods:
return insert_method(modulename, code, fn)
_parsed_methods[modulename] = 1

def parsed_module(modulename):
"""
Has module been parsed?
Inform me file xyz was parsed
"""
return modulename in _parsed_methods

def set_parsed_module(modulename):
"""
Set module as parsed
"""
_parsed_methods[modulename] = True

def get_parsed_dict():
"""
shortcut
"""
return _parsed_methods

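insert_method() is what enforces BitBake's single global namespace for python functions: one name, one defining module, with an error on any conflicting re-definition. Stripped to its bookkeeping, the check looks like this (register_function and report are illustrative names, report standing in for bb.error):

_registered_fns = {}

def register_function(name, modulename, report):
    # One global namespace: a function name may only ever belong to one module
    if name in _registered_fns and _registered_fns[name] != modulename:
        report("Function %s in %s was already declared in %s"
               % (name, modulename, _registered_fns[name]))
    else:
        _registered_fns[name] = modulename
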
@@ -128,7 +128,7 @@ def getDiskData(BBDirs, configuration):
if not os.path.exists(path):
bb.utils.mkdirhier(path)
mountedDev = getMountedDev(path)
devDict[mountedDev] = [action, path, minSpace, minInode]
devDict[mountedDev] = action, path, minSpace, minInode

return devDict

@@ -176,7 +176,6 @@ class diskMonitor:
def __init__(self, configuration):

self.enableMonitor = False
self.configuration = configuration

BBDirs = configuration.getVar("BB_DISKMON_DIRS", True) or None
if BBDirs:
@@ -220,24 +219,15 @@ class diskMonitor:
logger.error("No new tasks can be executed since the disk space monitor action is \"STOPTASKS\"!")
self.checked[dev] = True
rq.finish_runqueue(False)
bb.event.fire(bb.event.DiskFull(dev, 'disk', freeSpace, self.devDict[dev][1]), self.configuration)
elif self.devDict[dev][0] == "ABORT" and not self.checked[dev]:
logger.error("Immediately abort since the disk space monitor action is \"ABORT\"!")
self.checked[dev] = True
rq.finish_runqueue(True)
bb.event.fire(bb.event.DiskFull(dev, 'disk', freeSpace, self.devDict[dev][1]), self.configuration)

# The free inodes, floating point number
freeInode = st.f_favail

if self.devDict[dev][3] and freeInode < self.devDict[dev][3]:
# Some fs formats' (e.g., btrfs) statvfs.f_files (inodes) is
# zero, this is a feature of the fs, we disable the inode
# checking for such a fs.
if st.f_files == 0:
logger.warn("Inode check for %s is unavailable, remove it from disk monitor" % dev)
self.devDict[dev][3] = None
continue
# Always show warning, the self.checked would always be False if the action is WARN
if self.preFreeI[dev] == 0 or self.preFreeI[dev] - freeInode > self.inodeInterval and not self.checked[dev]:
logger.warn("The free inode of %s is running low (%.3fK left)" % (dev, freeInode / 1024.0))
@@ -247,10 +237,8 @@ class diskMonitor:
logger.error("No new tasks can be executed since the disk space monitor action is \"STOPTASKS\"!")
self.checked[dev] = True
rq.finish_runqueue(False)
bb.event.fire(bb.event.DiskFull(dev, 'inode', freeSpace, self.devDict[dev][1]), self.configuration)
elif self.devDict[dev][0] == "ABORT" and not self.checked[dev]:
logger.error("Immediately abort since the disk space monitor action is \"ABORT\"!")
self.checked[dev] = True
rq.finish_runqueue(True)
bb.event.fire(bb.event.DiskFull(dev, 'inode', freeSpace, self.devDict[dev][1]), self.configuration)
return

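Both the space and inode checks in the monitor come from a single os.statvfs() call per watched path; thresholds and the WARN/STOPTASKS/ABORT action are configured through BB_DISKMON_DIRS. A small sketch of how the two readings used above are derived (read_disk_state is an illustrative name):

import os

def read_disk_state(path):
    st = os.statvfs(path)
    # Bytes available to unprivileged users, and free inodes; f_files == 0
    # means the filesystem (e.g. btrfs) exposes no inode limit, in which
    # case the inode check above disables itself
    free_space = st.f_bavail * st.f_frsize
    free_inodes = st.f_favail if st.f_files else None
    return free_space, free_inodes
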
@@ -31,6 +31,7 @@ import itertools
from bb import methodpool
from bb.parse import logger

__parsed_methods__ = bb.methodpool.get_parsed_dict()
_bbversions_re = re.compile(r"\[(?P<from>[0-9]+)-(?P<to>[0-9]+)\]")

class StatementGroup(list):
@@ -125,25 +126,23 @@ class MethodNode(AstNode):
self.body = body

def eval(self, data):
text = '\n'.join(self.body)
if self.func_name == "__anonymous":
funcname = ("__anon_%s_%s" % (self.lineno, self.filename.translate(string.maketrans('/.+-', '____'))))
if not funcname in bb.methodpool._parsed_fns:
text = "def %s(d):\n" % (funcname) + text
text = "def %s(d):\n" % (funcname) + '\n'.join(self.body)
bb.methodpool.insert_method(funcname, text, self.filename)
anonfuncs = data.getVar('__BBANONFUNCS') or []
anonfuncs.append(funcname)
data.setVar('__BBANONFUNCS', anonfuncs)
data.setVar(funcname, text)
else:
data.setVarFlag(self.func_name, "func", 1)
data.setVar(self.func_name, text)
data.setVar(self.func_name, '\n'.join(self.body))

class PythonMethodNode(AstNode):
def __init__(self, filename, lineno, function, modulename, body):
def __init__(self, filename, lineno, function, define, body):
AstNode.__init__(self, filename, lineno)
self.function = function
self.modulename = modulename
self.define = define
self.body = body

def eval(self, data):
@@ -151,8 +150,8 @@ class PythonMethodNode(AstNode):
# 'this' file. This means we will not parse methods from
# bb classes twice
text = '\n'.join(self.body)
if not bb.methodpool.parsed_module(self.modulename):
bb.methodpool.insert_method(self.modulename, text, self.filename)
if not bb.methodpool.parsed_module(self.define):
bb.methodpool.insert_method(self.define, text, self.filename)
data.setVarFlag(self.function, "func", 1)
data.setVarFlag(self.function, "python", 1)
data.setVar(self.function, text)
@@ -213,9 +212,9 @@ class ExportFuncsNode(AstNode):
data.setVarFlag(calledvar, flag, data.getVarFlag(var, flag))

if data.getVarFlag(calledvar, "python"):
data.setVar(var, " bb.build.exec_func('" + calledvar + "', d)\n")
data.setVar(var, "\tbb.build.exec_func('" + calledvar + "', d)\n")
else:
data.setVar(var, " " + calledvar + "\n")
data.setVar(var, "\t" + calledvar + "\n")
data.setVarFlag(var, 'export_func', '1')

class AddTaskNode(AstNode):
@@ -282,8 +281,8 @@ def handleData(statements, filename, lineno, groupd):
def handleMethod(statements, filename, lineno, func_name, body):
statements.append(MethodNode(filename, lineno, func_name, body))

def handlePythonMethod(statements, filename, lineno, funcname, modulename, body):
statements.append(PythonMethodNode(filename, lineno, funcname, modulename, body))
def handlePythonMethod(statements, filename, lineno, funcname, root, body):
statements.append(PythonMethodNode(filename, lineno, funcname, root, body))

def handleMethodFlags(statements, filename, lineno, key, m):
statements.append(MethodFlagsNode(filename, lineno, key, m))
@@ -321,7 +320,7 @@ def finalize(fn, d, variant = None):
code = []
for funcname in d.getVar("__BBANONFUNCS") or []:
code.append("%s(d)" % funcname)
bb.utils.better_exec("\n".join(code), {"d": d})
bb.utils.simple_exec("\n".join(code), {"d": d})
bb.data.update_data(d)

tasklist = d.getVar('__BBTASKS') or []

@@ -69,7 +69,7 @@ def supports(fn, d):
return os.path.splitext(fn)[-1] in [".bb", ".bbclass", ".inc"]

def inherit(files, fn, lineno, d):
__inherit_cache = d.getVar('__inherit_cache') or []
__inherit_cache = data.getVar('__inherit_cache', d) or []
files = d.expand(files).split()
for file in files:
if not os.path.isabs(file) and not file.endswith(".bbclass"):
@@ -80,7 +80,7 @@ def inherit(files, fn, lineno, d):
__inherit_cache.append( file )
data.setVar('__inherit_cache', __inherit_cache, d)
include(fn, file, lineno, d, "inherit")
__inherit_cache = d.getVar('__inherit_cache') or []
__inherit_cache = data.getVar('__inherit_cache', d) or []

def get_statements(filename, absolute_filename, base_name):
global cached_statements
@@ -126,13 +126,13 @@ def handle(fn, d, include):
if ext == ".bbclass":
__classname__ = root
classes.append(__classname__)
__inherit_cache = d.getVar('__inherit_cache') or []
__inherit_cache = data.getVar('__inherit_cache', d) or []
if not fn in __inherit_cache:
__inherit_cache.append(fn)
data.setVar('__inherit_cache', __inherit_cache, d)

if include != 0:
oldfile = d.getVar('FILE')
oldfile = data.getVar('FILE', d)
else:
oldfile = None

@@ -161,7 +161,7 @@ def handle(fn, d, include):

# we have parsed the bb class now
if ext == ".bbclass" or ext == ".inc":
bb.methodpool.set_parsed_module(base_name)
bb.methodpool.get_parsed_dict()[base_name] = 1

return d


@@ -29,7 +29,7 @@ import logging
import bb.utils
from bb.parse import ParseError, resolve_file, ast, logger

__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}/]+)(\[(?P<flag>[a-zA-Z0-9\-_+.]+)\])?\s*((?P<colon>:=)|(?P<lazyques>\?\?=)|(?P<ques>\?=)|(?P<append>\+=)|(?P<prepend>=\+)|(?P<predot>=\.)|(?P<postdot>\.=)|=)\s*(?!'[^']*'[^']*'$)(?!\"[^\"]*\"[^\"]*\"$)(?P<apo>['\"])(?P<value>.*)(?P=apo)$")
__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}/]+)(\[(?P<flag>[a-zA-Z0-9\-_+.]+)\])?\s*((?P<colon>:=)|(?P<lazyques>\?\?=)|(?P<ques>\?=)|(?P<append>\+=)|(?P<prepend>=\+)|(?P<predot>=\.)|(?P<postdot>\.=)|=)\s*(?P<apo>['\"])(?P<value>.*)(?P=apo)$")
__include_regexp__ = re.compile( r"include\s+(.+)" )
__require_regexp__ = re.compile( r"require\s+(.+)" )
__export_regexp__ = re.compile( r"export\s+([a-zA-Z0-9\-_+.${}/]+)$" )
@@ -71,14 +71,6 @@ def include(oldfn, fn, lineno, data, error_out):
raise ParseError("Could not %(error_out)s file %(fn)s" % vars(), oldfn, lineno)
logger.debug(2, "CONF file '%s' not found", fn)

# We have an issue where a UI might want to enforce particular settings such as
# an empty DISTRO variable. If configuration files do something like assigning
# a weak default, it turns out to be very difficult to filter out these changes,
# particularly when the weak default might appear half way through parsing a chain
# of configuration files. We therefore let the UIs hook into configuration file
# parsing. This turns out to be a hard problem to solve any other way.
confFilters = []

def handle(fn, data, include):
init(data)

@@ -115,9 +107,6 @@ def handle(fn, data, include):
if oldfile:
data.setVar('FILE', oldfile)

for f in confFilters:
f(fn, data)

return data

def feeder(lineno, s, fn, statements):

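The tightened __config_regexp__ adds two negative lookaheads so that a quoted value containing a stray quote of the same kind no longer parses as a valid assignment. A quick illustration, with the new regexp copied verbatim from the hunk above:

import re

__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}/]+)(\[(?P<flag>[a-zA-Z0-9\-_+.]+)\])?\s*((?P<colon>:=)|(?P<lazyques>\?\?=)|(?P<ques>\?=)|(?P<append>\+=)|(?P<prepend>=\+)|(?P<predot>=\.)|(?P<postdot>\.=)|=)\s*(?!'[^']*'[^']*'$)(?!\"[^\"]*\"[^\"]*\"$)(?P<apo>['\"])(?P<value>.*)(?P=apo)$")

m = __config_regexp__.match('BB_NUMBER_THREADS ?= "4"')
# m.group('var') == 'BB_NUMBER_THREADS'; m.group('value') == '4'

# A value with an unbalanced inner quote is now rejected rather than
# silently matched with the stray quote folded into the value:
assert __config_regexp__.match('FOO = "a"b"') is None
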
@@ -1,8 +1,6 @@
import logging
import signal
import subprocess
import errno
import select

logger = logging.getLogger('BitBake.Process')

@@ -70,38 +68,20 @@ def _logged_communicate(pipe, log, input):
pipe.stdin.write(input)
pipe.stdin.close()

bufsize = 512
outdata, errdata = [], []
rin = []
while pipe.poll() is None:
if pipe.stdout is not None:
data = pipe.stdout.read(bufsize)
if data is not None:
outdata.append(data)
log.write(data)

if pipe.stdout is not None:
bb.utils.nonblockingfd(pipe.stdout.fileno())
rin.append(pipe.stdout)
if pipe.stderr is not None:
bb.utils.nonblockingfd(pipe.stderr.fileno())
rin.append(pipe.stderr)

try:
while pipe.poll() is None:
rlist = rin
try:
r,w,e = select.select (rlist, [], [])
except OSError, e:
if e.errno != errno.EINTR:
raise

if pipe.stdout in r:
data = pipe.stdout.read()
if data is not None:
outdata.append(data)
log.write(data)

if pipe.stderr in r:
data = pipe.stderr.read()
if data is not None:
errdata.append(data)
log.write(data)
finally:
log.flush()
if pipe.stderr is not None:
data = pipe.stderr.read(bufsize)
if data is not None:
errdata.append(data)
log.write(data)
return ''.join(outdata), ''.join(errdata)

def run(cmd, input=None, log=None, **options):

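The rewritten _logged_communicate() drains stdout and stderr through select() on non-blocking descriptors, so a process filling one pipe can no longer stall reads on the other. The core of that pattern in isolation, with set_nonblocking playing the role of bb.utils.nonblockingfd and drain as an illustrative name:

import fcntl
import os
import select

def set_nonblocking(fd):
    flags = fcntl.fcntl(fd, fcntl.F_GETFL)
    fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)

def drain(proc, sink):
    # proc is a subprocess.Popen created with stdout/stderr pipes
    rin = []
    for p in (proc.stdout, proc.stderr):
        if p is not None:
            set_nonblocking(p.fileno())
            rin.append(p)
    while proc.poll() is None:
        # Wait until at least one pipe has data, then read whatever is there
        r, _, _ = select.select(rin, [], [])
        for p in r:
            data = p.read()
            if data:
                sink.write(data)
    sink.flush()
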
||||
@@ -35,8 +35,6 @@ class NoProvider(bb.BBHandledException):
|
||||
class NoRProvider(bb.BBHandledException):
|
||||
"""Exception raised when no provider of a runtime dependency can be found"""
|
||||
|
||||
class MultipleRProvider(bb.BBHandledException):
|
||||
"""Exception raised when multiple providers of a runtime dependency can be found"""
|
||||
|
||||
def findProviders(cfgData, dataCache, pkg_pn = None):
|
||||
"""
|
||||
@@ -130,7 +128,7 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
|
||||
m = re.match('(\d+:)*(.*)(_.*)*', preferred_v)
|
||||
if m:
|
||||
if m.group(1):
|
||||
preferred_e = m.group(1)[:-1]
|
||||
preferred_e = int(m.group(1)[:-1])
|
||||
else:
|
||||
preferred_e = None
|
||||
preferred_v = m.group(2)
|
||||
|
||||
@@ -375,8 +375,9 @@ class RunQueueData:
|
||||
"""
|
||||
|
||||
runq_build = []
|
||||
recursivetasks = {}
|
||||
recursivetasksselfref = set()
|
||||
recursive_tdepends = {}
|
||||
runq_recrdepends = []
|
||||
tdepends_fnid = {}
|
||||
|
||||
taskData = self.taskData
|
||||
|
||||
@@ -405,10 +406,11 @@ class RunQueueData:
|
||||
depdata = taskData.build_targets[depid][0]
|
||||
if depdata is None:
|
||||
continue
|
||||
dep = taskData.fn_index[depdata]
|
||||
for taskname in tasknames:
|
||||
taskid = taskData.gettask_id_fromfnid(depdata, taskname)
|
||||
taskid = taskData.gettask_id(dep, taskname, False)
|
||||
if taskid is not None:
|
||||
depends.add(taskid)
|
||||
depends.append(taskid)
|
||||
|
||||
def add_runtime_dependencies(depids, tasknames, depends):
|
||||
for depid in depids:
|
||||
@@ -417,20 +419,15 @@ class RunQueueData:
|
||||
depdata = taskData.run_targets[depid][0]
|
||||
if depdata is None:
|
||||
continue
|
||||
dep = taskData.fn_index[depdata]
for taskname in tasknames:
taskid = taskData.gettask_id_fromfnid(depdata, taskname)
taskid = taskData.gettask_id(dep, taskname, False)
if taskid is not None:
depends.add(taskid)

def add_resolved_dependencies(depids, tasknames, depends):
for depid in depids:
for taskname in tasknames:
taskid = taskData.gettask_id_fromfnid(depid, taskname)
if taskid is not None:
depends.add(taskid)
depends.append(taskid)

for task in xrange(len(taskData.tasks_name)):
depends = set()
depends = []
recrdepends = []
fnid = taskData.tasks_fnid[task]
fn = taskData.fn_index[fnid]
task_deps = self.dataCache.task_deps[fn]
@@ -442,7 +439,7 @@ class RunQueueData:
# Resolve task internal dependencies
#
# e.g. addtask before X after Y
depends = set(taskData.tasks_tdepends[task])
depends = taskData.tasks_tdepends[task]

# Resolve 'deptask' dependencies
#
@@ -457,91 +454,99 @@ class RunQueueData:
# e.g. do_sometask[rdeptask] = "do_someothertask"
# (makes sure sometask runs after someothertask of all RDEPENDS)
if 'rdeptask' in task_deps and taskData.tasks_name[task] in task_deps['rdeptask']:
tasknames = task_deps['rdeptask'][taskData.tasks_name[task]].split()
add_runtime_dependencies(taskData.rdepids[fnid], tasknames, depends)
taskname = task_deps['rdeptask'][taskData.tasks_name[task]]
add_runtime_dependencies(taskData.rdepids[fnid], [taskname], depends)

# Resolve inter-task dependencies
#
# e.g. do_sometask[depends] = "targetname:do_someothertask"
# (makes sure sometask runs after targetname's someothertask)
if fnid not in tdepends_fnid:
tdepends_fnid[fnid] = set()
idepends = taskData.tasks_idepends[task]
for (depid, idependtask) in idepends:
if depid in taskData.build_targets and not depid in taskData.failed_deps:
if depid in taskData.build_targets:
# Won't be in build_targets if ASSUME_PROVIDED
depdata = taskData.build_targets[depid][0]
if depdata is not None:
taskid = taskData.gettask_id_fromfnid(depdata, idependtask)
dep = taskData.fn_index[depdata]
taskid = taskData.gettask_id(dep, idependtask, False)
if taskid is None:
bb.msg.fatal("RunQueue", "Task %s in %s depends upon non-existent task %s in %s" % (taskData.tasks_name[task], fn, idependtask, dep))
depends.add(taskid)
irdepends = taskData.tasks_irdepends[task]
for (depid, idependtask) in irdepends:
if depid in taskData.run_targets:
# Won't be in run_targets if ASSUME_PROVIDED
depdata = taskData.run_targets[depid][0]
if depdata is not None:
taskid = taskData.gettask_id_fromfnid(depdata, idependtask)
if taskid is None:
bb.msg.fatal("RunQueue", "Task %s in %s rdepends upon non-existent task %s in %s" % (taskData.tasks_name[task], fn, idependtask, dep))
depends.add(taskid)
depends.append(taskid)
if depdata != fnid:
tdepends_fnid[fnid].add(taskid)

# Resolve recursive 'recrdeptask' dependencies (Part A)

# Resolve recursive 'recrdeptask' dependencies (A)
#
# e.g. do_sometask[recrdeptask] = "do_someothertask"
# (makes sure sometask runs after someothertask of all DEPENDS, RDEPENDS and intertask dependencies, recursively)
# We cover the recursive part of the dependencies below
if 'recrdeptask' in task_deps and taskData.tasks_name[task] in task_deps['recrdeptask']:
tasknames = task_deps['recrdeptask'][taskData.tasks_name[task]].split()
recursivetasks[task] = tasknames
add_build_dependencies(taskData.depids[fnid], tasknames, depends)
add_runtime_dependencies(taskData.rdepids[fnid], tasknames, depends)
if taskData.tasks_name[task] in tasknames:
recursivetasksselfref.add(task)
for taskname in task_deps['recrdeptask'][taskData.tasks_name[task]].split():
recrdepends.append(taskname)
add_build_dependencies(taskData.depids[fnid], [taskname], depends)
add_runtime_dependencies(taskData.rdepids[fnid], [taskname], depends)

# Rmove all self references
if task in depends:
newdep = []
logger.debug(2, "Task %s (%s %s) contains self reference! %s", task, taskData.fn_index[taskData.tasks_fnid[task]], taskData.tasks_name[task], depends)
for dep in depends:
if task != dep:
newdep.append(dep)
depends = newdep

self.runq_fnid.append(taskData.tasks_fnid[task])
self.runq_task.append(taskData.tasks_name[task])
self.runq_depends.append(depends)
self.runq_depends.append(set(depends))
self.runq_revdeps.append(set())
self.runq_hash.append("")

runq_build.append(0)
runq_recrdepends.append(recrdepends)

# Resolve recursive 'recrdeptask' dependencies (Part B)
#
# Build a list of recursive cumulative dependencies for each fnid
# We do this by fnid, since if A depends on some task in B
# we're interested in later tasks B's fnid might have but B itself
# doesn't depend on
#
# Algorithm is O(tasks) + O(tasks)*O(fnids)
#
reccumdepends = {}
for task in xrange(len(self.runq_fnid)):
fnid = self.runq_fnid[task]
if fnid not in reccumdepends:
if fnid in tdepends_fnid:
reccumdepends[fnid] = tdepends_fnid[fnid]
else:
reccumdepends[fnid] = set()
reccumdepends[fnid].update(self.runq_depends[task])
for task in xrange(len(self.runq_fnid)):
taskfnid = self.runq_fnid[task]
for fnid in reccumdepends:
if task in reccumdepends[fnid]:
reccumdepends[fnid].add(task)
if taskfnid in reccumdepends:
reccumdepends[fnid].update(reccumdepends[taskfnid])

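The "Part B" pass shown above accumulates, per recipe (fnid), every task its tasks can reach. A minimal standalone sketch of the same two-pass idea, assuming plain integer task ids and hypothetical names (not the BitBake API):

    # Sketch: cumulative per-recipe dependencies, mirroring the Part B pass above.
    # tasks_fnid[t] maps task id -> recipe id; depends[t] is the set of direct deps.
    def cumulative_deps(tasks_fnid, depends):
        reccum = {}
        # Pass 1: seed each recipe with the direct dependencies of its tasks.
        for t, fnid in enumerate(tasks_fnid):
            reccum.setdefault(fnid, set()).update(depends[t])
        # Pass 2: for every task already reachable from a recipe, also pull in
        # the cumulative set of the recipe that task belongs to.
        for t, taskfnid in enumerate(tasks_fnid):
            for fnid in reccum:
                if t in reccum[fnid]:
                    reccum[fnid].add(t)
                    if taskfnid in reccum:
                        reccum[fnid].update(reccum[taskfnid])
        return reccum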
# Resolve recursive 'recrdeptask' dependencies (B)
#
# e.g. do_sometask[recrdeptask] = "do_someothertask"
# (makes sure sometask runs after someothertask of all DEPENDS, RDEPENDS and intertask dependencies, recursively)
# We need to do this separately since we need all of self.runq_depends to be complete before this is processed
extradeps = {}
for task in recursivetasks:
extradeps[task] = set(self.runq_depends[task])
tasknames = recursivetasks[task]
seendeps = set()
seenfnid = []

def generate_recdeps(t):
newdeps = set()
add_resolved_dependencies([taskData.tasks_fnid[t]], tasknames, newdeps)
extradeps[task].update(newdeps)
seendeps.add(t)
newdeps.add(t)
for i in newdeps:
for n in self.runq_depends[i]:
if n not in seendeps:
generate_recdeps(n)
generate_recdeps(task)

# Remove circular references so that do_a[recrdeptask] = "do_a do_b" can work
for task in recursivetasks:
extradeps[task].difference_update(recursivetasksselfref)

for task in xrange(len(taskData.tasks_name)):
# Add in extra dependencies
if task in extradeps:
self.runq_depends[task] = extradeps[task]
# Remove all self references
if task in self.runq_depends[task]:
logger.debug(2, "Task %s (%s %s) contains self reference! %s", task, taskData.fn_index[taskData.tasks_fnid[task]], taskData.tasks_name[task], self.runq_depends[task])
self.runq_depends[task].remove(task)
for task in xrange(len(self.runq_fnid)):
if len(runq_recrdepends[task]) > 0:
taskfnid = self.runq_fnid[task]
for dep in reccumdepends[taskfnid]:
# Ignore self references
if dep == task:
continue
for taskname in runq_recrdepends[task]:
if taskData.tasks_name[dep] == taskname:
self.runq_depends[task].add(dep)

# Step B - Mark all active tasks
#
@@ -692,36 +697,13 @@ class RunQueueData:
stampfnwhitelist.append(fn)
self.stampfnwhitelist = stampfnwhitelist

# Iterate over the task list looking for tasks with a 'setscene' function
# Interate over the task list looking for tasks with a 'setscene' function
self.runq_setscene = []
if not self.cooker.configuration.nosetscene:
for task in range(len(self.runq_fnid)):
setscene = taskData.gettask_id(self.taskData.fn_index[self.runq_fnid[task]], self.runq_task[task] + "_setscene", False)
if not setscene:
continue
self.runq_setscene.append(task)

def invalidate_task(fn, taskname, error_nostamp):
taskdep = self.dataCache.task_deps[fn]
if 'nostamp' in taskdep and taskname in taskdep['nostamp']:
if error_nostamp:
bb.fatal("Task %s is marked nostamp, cannot invalidate this task" % taskname)
else:
bb.debug(1, "Task %s is marked nostamp, cannot invalidate this task" % taskname)
else:
logger.verbose("Invalidate task %s, %s", taskname, fn)
bb.parse.siggen.invalidate_task(taskname, self.dataCache, fn)

# Invalidate task if force mode active
if self.cooker.configuration.force:
for (fn, target) in self.target_pairs:
invalidate_task(fn, target, False)

# Invalidate task if invalidate mode active
if self.cooker.configuration.invalidate_stamp:
for (fn, target) in self.target_pairs:
for st in self.cooker.configuration.invalidate_stamp.split(','):
invalidate_task(fn, "do_%s" % st, True)
for task in range(len(self.runq_fnid)):
setscene = taskData.gettask_id(self.taskData.fn_index[self.runq_fnid[task]], self.runq_task[task] + "_setscene", False)
if not setscene:
continue
self.runq_setscene.append(task)

# Interate over the task list and call into the siggen code
dealtwith = set()
@@ -749,6 +731,12 @@ class RunQueueData:
deps.append(depidentifier)
self.hash_deps[identifier] = deps

# Remove stamps for targets if force mode active
if self.cooker.configuration.force:
for (fn, target) in self.target_pairs:
logger.verbose("Remove stamp %s, %s", target, fn)
bb.build.del_stamp(target, self.dataCache, fn)

return len(self.runq_fnid)

def dump_data(self, taskQueue):
@@ -793,7 +781,101 @@ class RunQueue:

self.rqexe = None

def check_stamp_task(self, task, taskname = None, recurse = False, cache = None):
def check_stamps(self):
unchecked = {}
current = []
notcurrent = []
buildable = []

if self.stamppolicy == "perfile":
fulldeptree = False
else:
fulldeptree = True
stampwhitelist = []
if self.stamppolicy == "whitelist":
stampwhitelist = self.rqdata.stampfnwhitelist

for task in xrange(len(self.rqdata.runq_fnid)):
unchecked[task] = ""
if len(self.rqdata.runq_depends[task]) == 0:
buildable.append(task)

def check_buildable(self, task, buildable):
for revdep in self.rqdata.runq_revdeps[task]:
alldeps = 1
for dep in self.rqdata.runq_depends[revdep]:
if dep in unchecked:
alldeps = 0
if alldeps == 1:
if revdep in unchecked:
buildable.append(revdep)

for task in xrange(len(self.rqdata.runq_fnid)):
if task not in unchecked:
continue
fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
taskname = self.rqdata.runq_task[task]
stampfile = bb.build.stampfile(taskname, self.rqdata.dataCache, fn)
# If the stamp is missing its not current
if not os.access(stampfile, os.F_OK):
del unchecked[task]
notcurrent.append(task)
check_buildable(self, task, buildable)
continue
# If its a 'nostamp' task, it's not current
taskdep = self.rqdata.dataCache.task_deps[fn]
if 'nostamp' in taskdep and task in taskdep['nostamp']:
del unchecked[task]
notcurrent.append(task)
check_buildable(self, task, buildable)
continue

while (len(buildable) > 0):
nextbuildable = []
for task in buildable:
if task in unchecked:
fn = self.taskData.fn_index[self.rqdata.runq_fnid[task]]
taskname = self.rqdata.runq_task[task]
stampfile = bb.build.stampfile(taskname, self.rqdata.dataCache, fn)
iscurrent = True

t1 = os.stat(stampfile)[stat.ST_MTIME]
for dep in self.rqdata.runq_depends[task]:
if iscurrent:
fn2 = self.taskData.fn_index[self.rqdata.runq_fnid[dep]]
taskname2 = self.rqdata.runq_task[dep]
stampfile2 = bb.build.stampfile(taskname2, self.rqdata.dataCache, fn2)
if fn == fn2 or (fulldeptree and fn2 not in stampwhitelist):
if dep in notcurrent:
iscurrent = False
else:
t2 = os.stat(stampfile2)[stat.ST_MTIME]
if t1 < t2:
iscurrent = False
del unchecked[task]
if iscurrent:
current.append(task)
else:
notcurrent.append(task)

check_buildable(self, task, nextbuildable)

buildable = nextbuildable

#for task in range(len(self.runq_fnid)):
# fn = self.taskData.fn_index[self.runq_fnid[task]]
# taskname = self.runq_task[task]
# print "%s %s.%s" % (task, taskname, fn)

#print "Unchecked: %s" % unchecked
#print "Current: %s" % current
#print "Not current: %s" % notcurrent

if len(unchecked) > 0:
bb.msg.fatal("RunQueue", "check_stamps fatal internal error")
return current

def check_stamp_task(self, task, taskname = None, recurse = False):
def get_timestamp(f):
try:
if not os.access(f, os.F_OK):
@@ -829,9 +911,6 @@ class RunQueue:
if taskname != "do_setscene" and taskname.endswith("_setscene"):
return True

if cache is None:
cache = {}

iscurrent = True
t1 = get_timestamp(stampfile)
for dep in self.rqdata.runq_depends[task]:
@@ -852,18 +931,10 @@ class RunQueue:
logger.debug(2, 'Stampfile %s < %s', stampfile, stampfile2)
iscurrent = False
if recurse and iscurrent:
if dep in cache:
iscurrent = cache[dep]
if not iscurrent:
logger.debug(2, 'Stampfile for dependency %s:%s invalid (cached)' % (fn2, taskname2))
else:
iscurrent = self.check_stamp_task(dep, recurse=True, cache=cache)
cache[dep] = iscurrent
if recurse:
cache[task] = iscurrent
iscurrent = self.check_stamp_task(dep, recurse=True)
return iscurrent

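The cached form of check_stamp_task in the hunks above memoises each dependency's verdict in a dict keyed by task id, so shared subtrees of the dependency graph are only walked once. A reduced sketch of the same memoised recursion, with a hypothetical stamp_current() standing in for the real stamp/mtime comparison:

    # Sketch: memoised recursive stamp validity, as in the cached check_stamp_task.
    def check_current(task, depends, stamp_current, cache=None):
        if cache is None:
            cache = {}
        if task in cache:
            return cache[task]
        # A task is current only if its own stamp is valid and every
        # dependency is current too.
        iscurrent = stamp_current(task)
        if iscurrent:
            iscurrent = all(check_current(dep, depends, stamp_current, cache)
                            for dep in depends[task])
        cache[task] = iscurrent
        return iscurrent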
def _execute_runqueue(self):
def execute_runqueue(self):
"""
Run the tasks in a queue prepared by rqdata.prepare()
Upon failure, optionally try to recover the build using any alternate providers
@@ -927,19 +998,6 @@ class RunQueue:
# Loop
return retval

def execute_runqueue(self):
# Catch unexpected exceptions and ensure we exit when an error occurs, not loop.
try:
return self._execute_runqueue()
except bb.runqueue.TaskFailure:
raise
except SystemExit:
raise
except:
logger.error("An uncaught exception occured in runqueue, please see the failure below:")
self.state = runQueueComplete
raise

def finish_runqueue(self, now = False):
if not self.rqexe:
return
@@ -983,36 +1041,23 @@ class RunQueueExecute:
self.build_stamps = {}
self.failed_fnids = []

self.stampcache = {}

def runqueue_process_waitpid(self):
"""
Return none is there are no processes awaiting result collection, otherwise
collect the process exit codes and close the information pipe.
"""
pid, status = os.waitpid(-1, os.WNOHANG)
if pid == 0 or os.WIFSTOPPED(status):
result = os.waitpid(-1, os.WNOHANG)
if result[0] == 0 and result[1] == 0:
return None

if os.WIFEXITED(status):
status = os.WEXITSTATUS(status)
elif os.WIFSIGNALED(status):
# Per shell conventions for $?, when a process exits due to
# a signal, we return an exit code of 128 + SIGNUM
status = 128 + os.WTERMSIG(status)

task = self.build_pids[pid]
del self.build_pids[pid]

self.build_pipes[pid].close()
del self.build_pipes[pid]

# self.build_stamps[pid] may not exist when use shared work directory.
if pid in self.build_stamps:
del self.build_stamps[pid]

if status != 0:
self.task_fail(task, status)
task = self.build_pids[result[0]]
del self.build_pids[result[0]]
self.build_pipes[result[0]].close()
del self.build_pipes[result[0]]
# self.build_stamps[result[0]] may not exist when use shared work directory.
if result[0] in self.build_stamps.keys():
del self.build_stamps[result[0]]
if result[1] != 0:
self.task_fail(task, result[1]>>8)
else:
self.task_complete(task)
return True
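The waitpid handling above converts a raw wait status into a shell-style exit code before handing it to task_fail(). A self-contained sketch of that decoding using only standard os calls (the function name is illustrative):

    import os

    # Sketch: decode an os.waitpid() status the way the hunk above does.
    def decode_status(status):
        if os.WIFEXITED(status):
            return os.WEXITSTATUS(status)
        if os.WIFSIGNALED(status):
            # Shell convention for $?: death by signal N reports 128 + N.
            return 128 + os.WTERMSIG(status)
        return None  # stopped or continued: no exit code yet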
@@ -1119,6 +1164,8 @@ class RunQueueExecute:
os.umask(umask)

self.cooker.configuration.data.setVar("BB_WORKERCONTEXT", "1")
self.cooker.configuration.data.setVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY", self)
self.cooker.configuration.data.setVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY2", fn)
bb.parse.siggen.set_taskdata(self.rqdata.hashes, self.rqdata.hash_deps)
ret = 0
try:
@@ -1176,8 +1223,6 @@ class RunQueueExecuteTasks(RunQueueExecute):

self.stats = RunQueueStats(len(self.rqdata.runq_fnid))

self.stampcache = {}

# Mark initial buildable tasks
for task in xrange(self.stats.total):
self.runq_running.append(0)
@@ -1186,7 +1231,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
self.runq_buildable.append(1)
else:
self.runq_buildable.append(0)
if len(self.rqdata.runq_revdeps[task]) > 0 and self.rqdata.runq_revdeps[task].issubset(self.rq.scenequeue_covered) and task not in self.rq.scenequeue_notcovered:
if len(self.rqdata.runq_revdeps[task]) > 0 and self.rqdata.runq_revdeps[task].issubset(self.rq.scenequeue_covered):
self.rq.scenequeue_covered.add(task)

found = True
@@ -1197,7 +1242,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
continue
logger.debug(1, 'Considering %s (%s): %s' % (task, self.rqdata.get_user_idstring(task), str(self.rqdata.runq_revdeps[task])))

if len(self.rqdata.runq_revdeps[task]) > 0 and self.rqdata.runq_revdeps[task].issubset(self.rq.scenequeue_covered) and task not in self.rq.scenequeue_notcovered:
if len(self.rqdata.runq_revdeps[task]) > 0 and self.rqdata.runq_revdeps[task].issubset(self.rq.scenequeue_covered):
ok = True
for revdep in self.rqdata.runq_revdeps[task]:
if self.rqdata.runq_fnid[task] != self.rqdata.runq_fnid[revdep]:
@@ -1214,30 +1259,9 @@ class RunQueueExecuteTasks(RunQueueExecute):
# Allow the metadata to elect for setscene tasks to run anyway
covered_remove = set()
if self.rq.setsceneverify:
invalidtasks = []
for task in xrange(len(self.rqdata.runq_task)):
fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
taskname = self.rqdata.runq_task[task]
taskdep = self.rqdata.dataCache.task_deps[fn]

if 'noexec' in taskdep and taskname in taskdep['noexec']:
continue
if self.rq.check_stamp_task(task, taskname + "_setscene", cache=self.stampcache):
logger.debug(2, 'Setscene stamp current for task %s(%s)', task, self.rqdata.get_user_idstring(task))
continue
if self.rq.check_stamp_task(task, taskname, recurse = True, cache=self.stampcache):
logger.debug(2, 'Normal stamp current for task %s(%s)', task, self.rqdata.get_user_idstring(task))
continue
invalidtasks.append(task)

call = self.rq.setsceneverify + "(covered, tasknames, fnids, fns, d, invalidtasks=invalidtasks)"
call2 = self.rq.setsceneverify + "(covered, tasknames, fnids, fns, d)"
locs = { "covered" : self.rq.scenequeue_covered, "tasknames" : self.rqdata.runq_task, "fnids" : self.rqdata.runq_fnid, "fns" : self.rqdata.taskData.fn_index, "d" : self.cooker.configuration.data, "invalidtasks" : invalidtasks }
# Backwards compatibility with older versions without invalidtasks
try:
covered_remove = bb.utils.better_eval(call, locs)
except TypeError:
covered_remove = bb.utils.better_eval(call2, locs)
call = self.rq.setsceneverify + "(covered, tasknames, fnids, fns, d)"
locs = { "covered" : self.rq.scenequeue_covered, "tasknames" : self.rqdata.runq_task, "fnids" : self.rqdata.runq_fnid, "fns" : self.rqdata.taskData.fn_index, "d" : self.cooker.configuration.data }
covered_remove = bb.utils.better_eval(call, locs)

for task in covered_remove:
fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
@@ -1349,7 +1373,7 @@ class RunQueueExecuteTasks(RunQueueExecute):
self.task_skip(task)
return True

if self.rq.check_stamp_task(task, taskname, cache=self.stampcache):
if self.rq.check_stamp_task(task, taskname):
logger.debug(2, "Stamp current task %s (%s)", task,
self.rqdata.get_user_idstring(task))
self.task_skip(task)
@@ -1490,7 +1514,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
dep = self.rqdata.taskData.fn_index[depdata]
taskid = self.rqdata.get_task_id(self.rqdata.taskData.getfn_id(dep), idependtask.replace("_setscene", ""))
if taskid is None:
bb.msg.fatal("RunQueue", "Task %s:%s depends upon non-existent task %s:%s" % (self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[realid]], self.rqdata.taskData.tasks_name[realid], dep, idependtask))
bb.msg.fatal("RunQueue", "Task %s depends upon non-existent task %s:%s" % (self.rqdata.taskData.tasks_name[realid], dep, idependtask))

sq_revdeps_squash[self.rqdata.runq_setscene.index(task)].add(self.rqdata.runq_setscene.index(taskid))
# Have to zero this to avoid circular dependencies
@@ -1533,18 +1557,12 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
bb.build.make_stamp(taskname + "_setscene", self.rqdata.dataCache, fn)
continue

if self.rq.check_stamp_task(realtask, taskname + "_setscene", cache=self.stampcache):
if self.rq.check_stamp_task(realtask, taskname + "_setscene"):
logger.debug(2, 'Setscene stamp current for task %s(%s)', task, self.rqdata.get_user_idstring(realtask))
stamppresent.append(task)
self.task_skip(task)
continue

if self.rq.check_stamp_task(realtask, taskname, recurse = True, cache=self.stampcache):
logger.debug(2, 'Normal stamp current for task %s(%s)', task, self.rqdata.get_user_idstring(realtask))
stamppresent.append(task)
self.task_skip(task)
continue

sq_fn.append(fn)
sq_hashfn.append(self.rqdata.dataCache.hashfn[fn])
sq_hash.append(self.rqdata.runq_hash[realtask])
@@ -1632,7 +1650,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[realtask]]

taskname = self.rqdata.runq_task[realtask] + "_setscene"
if self.rq.check_stamp_task(realtask, self.rqdata.runq_task[realtask], recurse = True, cache=self.stampcache):
if self.rq.check_stamp_task(realtask, self.rqdata.runq_task[realtask], recurse = True):
logger.debug(2, 'Stamp for underlying task %s(%s) is current, so skipping setscene variant',
task, self.rqdata.get_user_idstring(realtask))
self.task_failoutright(task)
@@ -1644,7 +1662,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
self.task_failoutright(task)
return True

if self.rq.check_stamp_task(realtask, taskname, cache=self.stampcache):
if self.rq.check_stamp_task(realtask, taskname):
logger.debug(2, 'Setscene stamp current task %s(%s), so skip it and its dependencies',
task, self.rqdata.get_user_idstring(realtask))
self.task_skip(task)
@@ -1675,9 +1693,6 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
self.rq.scenequeue_covered = set()
for task in oldcovered:
self.rq.scenequeue_covered.add(self.rqdata.runq_setscene[task])
self.rq.scenequeue_notcovered = set()
for task in self.scenequeue_notcovered:
self.rq.scenequeue_notcovered.add(self.rqdata.runq_setscene[task])

logger.debug(1, 'We can skip tasks %s', sorted(self.rq.scenequeue_covered))

@@ -1761,6 +1776,15 @@ class runQueueTaskCompleted(runQueueEvent):
Event notifing a task completed
"""

def check_stamp_fn(fn, taskname, d):
rqexe = d.getVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY")
fn = d.getVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY2")
fnid = rqexe.rqdata.taskData.getfn_id(fn)
taskid = rqexe.rqdata.get_task_id(fnid, taskname)
if taskid is not None:
return rqexe.rq.check_stamp_task(taskid)
return None

class runQueuePipe():
"""
Abstraction for a pipe between a worker thread and the server
@@ -1768,7 +1792,7 @@ class runQueuePipe():
def __init__(self, pipein, pipeout, d):
self.input = pipein
pipeout.close()
bb.utils.nonblockingfd(self.input)
fcntl.fcntl(self.input, fcntl.F_SETFL, fcntl.fcntl(self.input, fcntl.F_GETFL) | os.O_NONBLOCK)
self.queue = ""
self.d = d


@@ -48,7 +48,7 @@ class ServerCommunicator():
if self.connection.poll(.5):
return self.connection.recv()
else:
return None, "Timeout while attempting to communicate with bitbake server"
return None
except KeyboardInterrupt:
pass


@@ -2,7 +2,6 @@ import hashlib
import logging
import os
import re
import tempfile
import bb.data

logger = logging.getLogger('BitBake.SigGen')
@@ -48,16 +47,9 @@ class SignatureGenerator(object):
def stampfile(self, stampbase, file_name, taskname, extrainfo):
return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')

def stampcleanmask(self, stampbase, file_name, taskname, extrainfo):
return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')

def dump_sigtask(self, fn, task, stampbase, runtime):
return

def invalidate_task(self, task, d, fn):
bb.build.del_stamp(task, d, fn)


class SignatureGeneratorBasic(SignatureGenerator):
"""
"""
@@ -68,7 +60,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
self.taskhash = {}
self.taskdeps = {}
self.runtaskdeps = {}
self.file_checksum_values = {}
self.gendeps = {}
self.lookupcache = {}
self.pkgnameextract = re.compile("(?P<fn>.*)\..*")
@@ -116,10 +107,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
data = data + dep
if dep in lookupcache:
var = lookupcache[dep]
elif dep[-1] == ']':
vf = dep[:-1].split('[')
var = d.getVarFlag(vf[0], vf[1], False)
lookupcache[dep] = var
else:
var = d.getVar(dep, False)
lookupcache[dep] = var
@@ -161,20 +148,10 @@ class SignatureGeneratorBasic(SignatureGenerator):
return False
return True

def read_taint(self, fn, task, stampbase):
taint = None
try:
with open(stampbase + '.' + task + '.taint', 'r') as taintf:
taint = taintf.read()
except IOError:
pass
return taint

def get_taskhash(self, fn, task, deps, dataCache):
k = fn + "." + task
data = dataCache.basetaskhash[k]
self.runtaskdeps[k] = []
self.file_checksum_values[k] = {}
recipename = dataCache.pkg_fn[fn]
for dep in sorted(deps, key=clean_basepath):
depname = dataCache.pkg_fn[self.pkgnameextract.search(dep).group('fn')]
@@ -184,17 +161,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?", dep)
data = data + self.taskhash[dep]
self.runtaskdeps[k].append(dep)

if task in dataCache.file_checksums[fn]:
checksums = bb.fetch2.get_file_checksums(dataCache.file_checksums[fn][task], recipename)
for (f,cs) in checksums:
self.file_checksum_values[k][f] = cs
data = data + cs

taint = self.read_taint(fn, task, dataCache.stamp[fn])
if taint:
data = data + taint

h = hashlib.md5(data).hexdigest()
self.taskhash[k] = h
#d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task])
@@ -231,29 +197,12 @@ class SignatureGeneratorBasic(SignatureGenerator):

if runtime and k in self.taskhash:
data['runtaskdeps'] = self.runtaskdeps[k]
data['file_checksum_values'] = self.file_checksum_values[k]
data['runtaskhashes'] = {}
for dep in data['runtaskdeps']:
data['runtaskhashes'][dep] = self.taskhash[dep]

taint = self.read_taint(fn, task, stampbase)
if taint:
data['taint'] = taint

fd, tmpfile = tempfile.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.")
try:
with os.fdopen(fd, "wb") as stream:
p = pickle.dump(data, stream, -1)
stream.flush()
os.fsync(fd)
os.chmod(tmpfile, 0664)
os.rename(tmpfile, sigfile)
except (OSError, IOError), err:
try:
os.unlink(tmpfile)
except OSError:
pass
raise err
p = pickle.Pickler(file(sigfile, "wb"), -1)
p.dump(data)

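One dump_sigtask variant in the hunk above writes the pickled signature data to a temporary file, flushes and fsync()s it, then rename()s it over the final name, so a crash can never leave a truncated sigfile behind. The same pattern in isolation (the function name and paths are illustrative):

    import os
    import pickle
    import tempfile

    # Sketch: crash-safe file replacement via tempfile + fsync + atomic rename.
    def atomic_dump(data, path):
        fd, tmpfile = tempfile.mkstemp(dir=os.path.dirname(path), prefix="sigtask.")
        try:
            with os.fdopen(fd, "wb") as stream:
                pickle.dump(data, stream, -1)
                stream.flush()
                os.fsync(stream.fileno())
            os.rename(tmpfile, path)  # atomic on POSIX within one filesystem
        except (OSError, IOError):
            try:
                os.unlink(tmpfile)
            except OSError:
                pass
            raise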
def dump_sigs(self, dataCache):
for fn in self.taskdeps:
@@ -269,27 +218,18 @@ class SignatureGeneratorBasic(SignatureGenerator):
class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
name = "basichash"

def stampfile(self, stampbase, fn, taskname, extrainfo, clean=False):
def stampfile(self, stampbase, fn, taskname, extrainfo):
if taskname != "do_setscene" and taskname.endswith("_setscene"):
k = fn + "." + taskname[:-9]
else:
k = fn + "." + taskname
if clean:
h = "*"
elif k in self.taskhash:
if k in self.taskhash:
h = self.taskhash[k]
else:
# If k is not in basehash, then error
h = self.basehash[k]
return ("%s.%s.%s.%s" % (stampbase, taskname, h, extrainfo)).rstrip('.')

def stampcleanmask(self, stampbase, fn, taskname, extrainfo):
return self.stampfile(stampbase, fn, taskname, extrainfo, clean=True)

def invalidate_task(self, task, d, fn):
bb.note("Tainting hash to force rebuild of task %s, %s" % (fn, task))
bb.build.write_taint(task, d, fn)

def dump_this_task(outfile, d):
import bb.parse
fn = d.getVar("BB_FILENAME", True)
@@ -298,7 +238,7 @@ def dump_this_task(outfile, d):

def clean_basepath(a):
if a.startswith("virtual:"):
b = a.rsplit(":", 1)[0] + ":" + a.rsplit("/", 1)[1]
b = a.rsplit(":", 1)[0] + a.rsplit("/", 1)[1]
else:
b = a.rsplit("/", 1)[1]
return b
@@ -309,12 +249,10 @@ def clean_basepaths(a):
b[clean_basepath(x)] = a[x]
return b

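With the variant of clean_basepath() that keeps the ":" separator (shown above), the function shortens a full recipe path to its base file name while preserving any virtual: prefix; clean_basepaths() applies the same mapping to every key of a dict. Expected behaviour on made-up example paths:

    # Sketch: behaviour of the ":"-separator clean_basepath() (illustrative paths).
    assert clean_basepath("/meta/recipes/foo/foo_1.0.bb") == "foo_1.0.bb"
    assert clean_basepath("virtual:native:/meta/recipes/foo/foo_1.0.bb") == "virtual:native:foo_1.0.bb"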
def compare_sigfiles(a, b, recursecb = None):
output = []

p1 = pickle.Unpickler(open(a, "rb"))
def compare_sigfiles(a, b):
p1 = pickle.Unpickler(file(a, "rb"))
a_data = p1.load()
p2 = pickle.Unpickler(open(b, "rb"))
p2 = pickle.Unpickler(file(b, "rb"))
b_data = p2.load()

def dict_diff(a, b, whitelist=set()):
@@ -330,123 +268,89 @@ def compare_sigfiles(a, b, recursecb = None):
return changed, added, removed

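The body of dict_diff() is elided by the hunk above; from its signature and call sites it returns three sets: keys whose values differ, keys present in only one dict, and keys present in only the other. A plausible reconstruction consistent with how the results are indexed below (a[dep] for added, b[dep] for removed), not necessarily the exact upstream body:

    # Sketch: inferred shape of dict_diff(), keyed to the call sites below.
    def dict_diff(a, b, whitelist=set()):
        changed = set(k for k in a if k in b and a[k] != b[k] and k not in whitelist)
        added = set(a) - set(b)    # looked up via a[dep] in the caller
        removed = set(b) - set(a)  # looked up via b[dep] in the caller
        return changed, added, removed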
if 'basewhitelist' in a_data and a_data['basewhitelist'] != b_data['basewhitelist']:
output.append("basewhitelist changed from %s to %s" % (a_data['basewhitelist'], b_data['basewhitelist']))
print "basewhitelist changed from %s to %s" % (a_data['basewhitelist'], b_data['basewhitelist'])
if a_data['basewhitelist'] and b_data['basewhitelist']:
output.append("changed items: %s" % a_data['basewhitelist'].symmetric_difference(b_data['basewhitelist']))
print "changed items: %s" % a_data['basewhitelist'].symmetric_difference(b_data['basewhitelist'])

if 'taskwhitelist' in a_data and a_data['taskwhitelist'] != b_data['taskwhitelist']:
output.append("taskwhitelist changed from %s to %s" % (a_data['taskwhitelist'], b_data['taskwhitelist']))
print "taskwhitelist changed from %s to %s" % (a_data['taskwhitelist'], b_data['taskwhitelist'])
if a_data['taskwhitelist'] and b_data['taskwhitelist']:
output.append("changed items: %s" % a_data['taskwhitelist'].symmetric_difference(b_data['taskwhitelist']))
print "changed items: %s" % a_data['taskwhitelist'].symmetric_difference(b_data['taskwhitelist'])

if a_data['taskdeps'] != b_data['taskdeps']:
output.append("Task dependencies changed from:\n%s\nto:\n%s" % (sorted(a_data['taskdeps']), sorted(b_data['taskdeps'])))
print "Task dependencies changed from:\n%s\nto:\n%s" % (sorted(a_data['taskdeps']), sorted(b_data['taskdeps']))

if a_data['basehash'] != b_data['basehash']:
output.append("basehash changed from %s to %s" % (a_data['basehash'], b_data['basehash']))
print "basehash changed from %s to %s" % (a_data['basehash'], b_data['basehash'])

changed, added, removed = dict_diff(a_data['gendeps'], b_data['gendeps'], a_data['basewhitelist'] & b_data['basewhitelist'])
if changed:
for dep in changed:
output.append("List of dependencies for variable %s changed from %s to %s" % (dep, a_data['gendeps'][dep], b_data['gendeps'][dep]))
print "List of dependencies for variable %s changed from %s to %s" % (dep, a_data['gendeps'][dep], b_data['gendeps'][dep])
if a_data['gendeps'][dep] and b_data['gendeps'][dep]:
output.append("changed items: %s" % a_data['gendeps'][dep].symmetric_difference(b_data['gendeps'][dep]))
print "changed items: %s" % a_data['gendeps'][dep].symmetric_difference(b_data['gendeps'][dep])
if added:
for dep in added:
output.append("Dependency on variable %s was added" % (dep))
print "Dependency on variable %s was added" % (dep)
if removed:
for dep in removed:
output.append("Dependency on Variable %s was removed" % (dep))
print "Dependency on Variable %s was removed" % (dep)


changed, added, removed = dict_diff(a_data['varvals'], b_data['varvals'])
if changed:
for dep in changed:
output.append("Variable %s value changed from %s to %s" % (dep, a_data['varvals'][dep], b_data['varvals'][dep]))

changed, added, removed = dict_diff(a_data['file_checksum_values'], b_data['file_checksum_values'])
if changed:
for f in changed:
output.append("Checksum for file %s changed from %s to %s" % (f, a_data['file_checksum_values'][f], b_data['file_checksum_values'][f]))
if added:
for f in added:
output.append("Dependency on checksum of file %s was added" % (f))
if removed:
for f in removed:
output.append("Dependency on checksum of file %s was removed" % (f))

print "Variable %s value changed from %s to %s" % (dep, a_data['varvals'][dep], b_data['varvals'][dep])

if 'runtaskhashes' in a_data and 'runtaskhashes' in b_data:
a = a_data['runtaskhashes']
b = b_data['runtaskhashes']
a = clean_basepaths(a_data['runtaskhashes'])
b = clean_basepaths(b_data['runtaskhashes'])
changed, added, removed = dict_diff(a, b)
if added:
for dep in added:
bdep_found = False
if removed:
for bdep in removed:
if a[dep] == b[bdep]:
#output.append("Dependency on task %s was replaced by %s with same hash" % (dep, bdep))
bdep_found = True
if not bdep_found:
output.append("Dependency on task %s was added with hash %s" % (clean_basepath(dep), a[dep]))
bdep_found = False
if removed:
for bdep in removed:
if a[dep] == b[bdep]:
#print "Dependency on task %s was replaced by %s with same hash" % (dep, bdep)
bdep_found = True
if not bdep_found:
print "Dependency on task %s was added with hash %s" % (dep, a[dep])
if removed:
for dep in removed:
adep_found = False
if added:
for adep in added:
if a[adep] == b[dep]:
#output.append("Dependency on task %s was replaced by %s with same hash" % (adep, dep))
adep_found = True
if not adep_found:
output.append("Dependency on task %s was removed with hash %s" % (clean_basepath(dep), b[dep]))
adep_found = False
if added:
for adep in added:
if a[adep] == b[dep]:
#print "Dependency on task %s was replaced by %s with same hash" % (adep, dep)
adep_found = True
if not adep_found:
print "Dependency on task %s was removed with hash %s" % (dep, b[dep])
if changed:
for dep in changed:
output.append("Hash for dependent task %s changed from %s to %s" % (clean_basepath(dep), a[dep], b[dep]))
if callable(recursecb):
recout = recursecb(dep, a[dep], b[dep])
if recout:
output.extend(recout)

a_taint = a_data.get('taint', None)
b_taint = b_data.get('taint', None)
if a_taint != b_taint:
output.append("Taint (by forced/invalidated task) changed from %s to %s" % (a_taint, b_taint))

return output

print "Hash for dependent task %s changed from %s to %s" % (dep, a[dep], b[dep])

def dump_sigfile(a):
output = []

p1 = pickle.Unpickler(open(a, "rb"))
p1 = pickle.Unpickler(file(a, "rb"))
a_data = p1.load()

output.append("basewhitelist: %s" % (a_data['basewhitelist']))
print "basewhitelist: %s" % (a_data['basewhitelist'])

output.append("taskwhitelist: %s" % (a_data['taskwhitelist']))
print "taskwhitelist: %s" % (a_data['taskwhitelist'])

output.append("Task dependencies: %s" % (sorted(a_data['taskdeps'])))
print "Task dependencies: %s" % (sorted(a_data['taskdeps']))

output.append("basehash: %s" % (a_data['basehash']))
print "basehash: %s" % (a_data['basehash'])

for dep in a_data['gendeps']:
output.append("List of dependencies for variable %s is %s" % (dep, a_data['gendeps'][dep]))
print "List of dependencies for variable %s is %s" % (dep, a_data['gendeps'][dep])

for dep in a_data['varvals']:
output.append("Variable %s value is %s" % (dep, a_data['varvals'][dep]))
print "Variable %s value is %s" % (dep, a_data['varvals'][dep])

if 'runtaskdeps' in a_data:
output.append("Tasks this task depends on: %s" % (a_data['runtaskdeps']))

if 'file_checksum_values' in a_data:
output.append("This task depends on the checksums of files: %s" % (a_data['file_checksum_values']))
print "Tasks this task depends on: %s" % (a_data['runtaskdeps'])

if 'runtaskhashes' in a_data:
for dep in a_data['runtaskhashes']:
output.append("Hash for dependent task %s is %s" % (dep, a_data['runtaskhashes'][dep]))

if 'taint' in a_data:
output.append("Tainted (by forced/invalidated task): %s" % a_data['taint'])

return output
print "Hash for dependent task %s is %s" % (dep, a_data['runtaskhashes'][dep])

@@ -55,7 +55,6 @@ class TaskData:
self.tasks_name = []
self.tasks_tdepends = []
self.tasks_idepends = []
self.tasks_irdepends = []
# Cache to speed up task ID lookups
self.tasks_lookup = {}

@@ -116,16 +115,6 @@ class TaskData:
ids.append(self.tasks_lookup[fnid][task])
return ids

def gettask_id_fromfnid(self, fnid, task):
"""
Return an ID number for the task matching fnid and task.
"""
if fnid in self.tasks_lookup:
if task in self.tasks_lookup[fnid]:
return self.tasks_lookup[fnid][task]

return None

def gettask_id(self, fn, task, create = True):
"""
Return an ID number for the task matching fn and task.
@@ -145,7 +134,6 @@ class TaskData:
self.tasks_fnid.append(fnid)
self.tasks_tdepends.append([])
self.tasks_idepends.append([])
self.tasks_irdepends.append([])

listid = len(self.tasks_name) - 1

@@ -176,9 +164,6 @@ class TaskData:
# Work out task dependencies
parentids = []
for dep in task_deps['parents'][task]:
if dep not in task_deps['tasks']:
bb.debug(2, "Not adding dependeny of %s on %s since %s does not exist" % (task, dep, dep))
continue
parentid = self.gettask_id(fn, dep)
parentids.append(parentid)
taskid = self.gettask_id(fn, task)
@@ -193,15 +178,6 @@ class TaskData:
bb.msg.fatal("TaskData", "Error for %s, dependency %s does not contain ':' character\n. Task 'depends' should be specified in the form 'packagename:task'" % (fn, dep))
ids.append(((self.getbuild_id(dep.split(":")[0])), dep.split(":")[1]))
self.tasks_idepends[taskid].extend(ids)
if 'rdepends' in task_deps and task in task_deps['rdepends']:
ids = []
for dep in task_deps['rdepends'][task].split():
if dep:
if ":" not in dep:
bb.msg.fatal("TaskData", "Error for %s, dependency %s does not contain ':' character\n. Task 'rdepends' should be specified in the form 'packagename:task'" % (fn, dep))
ids.append(((self.getrun_id(dep.split(":")[0])), dep.split(":")[1]))
self.tasks_irdepends[taskid].extend(ids)


# Work out build dependencies
if not fnid in self.depids:
@@ -485,7 +461,6 @@ class TaskData:
providers_list.append(dataCache.pkg_fn[fn])
bb.event.fire(bb.event.MultipleProviders(item, providers_list, runtime=True), cfgData)
self.consider_msgs_cache.append(item)
raise bb.providers.MultipleRProvider(item)

# run through the list until we find one that we can build
for fn in eligible:
@@ -558,11 +533,6 @@ class TaskData:
dependees = self.get_rdependees(targetid)
for fnid in dependees:
self.fail_fnid(fnid, missing_list)
for taskid in xrange(len(self.tasks_irdepends)):
irdepends = self.tasks_irdepends[taskid]
for (idependid, idependtask) in irdepends:
if idependid == targetid:
self.fail_fnid(self.tasks_fnid[taskid], missing_list)

def add_unresolved(self, cfgData, dataCache):
"""
@@ -584,7 +554,7 @@ class TaskData:
try:
self.add_rprovider(cfgData, dataCache, target)
added = added + 1
except (bb.providers.NoRProvider, bb.providers.MultipleRProvider):
except bb.providers.NoRProvider:
self.remove_runtarget(self.getrun_id(target))
logger.debug(1, "Resolved " + str(added) + " extra dependencies")
if added == 0:

@@ -1,369 +0,0 @@
#
# BitBake Test for codeparser.py
#
# Copyright (C) 2010 Chris Larson
# Copyright (C) 2012 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#

import unittest
import logging
import bb

logger = logging.getLogger('BitBake.TestCodeParser')

import bb.data

class ReferenceTest(unittest.TestCase):
def setUp(self):
self.d = bb.data.init()

def setEmptyVars(self, varlist):
for k in varlist:
self.d.setVar(k, "")

def setValues(self, values):
for k, v in values.items():
self.d.setVar(k, v)

def assertReferences(self, refs):
self.assertEqual(self.references, refs)

def assertExecs(self, execs):
self.assertEqual(self.execs, execs)

class VariableReferenceTest(ReferenceTest):

def parseExpression(self, exp):
parsedvar = self.d.expandWithRefs(exp, None)
self.references = parsedvar.references

def test_simple_reference(self):
self.setEmptyVars(["FOO"])
self.parseExpression("${FOO}")
self.assertReferences(set(["FOO"]))

def test_nested_reference(self):
self.setEmptyVars(["BAR"])
self.d.setVar("FOO", "BAR")
self.parseExpression("${${FOO}}")
self.assertReferences(set(["FOO", "BAR"]))

def test_python_reference(self):
self.setEmptyVars(["BAR"])
self.parseExpression("${@bb.data.getVar('BAR', d, True) + 'foo'}")
self.assertReferences(set(["BAR"]))

class ShellReferenceTest(ReferenceTest):

def parseExpression(self, exp):
parsedvar = self.d.expandWithRefs(exp, None)
parser = bb.codeparser.ShellParser("ParserTest", logger)
parser.parse_shell(parsedvar.value)

self.references = parsedvar.references
self.execs = parser.execs

def test_quotes_inside_assign(self):
self.parseExpression('foo=foo"bar"baz')
self.assertReferences(set([]))

def test_quotes_inside_arg(self):
self.parseExpression('sed s#"bar baz"#"alpha beta"#g')
self.assertExecs(set(["sed"]))

def test_arg_continuation(self):
self.parseExpression("sed -i -e s,foo,bar,g \\\n *.pc")
self.assertExecs(set(["sed"]))

def test_dollar_in_quoted(self):
self.parseExpression('sed -i -e "foo$" *.pc')
self.assertExecs(set(["sed"]))

def test_quotes_inside_arg_continuation(self):
self.setEmptyVars(["bindir", "D", "libdir"])
self.parseExpression("""
sed -i -e s#"moc_location=.*$"#"moc_location=${bindir}/moc4"# \\
-e s#"uic_location=.*$"#"uic_location=${bindir}/uic4"# \\
${D}${libdir}/pkgconfig/*.pc
""")
self.assertReferences(set(["bindir", "D", "libdir"]))

def test_assign_subshell_expansion(self):
self.parseExpression("foo=$(echo bar)")
self.assertExecs(set(["echo"]))

def test_shell_unexpanded(self):
self.setEmptyVars(["QT_BASE_NAME"])
self.parseExpression('echo "${QT_BASE_NAME}"')
self.assertExecs(set(["echo"]))
self.assertReferences(set(["QT_BASE_NAME"]))

def test_incomplete_varexp_single_quotes(self):
self.parseExpression("sed -i -e 's:IP{:I${:g' $pc")
self.assertExecs(set(["sed"]))


def test_until(self):
self.parseExpression("until false; do echo true; done")
self.assertExecs(set(["false", "echo"]))
self.assertReferences(set())

def test_case(self):
self.parseExpression("""
case $foo in
*)
bar
;;
esac
""")
self.assertExecs(set(["bar"]))
self.assertReferences(set())

def test_assign_exec(self):
self.parseExpression("a=b c='foo bar' alpha 1 2 3")
self.assertExecs(set(["alpha"]))

def test_redirect_to_file(self):
self.setEmptyVars(["foo"])
self.parseExpression("echo foo >${foo}/bar")
self.assertExecs(set(["echo"]))
self.assertReferences(set(["foo"]))

def test_heredoc(self):
self.setEmptyVars(["theta"])
self.parseExpression("""
cat <<END
alpha
beta
${theta}
END
""")
self.assertReferences(set(["theta"]))

def test_redirect_from_heredoc(self):
v = ["B", "SHADOW_MAILDIR", "SHADOW_MAILFILE", "SHADOW_UTMPDIR", "SHADOW_LOGDIR", "bindir"]
self.setEmptyVars(v)
self.parseExpression("""
cat <<END >${B}/cachedpaths
shadow_cv_maildir=${SHADOW_MAILDIR}
shadow_cv_mailfile=${SHADOW_MAILFILE}
shadow_cv_utmpdir=${SHADOW_UTMPDIR}
shadow_cv_logdir=${SHADOW_LOGDIR}
shadow_cv_passwd_dir=${bindir}
END
""")
self.assertReferences(set(v))
self.assertExecs(set(["cat"]))

# def test_incomplete_command_expansion(self):
# self.assertRaises(reftracker.ShellSyntaxError, reftracker.execs,
# bbvalue.shparse("cp foo`", self.d), self.d)

# def test_rogue_dollarsign(self):
# self.setValues({"D" : "/tmp"})
# self.parseExpression("install -d ${D}$")
# self.assertReferences(set(["D"]))
# self.assertExecs(set(["install"]))


class PythonReferenceTest(ReferenceTest):

def setUp(self):
self.d = bb.data.init()
if hasattr(bb.utils, "_context"):
self.context = bb.utils._context
else:
import __builtin__
self.context = __builtin__.__dict__

def parseExpression(self, exp):
parsedvar = self.d.expandWithRefs(exp, None)
parser = bb.codeparser.PythonParser("ParserTest", logger)
parser.parse_python(parsedvar.value)

self.references = parsedvar.references | parser.references
self.execs = parser.execs

@staticmethod
def indent(value):
"""Python Snippets have to be indented, python values don't have to
be. These unit tests are testing snippets."""
return " " + value

def test_getvar_reference(self):
self.parseExpression("bb.data.getVar('foo', d, True)")
self.assertReferences(set(["foo"]))
self.assertExecs(set())

def test_getvar_computed_reference(self):
self.parseExpression("bb.data.getVar('f' + 'o' + 'o', d, True)")
self.assertReferences(set())
self.assertExecs(set())

def test_getvar_exec_reference(self):
self.parseExpression("eval('bb.data.getVar(\"foo\", d, True)')")
self.assertReferences(set())
self.assertExecs(set(["eval"]))

def test_var_reference(self):
self.context["foo"] = lambda x: x
self.setEmptyVars(["FOO"])
self.parseExpression("foo('${FOO}')")
self.assertReferences(set(["FOO"]))
self.assertExecs(set(["foo"]))
del self.context["foo"]

def test_var_exec(self):
for etype in ("func", "task"):
self.d.setVar("do_something", "echo 'hi mom! ${FOO}'")
self.d.setVarFlag("do_something", etype, True)
self.parseExpression("bb.build.exec_func('do_something', d)")
self.assertReferences(set(["do_something"]))

def test_function_reference(self):
self.context["testfunc"] = lambda msg: bb.msg.note(1, None, msg)
self.d.setVar("FOO", "Hello, World!")
self.parseExpression("testfunc('${FOO}')")
self.assertReferences(set(["FOO"]))
self.assertExecs(set(["testfunc"]))
del self.context["testfunc"]

def test_qualified_function_reference(self):
self.parseExpression("time.time()")
self.assertExecs(set(["time.time"]))

def test_qualified_function_reference_2(self):
self.parseExpression("os.path.dirname('/foo/bar')")
self.assertExecs(set(["os.path.dirname"]))

def test_qualified_function_reference_nested(self):
self.parseExpression("time.strftime('%Y%m%d',time.gmtime())")
self.assertExecs(set(["time.strftime", "time.gmtime"]))

def test_function_reference_chained(self):
self.context["testget"] = lambda: "\tstrip me "
self.parseExpression("testget().strip()")
self.assertExecs(set(["testget"]))
del self.context["testget"]


class DependencyReferenceTest(ReferenceTest):

pydata = """
bb.data.getVar('somevar', d, True)
def test(d):
foo = 'bar %s' % 'foo'
def test2(d):
d.getVar(foo, True)
d.getVar('bar', False)
test2(d)

def a():
\"\"\"some
stuff
\"\"\"
return "heh"

test(d)

bb.data.expand(bb.data.getVar("something", False, d), d)
bb.data.expand("${inexpand} somethingelse", d)
bb.data.getVar(a(), d, False)
"""

def test_python(self):
self.d.setVar("FOO", self.pydata)
self.setEmptyVars(["inexpand", "a", "test2", "test"])
self.d.setVarFlags("FOO", {"func": True, "python": True})

deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)

self.assertEquals(deps, set(["somevar", "bar", "something", "inexpand", "test", "test2", "a"]))


shelldata = """
foo () {
bar
}
{
echo baz
$(heh)
eval `moo`
}
a=b
c=d
(
true && false
test -f foo
testval=something
$testval
) || aiee
! inverted
echo ${somevar}

case foo in
bar)
echo bar
;;
baz)
echo baz
;;
foo*)
echo foo
;;
esac
"""

def test_shell(self):
execs = ["bar", "echo", "heh", "moo", "true", "aiee"]
self.d.setVar("somevar", "heh")
self.d.setVar("inverted", "echo inverted...")
self.d.setVarFlag("inverted", "func", True)
self.d.setVar("FOO", self.shelldata)
self.d.setVarFlags("FOO", {"func": True})
self.setEmptyVars(execs)

deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)

self.assertEquals(deps, set(["somevar", "inverted"] + execs))


def test_vardeps(self):
self.d.setVar("oe_libinstall", "echo test")
self.d.setVar("FOO", "foo=oe_libinstall; eval $foo")
self.d.setVarFlag("FOO", "vardeps", "oe_libinstall")

deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)

self.assertEquals(deps, set(["oe_libinstall"]))

def test_vardeps_expand(self):
self.d.setVar("oe_libinstall", "echo test")
self.d.setVar("FOO", "foo=oe_libinstall; eval $foo")
self.d.setVarFlag("FOO", "vardeps", "${@'oe_libinstall'}")

deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)

self.assertEquals(deps, set(["oe_libinstall"]))

#Currently no wildcard support
#def test_vardeps_wildcards(self):
# self.d.setVar("oe_libinstall", "echo test")
# self.d.setVar("FOO", "foo=oe_libinstall; eval $foo")
# self.d.setVarFlag("FOO", "vardeps", "oe_*")
# self.assertEquals(deps, set(["oe_libinstall"]))

@@ -1,134 +0,0 @@
|
||||
#
|
||||
# BitBake Tests for Copy-on-Write (cow.py)
|
||||
#
|
||||
# Copyright 2006 Holger Freyther <freyther@handhelds.org>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#

import unittest
import os

class COWTestCase(unittest.TestCase):
    """
    Test case for the COW module from mithro
    """

    def testGetSet(self):
        """
        Test and set
        """
        from bb.COW import COWDictBase
        a = COWDictBase.copy()

        self.assertEquals(False, a.has_key('a'))

        a['a'] = 'a'
        a['b'] = 'b'
        self.assertEquals(True, a.has_key('a'))
        self.assertEquals(True, a.has_key('b'))
        self.assertEquals('a', a['a'])
        self.assertEquals('b', a['b'])

    def testCopyCopy(self):
        """
        Test the copy of copies
        """

        from bb.COW import COWDictBase

        # create two COW dict 'instances'
        b = COWDictBase.copy()
        c = COWDictBase.copy()

        # assign some keys to one instance, some keys to another
        b['a'] = 10
        b['c'] = 20
        c['a'] = 30

        # test separation of the two instances
        self.assertEquals(False, c.has_key('c'))
        self.assertEquals(30, c['a'])
        self.assertEquals(10, b['a'])

        # test copy
        b_2 = b.copy()
        c_2 = c.copy()

        self.assertEquals(False, c_2.has_key('c'))
        self.assertEquals(10, b_2['a'])

        b_2['d'] = 40
        self.assertEquals(False, c_2.has_key('d'))
        self.assertEquals(True, b_2.has_key('d'))
        self.assertEquals(40, b_2['d'])
        self.assertEquals(False, b.has_key('d'))
        self.assertEquals(False, c.has_key('d'))

        c_2['d'] = 30
        self.assertEquals(True, c_2.has_key('d'))
        self.assertEquals(True, b_2.has_key('d'))
        self.assertEquals(30, c_2['d'])
        self.assertEquals(40, b_2['d'])
        self.assertEquals(False, b.has_key('d'))
        self.assertEquals(False, c.has_key('d'))

        # test copy of the copy
        c_3 = c_2.copy()
        b_3 = b_2.copy()
        b_3_2 = b_2.copy()

        c_3['e'] = 4711
        self.assertEquals(4711, c_3['e'])
        self.assertEquals(False, c_2.has_key('e'))
        self.assertEquals(False, b_3.has_key('e'))
        self.assertEquals(False, b_3_2.has_key('e'))
        self.assertEquals(False, b_2.has_key('e'))

        b_3['e'] = 'viel'
        self.assertEquals('viel', b_3['e'])
        self.assertEquals(4711, c_3['e'])
        self.assertEquals(False, c_2.has_key('e'))
        self.assertEquals(True, b_3.has_key('e'))
        self.assertEquals(False, b_3_2.has_key('e'))
        self.assertEquals(False, b_2.has_key('e'))

    def testCow(self):
        from bb.COW import COWDictBase
        c = COWDictBase.copy()
        c['123'] = 1027
        c['other'] = 4711
        c['d'] = { 'abc' : 10, 'bcd' : 20 }

        copy = c.copy()

        self.assertEquals(1027, c['123'])
        self.assertEquals(4711, c['other'])
        self.assertEquals({'abc':10, 'bcd':20}, c['d'])
        self.assertEquals(1027, copy['123'])
        self.assertEquals(4711, copy['other'])
        self.assertEquals({'abc':10, 'bcd':20}, copy['d'])

        # cow it now
        copy['123'] = 1028
        copy['other'] = 4712
        copy['d']['abc'] = 20

        self.assertEquals(1027, c['123'])
        self.assertEquals(4711, c['other'])
        self.assertEquals({'abc':10, 'bcd':20}, c['d'])
        self.assertEquals(1028, copy['123'])
        self.assertEquals(4712, copy['other'])
        self.assertEquals({'abc':20, 'bcd':20}, copy['d'])
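The tests above pin down copy-on-write semantics: a copy() shares its ancestor's entries for reading until it writes its own. A minimal sketch of that behaviour, assuming an explicit parent pointer (illustration only; the real bb.COW builds the chain out of class inheritance and also tracks mutable values, which this sketch does not):

# Hypothetical copy-on-write mapping sketch, not the bb.COW implementation.
class SketchCOWDict:
    def __init__(self, parent=None):
        self._own = {}          # writes land here
        self._parent = parent   # reads fall back to the parent chain

    def copy(self):
        return SketchCOWDict(parent=self)

    def __setitem__(self, key, value):
        self._own[key] = value  # never touches the parent

    def __getitem__(self, key):
        if key in self._own:
            return self._own[key]
        if self._parent is not None:
            return self._parent[key]
        raise KeyError(key)

    def has_key(self, key):     # matching the Python 2 style API used in the tests
        try:
            self[key]
            return True
        except KeyError:
            return False

# b = SketchCOWDict(); b['a'] = 10; b_2 = b.copy(); b_2['d'] = 40
# b_2['a'] is 10 (inherited), while 'd' stays invisible to b, as testCopyCopy asserts.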
@@ -1,252 +0,0 @@
#
# BitBake Tests for the Data Store (data.py/data_smart.py)
#
# Copyright (C) 2010 Chris Larson
# Copyright (C) 2012 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#

import unittest
import bb
import bb.data

class DataExpansions(unittest.TestCase):
    def setUp(self):
        self.d = bb.data.init()
        self.d["foo"] = "value of foo"
        self.d["bar"] = "value of bar"
        self.d["value of foo"] = "value of 'value of foo'"

    def test_one_var(self):
        val = self.d.expand("${foo}")
        self.assertEqual(str(val), "value of foo")

    def test_indirect_one_var(self):
        val = self.d.expand("${${foo}}")
        self.assertEqual(str(val), "value of 'value of foo'")

    def test_indirect_and_another(self):
        val = self.d.expand("${${foo}} ${bar}")
        self.assertEqual(str(val), "value of 'value of foo' value of bar")

    def test_python_snippet(self):
        val = self.d.expand("${@5*12}")
        self.assertEqual(str(val), "60")

    def test_expand_in_python_snippet(self):
        val = self.d.expand("${@'boo ' + '${foo}'}")
        self.assertEqual(str(val), "boo value of foo")

    def test_python_snippet_getvar(self):
        val = self.d.expand("${@d.getVar('foo', True) + ' ${bar}'}")
        self.assertEqual(str(val), "value of foo value of bar")

    def test_python_snippet_syntax_error(self):
        self.d.setVar("FOO", "${@foo = 5}")
        self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)

    def test_python_snippet_runtime_error(self):
        self.d.setVar("FOO", "${@int('test')}")
        self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)

    def test_python_snippet_error_path(self):
        self.d.setVar("FOO", "foo value ${BAR}")
        self.d.setVar("BAR", "bar value ${@int('test')}")
        self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)

    def test_value_containing_value(self):
        val = self.d.expand("${@d.getVar('foo', True) + ' ${bar}'}")
        self.assertEqual(str(val), "value of foo value of bar")

    def test_reference_undefined_var(self):
        val = self.d.expand("${undefinedvar} meh")
        self.assertEqual(str(val), "${undefinedvar} meh")

    def test_double_reference(self):
        self.d.setVar("BAR", "bar value")
        self.d.setVar("FOO", "${BAR} foo ${BAR}")
        val = self.d.getVar("FOO", True)
        self.assertEqual(str(val), "bar value foo bar value")

    def test_direct_recursion(self):
        self.d.setVar("FOO", "${FOO}")
        self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)

    def test_indirect_recursion(self):
        self.d.setVar("FOO", "${BAR}")
        self.d.setVar("BAR", "${BAZ}")
        self.d.setVar("BAZ", "${FOO}")
        self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)

    def test_recursion_exception(self):
        self.d.setVar("FOO", "${BAR}")
        self.d.setVar("BAR", "${${@'FOO'}}")
        self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True)

    def test_incomplete_varexp_single_quotes(self):
        self.d.setVar("FOO", "sed -i -e 's:IP{:I${:g' $pc")
        val = self.d.getVar("FOO", True)
        self.assertEqual(str(val), "sed -i -e 's:IP{:I${:g' $pc")

    def test_nonstring(self):
        self.d.setVar("TEST", 5)
        val = self.d.getVar("TEST", True)
        self.assertEqual(str(val), "5")

    def test_rename(self):
        self.d.renameVar("foo", "newfoo")
        self.assertEqual(self.d.getVar("newfoo"), "value of foo")
        self.assertEqual(self.d.getVar("foo"), None)

    def test_deletion(self):
        self.d.delVar("foo")
        self.assertEqual(self.d.getVar("foo"), None)

    def test_keys(self):
        keys = self.d.keys()
        self.assertEqual(keys, ['value of foo', 'foo', 'bar'])

class TestNestedExpansions(unittest.TestCase):
    def setUp(self):
        self.d = bb.data.init()
        self.d["foo"] = "foo"
        self.d["bar"] = "bar"
        self.d["value of foobar"] = "187"

    def test_refs(self):
        val = self.d.expand("${value of ${foo}${bar}}")
        self.assertEqual(str(val), "187")

    #def test_python_refs(self):
    #    val = self.d.expand("${@${@3}**2 + ${@4}**2}")
    #    self.assertEqual(str(val), "25")

    def test_ref_in_python_ref(self):
        val = self.d.expand("${@'${foo}' + 'bar'}")
        self.assertEqual(str(val), "foobar")

    def test_python_ref_in_ref(self):
        val = self.d.expand("${${@'f'+'o'+'o'}}")
        self.assertEqual(str(val), "foo")

    def test_deep_nesting(self):
        depth = 100
        val = self.d.expand("${" * depth + "foo" + "}" * depth)
        self.assertEqual(str(val), "foo")

    #def test_deep_python_nesting(self):
    #    depth = 50
    #    val = self.d.expand("${@" * depth + "1" + "+1}" * depth)
    #    self.assertEqual(str(val), str(depth + 1))

    def test_mixed(self):
        val = self.d.expand("${value of ${@('${foo}'+'bar')[0:3]}${${@'BAR'.lower()}}}")
        self.assertEqual(str(val), "187")

    def test_runtime(self):
        val = self.d.expand("${${@'value of' + ' f'+'o'+'o'+'b'+'a'+'r'}}")
        self.assertEqual(str(val), "187")

class TestMemoize(unittest.TestCase):
    def test_memoized(self):
        d = bb.data.init()
        d.setVar("FOO", "bar")
        self.assertTrue(d.getVar("FOO") is d.getVar("FOO"))

    def test_not_memoized(self):
        d1 = bb.data.init()
        d2 = bb.data.init()
        d1.setVar("FOO", "bar")
        d2.setVar("FOO", "bar2")
        self.assertTrue(d1.getVar("FOO") is not d2.getVar("FOO"))

    def test_changed_after_memoized(self):
        d = bb.data.init()
        d.setVar("foo", "value of foo")
        self.assertEqual(str(d.getVar("foo")), "value of foo")
        d.setVar("foo", "second value of foo")
        self.assertEqual(str(d.getVar("foo")), "second value of foo")

    def test_same_value(self):
        d = bb.data.init()
        d.setVar("foo", "value of")
        d.setVar("bar", "value of")
        self.assertEqual(d.getVar("foo"),
                         d.getVar("bar"))

class TestConcat(unittest.TestCase):
    def setUp(self):
        self.d = bb.data.init()
        self.d.setVar("FOO", "foo")
        self.d.setVar("VAL", "val")
        self.d.setVar("BAR", "bar")

    def test_prepend(self):
        self.d.setVar("TEST", "${VAL}")
        self.d.prependVar("TEST", "${FOO}:")
        self.assertEqual(self.d.getVar("TEST", True), "foo:val")

    def test_append(self):
        self.d.setVar("TEST", "${VAL}")
        self.d.appendVar("TEST", ":${BAR}")
        self.assertEqual(self.d.getVar("TEST", True), "val:bar")

    def test_multiple_append(self):
        self.d.setVar("TEST", "${VAL}")
        self.d.prependVar("TEST", "${FOO}:")
        self.d.appendVar("TEST", ":val2")
        self.d.appendVar("TEST", ":${BAR}")
        self.assertEqual(self.d.getVar("TEST", True), "foo:val:val2:bar")

class TestOverrides(unittest.TestCase):
    def setUp(self):
        self.d = bb.data.init()
        self.d.setVar("OVERRIDES", "foo:bar:local")
        self.d.setVar("TEST", "testvalue")

    def test_no_override(self):
        bb.data.update_data(self.d)
        self.assertEqual(self.d.getVar("TEST", True), "testvalue")

    def test_one_override(self):
        self.d.setVar("TEST_bar", "testvalue2")
        bb.data.update_data(self.d)
        self.assertEqual(self.d.getVar("TEST", True), "testvalue2")

    def test_multiple_override(self):
        self.d.setVar("TEST_bar", "testvalue2")
        self.d.setVar("TEST_local", "testvalue3")
        self.d.setVar("TEST_foo", "testvalue4")
        bb.data.update_data(self.d)
        self.assertEqual(self.d.getVar("TEST", True), "testvalue3")


class TestFlags(unittest.TestCase):
    def setUp(self):
        self.d = bb.data.init()
        self.d.setVar("foo", "value of foo")
        self.d.setVarFlag("foo", "flag1", "value of flag1")
        self.d.setVarFlag("foo", "flag2", "value of flag2")

    def test_setflag(self):
        self.assertEqual(self.d.getVarFlag("foo", "flag1"), "value of flag1")
        self.assertEqual(self.d.getVarFlag("foo", "flag2"), "value of flag2")

    def test_delflag(self):
        self.d.delVarFlag("foo", "flag2")
        self.assertEqual(self.d.getVarFlag("foo", "flag1"), "value of flag1")
        self.assertEqual(self.d.getVarFlag("foo", "flag2"), None)
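These tests hinge on two datastore behaviours: ${VAR} references expand recursively, and ${@...} evaluates inline Python, with reference cycles raising ExpansionError. A rough standalone sketch of the recursive-expansion-with-cycle-detection part (illustration only; the real logic lives in data_smart.py, and its pattern also covers ${@...} snippets and nested references):

import re

VAR_RE = re.compile(r"\$\{([a-zA-Z0-9_ ']+?)\}")  # simplified, hypothetical pattern

class SketchExpansionError(Exception):
    pass

def sketch_expand(store, value, visiting=frozenset()):
    # Recursively expand ${VAR} references, failing on reference cycles.
    def lookup(match):
        name = match.group(1)
        if name in visiting:
            raise SketchExpansionError("recursive reference to %s" % name)
        if name not in store:
            return match.group(0)   # unset vars are left as-is, like the tests expect
        return sketch_expand(store, store[name], visiting | {name})
    return VAR_RE.sub(lookup, value)

store = {"foo": "value of foo", "FOO": "${BAR}", "BAR": "${FOO}"}
assert sketch_expand(store, "${foo} meh") == "value of foo meh"
assert sketch_expand(store, "${undefinedvar} meh") == "${undefinedvar} meh"
# sketch_expand(store, "${FOO}") raises SketchExpansionError, mirroring test_indirect_recursion.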
@@ -1,191 +0,0 @@
#
# BitBake Tests for the Fetcher (fetch2/)
#
# Copyright (C) 2012 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#

import unittest
import tempfile
import subprocess
import os
import bb


class FetcherTest(unittest.TestCase):

    replaceuris = {
        ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "http://somewhere.org/somedir/")
            : "http://somewhere.org/somedir/git2_git.invalid.infradead.org.mtd-utils.git.tar.gz",
        ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http")
            : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
        ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http")
            : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
        ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/\\2;protocol=http")
            : "git://somewhere.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
        ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890", "git://someserver.org/bitbake", "git://git.openembedded.org/bitbake")
            : "git://git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890",
        ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache")
            : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz",
        ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache/")
            : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz",
        ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org/somedir3")
            : "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz",
        ("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz")
            : "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz",
        ("http://www.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", "http://www.apache.org/dist", "http://archive.apache.org/dist")
            : "http://archive.apache.org/dist/subversion/subversion-1.7.1.tar.bz2",
        ("http://www.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", "http://.*/.*", "file:///somepath/downloads/")
            : "file:///somepath/downloads/subversion-1.7.1.tar.bz2",
        ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http")
            : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
        ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http")
            : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
        ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/MIRRORNAME;protocol=http")
            : "git://somewhere.org/somedir/git.invalid.infradead.org.foo.mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",

        #Renaming files doesn't work
        #("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz") : "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz"
        #("file://sstate-xyz.tgz", "file://.*/.*", "file:///somewhere/1234/sstate-cache") : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz",
    }

    mirrorvar = "http://.*/.* file:///somepath/downloads/ \n" \
                "git://someserver.org/bitbake git://git.openembedded.org/bitbake \n" \
                "https://.*/.* file:///someotherpath/downloads/ \n" \
                "http://.*/.* file:///someotherpath/downloads/ \n"

    def setUp(self):
        self.d = bb.data.init()
        self.tempdir = tempfile.mkdtemp()
        self.dldir = os.path.join(self.tempdir, "download")
        os.mkdir(self.dldir)
        self.d.setVar("DL_DIR", self.dldir)
        self.unpackdir = os.path.join(self.tempdir, "unpacked")
        os.mkdir(self.unpackdir)
        persistdir = os.path.join(self.tempdir, "persistdata")
        self.d.setVar("PERSISTENT_DIR", persistdir)

    def tearDown(self):
        bb.utils.prunedir(self.tempdir)

    def test_fetch(self):
        fetcher = bb.fetch.Fetch(["http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d)
        fetcher.download()
        self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
        self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.1.tar.gz"), 57892)
        self.d.setVar("BB_NO_NETWORK", "1")
        fetcher = bb.fetch.Fetch(["http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d)
        fetcher.download()
        fetcher.unpack(self.unpackdir)
        self.assertEqual(len(os.listdir(self.unpackdir + "/bitbake-1.0/")), 9)
        self.assertEqual(len(os.listdir(self.unpackdir + "/bitbake-1.1/")), 9)

    def test_fetch_mirror(self):
        self.d.setVar("MIRRORS", "http://.*/.* http://downloads.yoctoproject.org/releases/bitbake")
        fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
        fetcher.download()
        self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)

    def test_fetch_premirror(self):
        self.d.setVar("PREMIRRORS", "http://.*/.* http://downloads.yoctoproject.org/releases/bitbake")
        fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
        fetcher.download()
        self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)

    def gitfetcher(self, url1, url2):
        def checkrevision(self, fetcher):
            fetcher.unpack(self.unpackdir)
            revision = subprocess.check_output("git rev-parse HEAD", shell=True, cwd=self.unpackdir + "/git").strip()
            self.assertEqual(revision, "270a05b0b4ba0959fe0624d2a4885d7b70426da5")

        self.d.setVar("BB_GENERATE_MIRROR_TARBALLS", "1")
        self.d.setVar("SRCREV", "270a05b0b4ba0959fe0624d2a4885d7b70426da5")
        fetcher = bb.fetch.Fetch([url1], self.d)
        fetcher.download()
        checkrevision(self, fetcher)
        # Wipe out the dldir clone and the unpacked source, turn off the network and check mirror tarball works
        bb.utils.prunedir(self.dldir + "/git2/")
        bb.utils.prunedir(self.unpackdir)
        self.d.setVar("BB_NO_NETWORK", "1")
        fetcher = bb.fetch.Fetch([url2], self.d)
        fetcher.download()
        checkrevision(self, fetcher)

    def test_gitfetch(self):
        url1 = url2 = "git://git.openembedded.org/bitbake"
        self.gitfetcher(url1, url2)

    def test_gitfetch_premirror(self):
        url1 = "git://git.openembedded.org/bitbake"
        url2 = "git://someserver.org/bitbake"
        self.d.setVar("PREMIRRORS", "git://someserver.org/bitbake git://git.openembedded.org/bitbake \n")
        self.gitfetcher(url1, url2)

    def test_gitfetch_premirror2(self):
        url1 = url2 = "git://someserver.org/bitbake"
        self.d.setVar("PREMIRRORS", "git://someserver.org/bitbake git://git.openembedded.org/bitbake \n")
        self.gitfetcher(url1, url2)

    def test_gitfetch_premirror3(self):
        realurl = "git://git.openembedded.org/bitbake"
        dummyurl = "git://someserver.org/bitbake"
        self.sourcedir = self.unpackdir.replace("unpacked", "sourcemirror.git")
        os.chdir(self.tempdir)
        subprocess.check_output("git clone %s %s 2> /dev/null" % (realurl, self.sourcedir), shell=True)
        self.d.setVar("PREMIRRORS", "%s git://%s;protocol=file \n" % (dummyurl, self.sourcedir))
        self.gitfetcher(dummyurl, dummyurl)

    def test_urireplace(self):
        for k, v in self.replaceuris.items():
            ud = bb.fetch.FetchData(k[0], self.d)
            ud.setup_localpath(self.d)
            mirrors = bb.fetch2.mirror_from_string("%s %s" % (k[1], k[2]))
            newuris, uds = bb.fetch2.build_mirroruris(ud, mirrors, self.d)
            self.assertEqual([v], newuris)

    def test_urilist1(self):
        fetcher = bb.fetch.FetchData("http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d)
        mirrors = bb.fetch2.mirror_from_string(self.mirrorvar)
        uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
        self.assertEqual(uris, ['file:///somepath/downloads/bitbake-1.0.tar.gz', 'file:///someotherpath/downloads/bitbake-1.0.tar.gz'])

    def test_urilist2(self):
        # Catch https:// -> files:// bug
        fetcher = bb.fetch.FetchData("https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d)
        mirrors = bb.fetch2.mirror_from_string(self.mirrorvar)
        uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
        self.assertEqual(uris, ['file:///someotherpath/downloads/bitbake-1.0.tar.gz'])


class URLHandle(unittest.TestCase):

    datatable = {
        "http://www.google.com/index.html" : ('http', 'www.google.com', '/index.html', '', '', {}),
        "cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'}),
        "cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', {'tag': 'V0-99-81', 'module': 'familiar/dist/ipkg'})
    }

    def test_decodeurl(self):
        for k, v in self.datatable.items():
            result = bb.fetch.decodeurl(k)
            self.assertEqual(result, v)

    def test_encodeurl(self):
        for k, v in self.datatable.items():
            result = bb.fetch.encodeurl(v)
            self.assertEqual(result, k)
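Each replaceuris key above is a (URI, match-pattern, replacement) triple, and its value is the mirror URI the fetcher should derive. A rough sketch of the plain regex-substitution case, using a hypothetical sketch_uri_replace (the real uri_replace() in fetch2 additionally matches per URL component and substitutes BASENAME/MIRRORNAME, as several table entries show):

import os
import re

def sketch_uri_replace(uri, uri_find, uri_replace):
    # Rewrite the URI when the pattern matches; when the replacement ends
    # with '/', append the source basename, as the file:// sstate rows expect.
    if not re.match(uri_find, uri):
        return None
    result = re.sub(uri_find, uri_replace, uri)
    if result.endswith("/"):
        result += os.path.basename(uri.split(";")[0])
    return result

print(sketch_uri_replace("file://sstate-xyz.tgz", "file://.*",
                         "file:///somewhere/1234/sstate-cache/"))
# -> file:///somewhere/1234/sstate-cache/sstate-xyz.tgz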
@@ -1,51 +0,0 @@
#
# BitBake Tests for utils.py
#
# Copyright (C) 2012 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#

import unittest
import bb

class VerCmpString(unittest.TestCase):

    def test_vercmpstring(self):
        result = bb.utils.vercmp_string('1', '2')
        self.assertTrue(result < 0)
        result = bb.utils.vercmp_string('2', '1')
        self.assertTrue(result > 0)
        result = bb.utils.vercmp_string('1', '1.0')
        self.assertTrue(result < 0)
        result = bb.utils.vercmp_string('1', '1.1')
        self.assertTrue(result < 0)
        result = bb.utils.vercmp_string('1.1', '1_p2')
        self.assertTrue(result < 0)

    def test_explode_dep_versions(self):
        correctresult = {"foo" : ["= 1.10"]}
        result = bb.utils.explode_dep_versions2("foo (= 1.10)")
        self.assertEqual(result, correctresult)
        result = bb.utils.explode_dep_versions2("foo (=1.10)")
        self.assertEqual(result, correctresult)
        result = bb.utils.explode_dep_versions2("foo ( = 1.10)")
        self.assertEqual(result, correctresult)
        result = bb.utils.explode_dep_versions2("foo ( =1.10)")
        self.assertEqual(result, correctresult)
        result = bb.utils.explode_dep_versions2("foo ( = 1.10 )")
        self.assertEqual(result, correctresult)
        result = bb.utils.explode_dep_versions2("foo ( =1.10 )")
        self.assertEqual(result, correctresult)
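test_explode_dep_versions checks that whitespace inside the version constraint is normalised away. A sketch of that normalisation with a hypothetical parser (the real explode_dep_versions2() in utils.py also handles multiple packages and multiple constraints per package):

import re

DEP_RE = re.compile(r"(?P<name>\S+)\s*\(\s*(?P<op>[<>=]+)\s*(?P<ver>[^\s)]+)\s*\)")

def sketch_explode(depstr):
    # Collapse 'foo ( =1.10 )' style strings to {'foo': ['= 1.10']}.
    deps = {}
    for m in DEP_RE.finditer(depstr):
        deps.setdefault(m.group("name"), []).append(
            "%s %s" % (m.group("op"), m.group("ver")))
    return deps

assert sketch_explode("foo ( =1.10 )") == {"foo": ["= 1.10"]}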
@@ -1,98 +0,0 @@
# tinfoil: a simple wrapper around cooker for bitbake-based command-line utilities
#
# Copyright (C) 2012 Intel Corporation
# Copyright (C) 2011 Mentor Graphics Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import logging
import warnings
import os
import sys

import bb.cache
import bb.cooker
import bb.providers
import bb.utils
from bb.cooker import state
import bb.fetch2

class Tinfoil:
    def __init__(self):
        # Needed to avoid deprecation warnings with python 2.6
        warnings.filterwarnings("ignore", category=DeprecationWarning)

        # Set up logging
        self.logger = logging.getLogger('BitBake')
        console = logging.StreamHandler(sys.stdout)
        format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
        bb.msg.addDefaultlogFilter(console)
        console.setFormatter(format)
        self.logger.addHandler(console)

        initialenv = os.environ.copy()
        bb.utils.clean_environment()
        self.config = TinfoilConfig(parse_only=True)
        self.cooker = bb.cooker.BBCooker(self.config,
                                         self.register_idle_function,
                                         initialenv)
        self.config_data = self.cooker.configuration.data
        bb.providers.logger.setLevel(logging.ERROR)
        self.cooker_data = None

    def register_idle_function(self, function, data):
        pass

    def parseRecipes(self):
        sys.stderr.write("Parsing recipes..")
        self.logger.setLevel(logging.WARNING)

        try:
            while self.cooker.state in (state.initial, state.parsing):
                self.cooker.updateCache()
        except KeyboardInterrupt:
            self.cooker.shutdown()
            self.cooker.updateCache()
            sys.exit(2)

        self.logger.setLevel(logging.INFO)
        sys.stderr.write("done.\n")

        self.cooker_data = self.cooker.status

    def prepare(self, config_only = False):
        if not self.cooker_data:
            if config_only:
                self.cooker.parseConfiguration()
                self.cooker_data = self.cooker.status
            else:
                self.parseRecipes()


class TinfoilConfig(object):
    def __init__(self, **options):
        self.pkgs_to_build = []
        self.debug_domains = []
        self.extra_assume_provided = []
        self.prefile = []
        self.postfile = []
        self.debug = 0
        self.__dict__.update(options)

    def __getattr__(self, attribute):
        try:
            return super(TinfoilConfig, self).__getattribute__(attribute)
        except AttributeError:
            return None
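Tinfoil gives bitbake-based command-line tools a parsed BitBake environment without starting a UI. A usage sketch relying only on the methods defined above, assuming it runs from an initialised build directory (the variable queried is illustrative):

# Hypothetical caller of the Tinfoil class above.
tinfoil = Tinfoil()
tinfoil.prepare(config_only=True)           # parse just the configuration files
machine = tinfoil.config_data.getVar("MACHINE", True)
print("MACHINE = %s" % machine)

# tinfoil.prepare() without arguments would instead parse all recipes
# via parseRecipes() and populate tinfoil.cooker_data.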
@@ -23,13 +23,11 @@
import gtk
import pango
import gobject
import bb.process
from bb.ui.crumbs.progressbar import HobProgressBar
from bb.ui.crumbs.hobwidget import hic, HobNotebook, HobAltButton, HobWarpCellRendererText, HobButton, HobInfoButton
from bb.ui.crumbs.hobwidget import hic, HobNotebook, HobAltButton, HobWarpCellRendererText
from bb.ui.crumbs.runningbuild import RunningBuildTreeView
from bb.ui.crumbs.runningbuild import BuildFailureTreeView
from bb.ui.crumbs.hobpages import HobPage
from bb.ui.crumbs.hobcolor import HobColors

class BuildConfigurationTreeView(gtk.TreeView):
    def __init__ (self):
@@ -83,27 +81,26 @@ class BuildConfigurationTreeView(gtk.TreeView):
        # renew the tree model after get the configuration messages
        self.set_model(listmodel)

    def show(self, src_config_info, src_params):
    def show(self, src_config_info):
        vars = []
        vars.append(self.set_vars("BB version:", src_params.bb_version))
        vars.append(self.set_vars("Target arch:", src_params.target_arch))
        vars.append(self.set_vars("Target OS:", src_params.target_os))
        vars.append(self.set_vars("BB version:", src_config_info.bb_version))
        vars.append(self.set_vars("Target arch:", src_config_info.target_arch))
        vars.append(self.set_vars("Target OS:", src_config_info.target_os))
        vars.append(self.set_vars("Machine:", src_config_info.curr_mach))
        vars.append(self.set_vars("Distro:", src_config_info.curr_distro))
        vars.append(self.set_vars("Distro version:", src_params.distro_version))
        vars.append(self.set_vars("Distro version:", src_config_info.distro_version))
        vars.append(self.set_vars("SDK machine:", src_config_info.curr_sdk_machine))
        vars.append(self.set_vars("Tune features:", src_params.tune_pkgarch))
        vars.append(self.set_vars("Tune feature:", src_config_info.tune_pkgarch))
        vars.append(self.set_vars("Layers:", src_config_info.layers))

        for path in src_config_info.layers:
            import os, os.path
            if os.path.exists(path):
                branch = bb.process.run('cd %s; git branch | grep "^* " | tr -d "* "' % path)[0]
                if branch.startswith("fatal:"):
                    branch = "(unknown)"
                if branch:
                    branch = branch.strip('\n')
                f = os.popen('cd %s; git branch 2>&1 | grep "^* " | tr -d "* "' % path)
                if f:
                    branch = f.readline().lstrip('\n').rstrip('\n')
                    vars.append(self.set_vars("Branch:", branch))
                f.close()
                break

        self.set_config_model(vars)
@@ -147,7 +144,7 @@ class BuildDetailsPage (HobPage):
        self.scrolled_view_config = gtk.ScrolledWindow ()
        self.scrolled_view_config.set_policy(gtk.POLICY_NEVER, gtk.POLICY_ALWAYS)
        self.scrolled_view_config.add(self.config_tv)
        self.notebook.append_page(self.scrolled_view_config, "Build configuration")
        self.notebook.append_page(self.scrolled_view_config, gtk.Label("Build configuration"))

        self.failure_tv = BuildFailureTreeView()
        self.failure_model = self.builder.handler.build.model.failure_model()
@@ -155,19 +152,19 @@ class BuildDetailsPage (HobPage):
        self.scrolled_view_failure = gtk.ScrolledWindow ()
        self.scrolled_view_failure.set_policy(gtk.POLICY_NEVER, gtk.POLICY_ALWAYS)
        self.scrolled_view_failure.add(self.failure_tv)
        self.notebook.append_page(self.scrolled_view_failure, "Issues")
        self.notebook.append_page(self.scrolled_view_failure, gtk.Label("Issues"))

        self.build_tv = RunningBuildTreeView(readonly=True, hob=True)
        self.build_tv.set_model(self.builder.handler.build.model)
        self.scrolled_view_build = gtk.ScrolledWindow ()
        self.scrolled_view_build.set_policy(gtk.POLICY_NEVER, gtk.POLICY_ALWAYS)
        self.scrolled_view_build.add(self.build_tv)
        self.notebook.append_page(self.scrolled_view_build, "Log")
        self.notebook.append_page(self.scrolled_view_build, gtk.Label("Log"))

        self.builder.handler.build.model.connect_after("row-changed", self.scroll_to_present_row, self.scrolled_view_build.get_vadjustment(), self.build_tv)

        self.button_box = gtk.HBox(False, 6)
        self.back_button = HobAltButton('<< Back')
        self.back_button = HobAltButton("<< Back to image configuration")
        self.back_button.connect("clicked", self.back_button_clicked_cb)
        self.button_box.pack_start(self.back_button, expand=False, fill=False)

@@ -201,133 +198,6 @@ class BuildDetailsPage (HobPage):
        for child in children:
            self.remove(child)

    def add_build_fail_top_bar(self, actions, log_file=None):
        primary_action = "Edit %s" % actions

        self.notebook.set_page("Issues")

        color = HobColors.ERROR
        build_fail_top = gtk.EventBox()
        #build_fail_top.set_size_request(-1, 200)
        build_fail_top.modify_bg(gtk.STATE_NORMAL, gtk.gdk.color_parse(color))

        build_fail_tab = gtk.Table(14, 46, True)
        build_fail_top.add(build_fail_tab)

        icon = gtk.Image()
        icon_pix_buffer = gtk.gdk.pixbuf_new_from_file(hic.ICON_INDI_ERROR_FILE)
        icon.set_from_pixbuf(icon_pix_buffer)
        build_fail_tab.attach(icon, 1, 4, 0, 6)

        label = gtk.Label()
        label.set_alignment(0.0, 0.5)
        label.set_markup("<span size='x-large'><b>%s</b></span>" % self.title)
        build_fail_tab.attach(label, 4, 26, 0, 6)

        label = gtk.Label()
        label.set_alignment(0.0, 0.5)
        label.set_markup("<span size='medium'>Check the \"Issues\" information for more details</span>")
        build_fail_tab.attach(label, 4, 40, 4, 9)

        # create button 'Edit packages'
        action_button = HobButton(primary_action)
        #action_button.set_size_request(-1, 40)
        action_button.set_tooltip_text("Edit the %s parameters" % actions)
        action_button.connect('clicked', self.failure_primary_action_button_clicked_cb, primary_action)
        build_fail_tab.attach(action_button, 4, 13, 9, 12)

        if log_file:
            open_log_button = HobAltButton("Open log")
            open_log_button.set_relief(gtk.RELIEF_HALF)
            open_log_button.set_tooltip_text("Open the build's log file")
            open_log_button.connect('clicked', self.open_log_button_clicked_cb, log_file)
            build_fail_tab.attach(open_log_button, 14, 23, 9, 12)

        attach_pos = (24 if log_file else 14)
        file_bug_button = HobAltButton('File a bug')
        file_bug_button.set_relief(gtk.RELIEF_HALF)
        file_bug_button.set_tooltip_text("Open the Yocto Project bug tracking website")
        file_bug_button.connect('clicked', self.failure_activate_file_bug_link_cb)
        build_fail_tab.attach(file_bug_button, attach_pos, attach_pos + 9, 9, 12)

        return build_fail_top

    def show_fail_page(self, title):
        self._remove_all_widget()
        self.title = "Hob cannot build your %s" % title

        self.build_fail_bar = self.add_build_fail_top_bar(title, self.builder.current_logfile)

        self.pack_start(self.group_align, expand=True, fill=True)
        self.box_group_area.pack_start(self.build_fail_bar, expand=False, fill=False)
        self.box_group_area.pack_start(self.vbox, expand=True, fill=True)

        self.vbox.pack_start(self.notebook, expand=True, fill=True)
        self.show_all()
        self.back_button.hide()

    def add_build_stop_top_bar(self, action, log_file=None):
        color = HobColors.LIGHT_GRAY
        build_stop_top = gtk.EventBox()
        #build_stop_top.set_size_request(-1, 200)
        build_stop_top.modify_bg(gtk.STATE_NORMAL, gtk.gdk.color_parse(color))
        build_stop_top.set_flags(gtk.CAN_DEFAULT)
        build_stop_top.grab_default()

        build_stop_tab = gtk.Table(11, 46, True)
        build_stop_top.add(build_stop_tab)

        icon = gtk.Image()
        icon_pix_buffer = gtk.gdk.pixbuf_new_from_file(hic.ICON_INFO_HOVER_FILE)
        icon.set_from_pixbuf(icon_pix_buffer)
        build_stop_tab.attach(icon, 1, 4, 0, 6)

        label = gtk.Label()
        label.set_alignment(0.0, 0.5)
        label.set_markup("<span size='x-large'><b>%s</b></span>" % self.title)
        build_stop_tab.attach(label, 4, 26, 0, 6)

        action_button = HobButton("Edit %s" % action)
        action_button.set_size_request(-1, 40)
        if action == "image":
            action_button.set_tooltip_text("Edit the image parameters")
        elif action == "recipes":
            action_button.set_tooltip_text("Edit the included recipes")
        elif action == "packages":
            action_button.set_tooltip_text("Edit the included packages")
        action_button.connect('clicked', self.stop_primary_action_button_clicked_cb, action)
        build_stop_tab.attach(action_button, 4, 13, 6, 9)

        if log_file:
            open_log_button = HobAltButton("Open log")
            open_log_button.set_relief(gtk.RELIEF_HALF)
            open_log_button.set_tooltip_text("Open the build's log file")
            open_log_button.connect('clicked', self.open_log_button_clicked_cb, log_file)
            build_stop_tab.attach(open_log_button, 14, 23, 6, 9)

        attach_pos = (24 if log_file else 14)
        build_button = HobAltButton("Build new image")
        #build_button.set_size_request(-1, 40)
        build_button.set_tooltip_text("Create a new image from scratch")
        build_button.connect('clicked', self.new_image_button_clicked_cb)
        build_stop_tab.attach(build_button, attach_pos, attach_pos + 9, 6, 9)

        return build_stop_top, action_button

    def show_stop_page(self, action):
        self._remove_all_widget()
        self.title = "Build stopped"
        self.build_stop_bar, action_button = self.add_build_stop_top_bar(action, self.builder.current_logfile)

        self.pack_start(self.group_align, expand=True, fill=True)
        self.box_group_area.pack_start(self.build_stop_bar, expand=False, fill=False)
        self.box_group_area.pack_start(self.vbox, expand=True, fill=True)

        self.vbox.pack_start(self.notebook, expand=True, fill=True)
        self.show_all()
        self.back_button.hide()
        return action_button

    def show_page(self, step):
        self._remove_all_widget()
        if step == self.builder.PACKAGE_GENERATING or step == self.builder.FAST_IMAGE_GENERATING:
@@ -361,9 +231,6 @@ class BuildDetailsPage (HobPage):
    def back_button_clicked_cb(self, button):
        self.builder.show_configuration()

    def new_image_button_clicked_cb(self, button):
        self.builder.reset()

    def show_back_button(self):
        self.back_button.show()

@@ -371,7 +238,6 @@ class BuildDetailsPage (HobPage):
        self.builder.stop_build()

    def hide_stop_button(self):
        self.stop_button.set_sensitive(False)
        self.stop_button.hide()

    def scroll_to_present_row(self, model, path, iter, v_adj, treeview):
@@ -382,28 +248,5 @@ class BuildDetailsPage (HobPage):
        if (v_adj.upper <= v_adj.page_size) or (v_adj.value == v_adj.upper - v_adj.page_size):
            treeview.scroll_to_cell(path)

    def show_configurations(self, configurations, params):
        self.config_tv.show(configurations, params)

    def failure_primary_action_button_clicked_cb(self, button, action):
        if "Edit recipes" in action:
            self.builder.show_recipes()
        elif "Edit packages" in action:
            self.builder.show_packages()
        elif "Edit image" in action:
            self.builder.show_configuration()

    def stop_primary_action_button_clicked_cb(self, button, action):
        if "recipes" in action:
            self.builder.show_recipes()
        elif "packages" in action:
            self.builder.show_packages(ask=False)
        elif "image" in action:
            self.builder.show_configuration()

    def open_log_button_clicked_cb(self, button, log_file):
        if log_file:
            os.system("xdg-open /%s" % log_file)

    def failure_activate_file_bug_link_cb(self, button):
        button.child.emit('activate-link', "http://bugzilla.yoctoproject.org")
    def show_configurations(self, configurations):
        self.config_tv.show(configurations)
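The recurring edit in these hunks swaps plain strings for gtk.Label widgets as notebook tab labels: stock PyGTK's gtk.Notebook.append_page() expects a widget, while the HobNotebook wrapper on the other side of the comparison accepted a bare string. The stock pattern, for reference (a minimal sketch, not Hob code):

import gtk

notebook = gtk.Notebook()
page = gtk.ScrolledWindow()
# Stock PyGTK wants a widget (or None) as the tab label:
notebook.append_page(page, gtk.Label("Build configuration"))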
@@ -22,6 +22,7 @@
import gobject
import logging
from bb.ui.crumbs.runningbuild import RunningBuild
from bb.ui.crumbs.hobwidget import hcc

class HobHandler(gobject.GObject):

@@ -41,9 +42,6 @@ class HobHandler(gobject.GObject):
        "command-failed"    : (gobject.SIGNAL_RUN_LAST,
                               gobject.TYPE_NONE,
                               (gobject.TYPE_STRING,)),
        "sanity-failed"     : (gobject.SIGNAL_RUN_LAST,
                               gobject.TYPE_NONE,
                               (gobject.TYPE_STRING, gobject.TYPE_INT)),
        "generating-data"   : (gobject.SIGNAL_RUN_LAST,
                               gobject.TYPE_NONE,
                               ()),
@@ -59,16 +57,10 @@ class HobHandler(gobject.GObject):
        "parsing-completed" : (gobject.SIGNAL_RUN_LAST,
                               gobject.TYPE_NONE,
                               (gobject.TYPE_PYOBJECT,)),
        "recipe-populated"  : (gobject.SIGNAL_RUN_LAST,
                               gobject.TYPE_NONE,
                               ()),
        "package-populated" : (gobject.SIGNAL_RUN_LAST,
                               gobject.TYPE_NONE,
                               ()),
    }

    (GENERATE_CONFIGURATION, GENERATE_RECIPES, GENERATE_PACKAGES, GENERATE_IMAGE, POPULATE_PACKAGEINFO, SANITY_CHECK) = range(6)
    (SUB_PATH_LAYERS, SUB_FILES_DISTRO, SUB_FILES_MACH, SUB_FILES_SDKMACH, SUB_MATCH_CLASS, SUB_PARSE_CONFIG, SUB_SANITY_CHECK, SUB_GNERATE_TGTS, SUB_GENERATE_PKGINFO, SUB_BUILD_RECIPES, SUB_BUILD_IMAGE) = range(11)
    (PARSE_CONFIG, GENERATE_CONFIGURATION, GENERATE_RECIPES, GENERATE_PACKAGES, GENERATE_IMAGE, POPULATE_PACKAGEINFO) = range(6)
    (SUB_PATH_LAYERS, SUB_FILES_DISTRO, SUB_FILES_MACH, SUB_FILES_SDKMACH, SUB_MATCH_CLASS, SUB_PARSE_CONFIG, SUB_GNERATE_TGTS, SUB_GENERATE_PKGINFO, SUB_BUILD_RECIPES, SUB_BUILD_IMAGE) = range(10)

    def __init__(self, server, recipe_model, package_model):
        super(HobHandler, self).__init__()
@@ -99,18 +91,6 @@ class HobHandler(gobject.GObject):
        self.emit("data-generated")
        self.generating = False

    def runCommand(self, commandline):
        try:
            result, error = self.server.runCommand(commandline)
            if error:
                raise Exception("Error running command '%s': %s" % (commandline, error))
            return result
        except Exception as e:
            self.commands_async = []
            self.clear_busy()
            self.emit("command-failed", "Hob Exception - %s" % (str(e)))
            return None

    def run_next_command(self, initcmd=None):
        if initcmd != None:
            self.initcmd = initcmd
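The __gsignals__ table above is the contract between HobHandler and the UI: each entry declares a signal name, its run phase, return type, and argument types. A connection sketch using standard gobject semantics (the callback names and the server/model objects are assumptions, not Hob code):

def on_command_failed(handler, msg):
    # Matches the (gobject.TYPE_STRING,) argument list declared above.
    print("command failed: %s" % msg)

def on_parsing_completed(handler, message):
    # message is the TYPE_PYOBJECT dict emitted by "parsing-completed".
    print("phase: %s" % message["title"])

handler = HobHandler(server, recipe_model, package_model)  # assumes these exist
handler.connect("command-failed", on_command_failed)
handler.connect("parsing-completed", on_parsing_completed)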
@@ -125,77 +105,57 @@ class HobHandler(gobject.GObject):
            return

        if next_command == self.SUB_PATH_LAYERS:
            self.runCommand(["findConfigFilePath", "bblayers.conf"])
            self.server.runCommand(["findConfigFilePath", "bblayers.conf"])
        elif next_command == self.SUB_FILES_DISTRO:
            self.runCommand(["findConfigFiles", "DISTRO"])
            self.server.runCommand(["findConfigFiles", "DISTRO"])
        elif next_command == self.SUB_FILES_MACH:
            self.runCommand(["findConfigFiles", "MACHINE"])
            self.server.runCommand(["findConfigFiles", "MACHINE"])
        elif next_command == self.SUB_FILES_SDKMACH:
            self.runCommand(["findConfigFiles", "MACHINE-SDK"])
            self.server.runCommand(["findConfigFiles", "MACHINE-SDK"])
        elif next_command == self.SUB_MATCH_CLASS:
            self.runCommand(["findFilesMatchingInDir", "rootfs_", "classes"])
            self.server.runCommand(["findFilesMatchingInDir", "rootfs_", "classes"])
        elif next_command == self.SUB_PARSE_CONFIG:
            self.runCommand(["parseConfigurationFiles", "", ""])
            self.server.runCommand(["parseConfigurationFiles", "", ""])
        elif next_command == self.SUB_GNERATE_TGTS:
            self.runCommand(["generateTargetsTree", "classes/image.bbclass", []])
            self.server.runCommand(["generateTargetsTree", "classes/image.bbclass", []])
        elif next_command == self.SUB_GENERATE_PKGINFO:
            self.runCommand(["triggerEvent", "bb.event.RequestPackageInfo()"])
        elif next_command == self.SUB_SANITY_CHECK:
            self.runCommand(["triggerEvent", "bb.event.SanityCheck()"])
            self.server.runCommand(["triggerEvent", "bb.event.RequestPackageInfo()"])
        elif next_command == self.SUB_BUILD_RECIPES:
            self.clear_busy()
            self.building = True
            self.runCommand(["buildTargets", self.recipe_queue, self.default_task])
            self.server.runCommand(["buildTargets", self.recipe_queue, "build"])
            self.recipe_queue = []
        elif next_command == self.SUB_BUILD_IMAGE:
            self.clear_busy()
            self.building = True
            targets = [self.image]
            if self.package_queue:
                self.runCommand(["setVariable", "LINGUAS_INSTALL", ""])
                self.runCommand(["setVariable", "PACKAGE_INSTALL", " ".join(self.package_queue)])
            targets = [self.hob_image]
            self.server.runCommand(["setVariable", "LINGUAS_INSTALL", ""])
            self.server.runCommand(["setVariable", "PACKAGE_INSTALL", " ".join(self.package_queue)])
            if self.toolchain_packages:
                self.runCommand(["setVariable", "TOOLCHAIN_TARGET_TASK", " ".join(self.toolchain_packages)])
                targets.append(self.toolchain)
            self.runCommand(["buildTargets", targets, self.default_task])

    def display_error(self):
        self.clear_busy()
        self.emit("command-failed", self.error_msg)
        self.error_msg = ""
        if self.building:
            self.building = False
                self.server.runCommand(["setVariable", "TOOLCHAIN_TARGET_TASK", " ".join(self.toolchain_packages)])
            targets.append(self.hob_toolchain)
            self.server.runCommand(["buildTargets", targets, "build"])

    def handle_event(self, event):
        if not event:
            return

        if self.building:
            self.current_phase = "building"
            self.build.handle_event(event)

        if isinstance(event, bb.event.PackageInfo):
            self.package_model.populate(event._pkginfolist)
            self.emit("package-populated")
            self.run_next_command()

        elif isinstance(event, bb.event.SanityCheckPassed):
            self.run_next_command()

        elif isinstance(event, bb.event.SanityCheckFailed):
            self.emit("sanity-failed", event._msg, event._network_error)

        elif isinstance(event, logging.LogRecord):
            if not self.building:
                if event.levelno >= logging.ERROR:
                    formatter = bb.msg.BBLogFormatter()
                    msg = formatter.format(event)
                    self.error_msg += msg + '\n'
            if event.levelno >= logging.ERROR:
                self.error_msg += event.msg + '\n'

        elif isinstance(event, bb.event.TargetsTreeGenerated):
            self.current_phase = "data generation"
            if event._model:
                self.recipe_model.populate(event._model)
                self.emit("recipe-populated")
        elif isinstance(event, bb.event.ConfigFilesFound):
            self.current_phase = "configuration lookup"
            var = event._variable
@@ -219,9 +179,32 @@ class HobHandler(gobject.GObject):
        elif isinstance(event, bb.command.CommandCompleted):
            self.current_phase = None
            self.run_next_command()
        # TODO: Currently there are NoProvider issues when generate
        # universe tree dependency for non-x86 architecture.
        # Comment the follow code to enable the build of non-x86
        # architectures in Hob.
        #elif isinstance(event, bb.event.NoProvider):
        #    if event._runtime:
        #        r = "R"
        #    else:
        #        r = ""
        #    if event._dependees:
        #        self.error_msg += " Nothing %sPROVIDES '%s' (but %s %sDEPENDS on or otherwise requires it)" % (r, event._item, ", ".join(event._dependees), r)
        #    else:
        #        self.error_msg += " Nothing %sPROVIDES '%s'" % (r, event._item)
        #    if event._reasons:
        #        for reason in event._reasons:
        #            self.error_msg += " %s" % reason

        #    self.commands_async = []
        #    self.emit("command-failed", self.error_msg)
        #    self.error_msg = ""

        elif isinstance(event, bb.command.CommandFailed):
            self.commands_async = []
            self.display_error()
            self.clear_busy()
            self.emit("command-failed", self.error_msg)
            self.error_msg = ""
        elif isinstance(event, (bb.event.ParseStarted,
                                bb.event.CacheLoadStarted,
                                bb.event.TreeDataPreparationStarted,
@@ -251,101 +234,102 @@ class HobHandler(gobject.GObject):
                message["title"] = "Parsing recipes: "
            self.emit("parsing-completed", message)

        if self.error_msg and not self.commands_async:
            self.display_error()

        return

    def init_cooker(self):
        self.runCommand(["initCooker"])
        self.server.runCommand(["initCooker"])

    def parse_config(self):
        self.commands_async.append(self.SUB_PARSE_CONFIG)
        self.run_next_command(self.PARSE_CONFIG)

    def parse_generate_configuration(self):
        self.commands_async.append(self.SUB_PARSE_CONFIG)
        self.generate_configuration()

    def set_extra_inherit(self, bbclass):
        inherits = self.runCommand(["getVariable", "INHERIT"]) or ""
        inherits = self.server.runCommand(["getVariable", "INHERIT"]) or ""
        inherits = inherits + " " + bbclass
        self.runCommand(["setVariable", "INHERIT", inherits])
        self.server.runCommand(["setVariable", "INHERIT", inherits])

    def set_bblayers(self, bblayers):
        self.runCommand(["setVariable", "BBLAYERS_HOB", " ".join(bblayers)])
        self.server.runCommand(["setVariable", "BBLAYERS", " ".join(bblayers)])

    def set_machine(self, machine):
        if machine:
            self.runCommand(["setVariable", "MACHINE_HOB", machine])
            self.server.runCommand(["setVariable", "MACHINE", machine])

    def set_sdk_machine(self, sdk_machine):
        self.runCommand(["setVariable", "SDKMACHINE_HOB", sdk_machine])
        self.server.runCommand(["setVariable", "SDKMACHINE", sdk_machine])

    def set_image_fstypes(self, image_fstypes):
        self.runCommand(["setVariable", "IMAGE_FSTYPES", image_fstypes])
        self.server.runCommand(["setVariable", "IMAGE_FSTYPES", image_fstypes])

    def set_distro(self, distro):
        self.runCommand(["setVariable", "DISTRO_HOB", distro])
        if distro != "defaultsetup":
            self.server.runCommand(["setVariable", "DISTRO", distro])

    def set_package_format(self, format):
        package_classes = ""
        for pkgfmt in format.split():
            package_classes += ("package_%s" % pkgfmt + " ")
        self.runCommand(["setVariable", "PACKAGE_CLASSES_HOB", package_classes])
        self.server.runCommand(["setVariable", "PACKAGE_CLASSES", package_classes])

    def set_bbthreads(self, threads):
        self.runCommand(["setVariable", "BB_NUMBER_THREADS_HOB", threads])
        self.server.runCommand(["setVariable", "BB_NUMBER_THREADS", threads])

    def set_pmake(self, threads):
        pmake = "-j %s" % threads
        self.runCommand(["setVariable", "PARALLEL_MAKE_HOB", pmake])
        self.server.runCommand(["setVariable", "PARALLEL_MAKE", pmake])

    def set_dl_dir(self, directory):
        self.runCommand(["setVariable", "DL_DIR_HOB", directory])
        self.server.runCommand(["setVariable", "DL_DIR", directory])

    def set_sstate_dir(self, directory):
        self.runCommand(["setVariable", "SSTATE_DIR_HOB", directory])
        self.server.runCommand(["setVariable", "SSTATE_DIR", directory])

    def set_sstate_mirrors(self, url):
        self.runCommand(["setVariable", "SSTATE_MIRRORS_HOB", url])
    def set_sstate_mirror(self, url):
        self.server.runCommand(["setVariable", "SSTATE_MIRROR", url])

    def set_extra_size(self, image_extra_size):
        self.runCommand(["setVariable", "IMAGE_ROOTFS_EXTRA_SPACE", str(image_extra_size)])
        self.server.runCommand(["setVariable", "IMAGE_ROOTFS_EXTRA_SPACE", str(image_extra_size)])

    def set_rootfs_size(self, image_rootfs_size):
        self.runCommand(["setVariable", "IMAGE_ROOTFS_SIZE", str(image_rootfs_size)])
        self.server.runCommand(["setVariable", "IMAGE_ROOTFS_SIZE", str(image_rootfs_size)])

    def set_incompatible_license(self, incompat_license):
        self.runCommand(["setVariable", "INCOMPATIBLE_LICENSE_HOB", incompat_license])
        self.server.runCommand(["setVariable", "INCOMPATIBLE_LICENSE", incompat_license])

    def set_extra_config(self, extra_setting):
        for key in extra_setting.keys():
            value = extra_setting[key]
            self.runCommand(["setVariable", key, value])

    def set_config_filter(self, config_filter):
        self.runCommand(["setConfFilter", config_filter])
            self.server.runCommand(["setVariable", key, value])

    def set_http_proxy(self, http_proxy):
        self.runCommand(["setVariable", "http_proxy", http_proxy])
        self.server.runCommand(["setVariable", "http_proxy", http_proxy])

    def set_https_proxy(self, https_proxy):
        self.runCommand(["setVariable", "https_proxy", https_proxy])
        self.server.runCommand(["setVariable", "https_proxy", https_proxy])

    def set_ftp_proxy(self, ftp_proxy):
        self.runCommand(["setVariable", "ftp_proxy", ftp_proxy])
        self.server.runCommand(["setVariable", "ftp_proxy", ftp_proxy])

    def set_all_proxy(self, all_proxy):
        self.server.runCommand(["setVariable", "all_proxy", all_proxy])

    def set_git_proxy(self, host, port):
        self.runCommand(["setVariable", "GIT_PROXY_HOST", host])
        self.runCommand(["setVariable", "GIT_PROXY_PORT", port])
        self.server.runCommand(["setVariable", "GIT_PROXY_HOST", host])
        self.server.runCommand(["setVariable", "GIT_PROXY_PORT", port])

    def set_cvs_proxy(self, host, port):
        self.runCommand(["setVariable", "CVS_PROXY_HOST", host])
        self.runCommand(["setVariable", "CVS_PROXY_PORT", port])
        self.server.runCommand(["setVariable", "CVS_PROXY_HOST", host])
        self.server.runCommand(["setVariable", "CVS_PROXY_PORT", port])

    def request_package_info(self):
        self.commands_async.append(self.SUB_GENERATE_PKGINFO)
        self.run_next_command(self.POPULATE_PACKAGEINFO)

    def trigger_sanity_check(self):
        self.commands_async.append(self.SUB_SANITY_CHECK)
        self.run_next_command(self.SANITY_CHECK)

    def generate_configuration(self):
        self.commands_async.append(self.SUB_PARSE_CONFIG)
        self.commands_async.append(self.SUB_PATH_LAYERS)
        self.commands_async.append(self.SUB_FILES_DISTRO)
        self.commands_async.append(self.SUB_FILES_MACH)
@@ -357,22 +341,20 @@ class HobHandler(gobject.GObject):
        self.commands_async.append(self.SUB_PARSE_CONFIG)
        self.commands_async.append(self.SUB_GNERATE_TGTS)
        self.run_next_command(self.GENERATE_RECIPES)

    def generate_packages(self, tgts, default_task="build"):

    def generate_packages(self, tgts):
        targets = []
        targets.extend(tgts)
        self.recipe_queue = targets
        self.default_task = default_task
        self.commands_async.append(self.SUB_PARSE_CONFIG)
        self.commands_async.append(self.SUB_BUILD_RECIPES)
        self.run_next_command(self.GENERATE_PACKAGES)

    def generate_image(self, image, toolchain, image_packages=[], toolchain_packages=[], default_task="build"):
        self.image = image
        self.toolchain = toolchain
        self.package_queue = image_packages
    def generate_image(self, tgts, hob_image, hob_toolchain, toolchain_packages=[]):
        self.package_queue = tgts
        self.hob_image = hob_image
        self.hob_toolchain = hob_toolchain
        self.toolchain_packages = toolchain_packages
        self.default_task = default_task
        self.commands_async.append(self.SUB_PARSE_CONFIG)
        self.commands_async.append(self.SUB_BUILD_IMAGE)
        self.run_next_command(self.GENERATE_IMAGE)
@@ -386,23 +368,20 @@ class HobHandler(gobject.GObject):
|
||||
self.building = False
|
||||
|
||||
def cancel_parse(self):
|
||||
self.runCommand(["stateStop"])
|
||||
self.server.runCommand(["stateStop"])
|
||||
|
||||
def cancel_build(self, force=False):
|
||||
if force:
|
||||
# Force the cooker to stop as quickly as possible
|
||||
self.runCommand(["stateStop"])
|
||||
self.server.runCommand(["stateStop"])
|
||||
else:
|
||||
# Wait for tasks to complete before shutting down, this helps
|
||||
# leave the workdir in a usable state
|
||||
self.runCommand(["stateShutdown"])
|
||||
self.server.runCommand(["stateShutdown"])
|
||||
|
||||
def reset_build(self):
|
||||
self.build.reset()
|
||||
|
||||
def get_logfile(self):
|
||||
return self.server.runCommand(["getVariable", "BB_CONSOLELOG"])[0]
|
||||
|
||||
def _remove_redundant(self, string):
|
||||
ret = []
|
||||
for i in string.split():
|
||||
@@ -413,19 +392,19 @@ class HobHandler(gobject.GObject):
def get_parameters(self):
# retrieve the parameters from bitbake
params = {}
params["core_base"] = self.runCommand(["getVariable", "COREBASE"]) or ""
params["core_base"] = self.server.runCommand(["getVariable", "COREBASE"]) or ""
hob_layer = params["core_base"] + "/meta-hob"
params["layer"] = self.runCommand(["getVariable", "BBLAYERS"]) or ""
params["layer"] = self.server.runCommand(["getVariable", "BBLAYERS"]) or ""
if hob_layer not in params["layer"].split():
params["layer"] += (" " + hob_layer)
params["dldir"] = self.runCommand(["getVariable", "DL_DIR"]) or ""
params["machine"] = self.runCommand(["getVariable", "MACHINE"]) or ""
params["distro"] = self.runCommand(["getVariable", "DISTRO"]) or "defaultsetup"
params["pclass"] = self.runCommand(["getVariable", "PACKAGE_CLASSES"]) or ""
params["sstatedir"] = self.runCommand(["getVariable", "SSTATE_DIR"]) or ""
params["sstatemirror"] = self.runCommand(["getVariable", "SSTATE_MIRRORS"]) or ""
params["dldir"] = self.server.runCommand(["getVariable", "DL_DIR"]) or ""
params["machine"] = self.server.runCommand(["getVariable", "MACHINE"]) or ""
params["distro"] = self.server.runCommand(["getVariable", "DISTRO"]) or "defaultsetup"
params["pclass"] = self.server.runCommand(["getVariable", "PACKAGE_CLASSES"]) or ""
params["sstatedir"] = self.server.runCommand(["getVariable", "SSTATE_DIR"]) or ""
params["sstatemirror"] = self.server.runCommand(["getVariable", "SSTATE_MIRROR"]) or ""

num_threads = self.runCommand(["getCpuCount"])
num_threads = self.server.runCommand(["getCpuCount"])
if not num_threads:
num_threads = 1
max_threads = 65536
@@ -438,7 +417,7 @@ class HobHandler(gobject.GObject):
max_threads = 65536
params["max_threads"] = max_threads

bbthread = self.runCommand(["getVariable", "BB_NUMBER_THREADS"])
bbthread = self.server.runCommand(["getVariable", "BB_NUMBER_THREADS"])
if not bbthread:
bbthread = num_threads
else:
@@ -448,7 +427,7 @@ class HobHandler(gobject.GObject):
bbthread = num_threads
params["bbthread"] = bbthread

pmake = self.runCommand(["getVariable", "PARALLEL_MAKE"])
pmake = self.server.runCommand(["getVariable", "PARALLEL_MAKE"])
if not pmake:
pmake = num_threads
elif isinstance(pmake, int):
@@ -460,9 +439,9 @@ class HobHandler(gobject.GObject):
pmake = num_threads
params["pmake"] = "-j %s" % pmake

params["image_addr"] = self.runCommand(["getVariable", "DEPLOY_DIR_IMAGE"]) or ""
params["image_addr"] = self.server.runCommand(["getVariable", "DEPLOY_DIR_IMAGE"]) or ""

image_extra_size = self.runCommand(["getVariable", "IMAGE_ROOTFS_EXTRA_SPACE"])
image_extra_size = self.server.runCommand(["getVariable", "IMAGE_ROOTFS_EXTRA_SPACE"])
if not image_extra_size:
image_extra_size = 0
else:
@@ -472,7 +451,7 @@ class HobHandler(gobject.GObject):
image_extra_size = 0
params["image_extra_size"] = image_extra_size

image_rootfs_size = self.runCommand(["getVariable", "IMAGE_ROOTFS_SIZE"])
image_rootfs_size = self.server.runCommand(["getVariable", "IMAGE_ROOTFS_SIZE"])
if not image_rootfs_size:
image_rootfs_size = 0
else:
@@ -482,7 +461,7 @@ class HobHandler(gobject.GObject):
image_rootfs_size = 0
params["image_rootfs_size"] = image_rootfs_size

image_overhead_factor = self.runCommand(["getVariable", "IMAGE_OVERHEAD_FACTOR"])
image_overhead_factor = self.server.runCommand(["getVariable", "IMAGE_OVERHEAD_FACTOR"])
if not image_overhead_factor:
image_overhead_factor = 1
else:
@@ -492,39 +471,36 @@ class HobHandler(gobject.GObject):
image_overhead_factor = 1
params['image_overhead_factor'] = image_overhead_factor

params["incompat_license"] = self._remove_redundant(self.runCommand(["getVariable", "INCOMPATIBLE_LICENSE"]) or "")
params["sdk_machine"] = self.runCommand(["getVariable", "SDKMACHINE"]) or self.runCommand(["getVariable", "SDK_ARCH"]) or ""
params["incompat_license"] = self._remove_redundant(self.server.runCommand(["getVariable", "INCOMPATIBLE_LICENSE"]) or "")
params["sdk_machine"] = self.server.runCommand(["getVariable", "SDKMACHINE"]) or self.server.runCommand(["getVariable", "SDK_ARCH"]) or ""

params["image_fstypes"] = self._remove_redundant(self.runCommand(["getVariable", "IMAGE_FSTYPES"]) or "")
params["image_fstypes"] = self._remove_redundant(self.server.runCommand(["getVariable", "IMAGE_FSTYPES"]) or "")

params["image_types"] = self._remove_redundant(self.runCommand(["getVariable", "IMAGE_TYPES"]) or "")
params["image_types"] = self._remove_redundant(self.server.runCommand(["getVariable", "IMAGE_TYPES"]) or "")

params["conf_version"] = self.runCommand(["getVariable", "CONF_VERSION"]) or ""
params["lconf_version"] = self.runCommand(["getVariable", "LCONF_VERSION"]) or ""
params["conf_version"] = self.server.runCommand(["getVariable", "CONF_VERSION"]) or ""
params["lconf_version"] = self.server.runCommand(["getVariable", "LCONF_VERSION"]) or ""

params["runnable_image_types"] = self._remove_redundant(self.runCommand(["getVariable", "RUNNABLE_IMAGE_TYPES"]) or "")
params["runnable_machine_patterns"] = self._remove_redundant(self.runCommand(["getVariable", "RUNNABLE_MACHINE_PATTERNS"]) or "")
params["deployable_image_types"] = self._remove_redundant(self.runCommand(["getVariable", "DEPLOYABLE_IMAGE_TYPES"]) or "")
params["kernel_image_type"] = self.runCommand(["getVariable", "KERNEL_IMAGETYPE"]) or ""
params["tmpdir"] = self.runCommand(["getVariable", "TMPDIR"]) or ""
params["distro_version"] = self.runCommand(["getVariable", "DISTRO_VERSION"]) or ""
params["target_os"] = self.runCommand(["getVariable", "TARGET_OS"]) or ""
params["target_arch"] = self.runCommand(["getVariable", "TARGET_ARCH"]) or ""
params["tune_pkgarch"] = self.runCommand(["getVariable", "TUNE_PKGARCH"]) or ""
params["bb_version"] = self.runCommand(["getVariable", "BB_MIN_VERSION"]) or ""
params["runnable_image_types"] = self._remove_redundant(self.server.runCommand(["getVariable", "RUNNABLE_IMAGE_TYPES"]) or "")
params["runnable_machine_patterns"] = self._remove_redundant(self.server.runCommand(["getVariable", "RUNNABLE_MACHINE_PATTERNS"]) or "")
params["deployable_image_types"] = self._remove_redundant(self.server.runCommand(["getVariable", "DEPLOYABLE_IMAGE_TYPES"]) or "")
params["tmpdir"] = self.server.runCommand(["getVariable", "TMPDIR"]) or ""
params["distro_version"] = self.server.runCommand(["getVariable", "DISTRO_VERSION"]) or ""
params["target_os"] = self.server.runCommand(["getVariable", "TARGET_OS"]) or ""
params["target_arch"] = self.server.runCommand(["getVariable", "TARGET_ARCH"]) or ""
params["tune_pkgarch"] = self.server.runCommand(["getVariable", "TUNE_PKGARCH"]) or ""
params["bb_version"] = self.server.runCommand(["getVariable", "BB_MIN_VERSION"]) or ""
params["tune_arch"] = self.server.runCommand(["getVariable", "TUNE_ARCH"]) or ""

params["default_task"] = self.runCommand(["getVariable", "BB_DEFAULT_TASK"]) or "build"
params["git_proxy_host"] = self.server.runCommand(["getVariable", "GIT_PROXY_HOST"]) or ""
params["git_proxy_port"] = self.server.runCommand(["getVariable", "GIT_PROXY_PORT"]) or ""

params["git_proxy_host"] = self.runCommand(["getVariable", "GIT_PROXY_HOST"]) or ""
params["git_proxy_port"] = self.runCommand(["getVariable", "GIT_PROXY_PORT"]) or ""
params["http_proxy"] = self.server.runCommand(["getVariable", "http_proxy"]) or ""
params["ftp_proxy"] = self.server.runCommand(["getVariable", "ftp_proxy"]) or ""
params["https_proxy"] = self.server.runCommand(["getVariable", "https_proxy"]) or ""
params["all_proxy"] = self.server.runCommand(["getVariable", "all_proxy"]) or ""

params["http_proxy"] = self.runCommand(["getVariable", "http_proxy"]) or ""
params["ftp_proxy"] = self.runCommand(["getVariable", "ftp_proxy"]) or ""
params["https_proxy"] = self.runCommand(["getVariable", "https_proxy"]) or ""
params["cvs_proxy_host"] = self.server.runCommand(["getVariable", "CVS_PROXY_HOST"]) or ""
params["cvs_proxy_port"] = self.server.runCommand(["getVariable", "CVS_PROXY_PORT"]) or ""

params["cvs_proxy_host"] = self.runCommand(["getVariable", "CVS_PROXY_HOST"]) or ""
params["cvs_proxy_port"] = self.runCommand(["getVariable", "CVS_PROXY_PORT"]) or ""

params["image_white_pattern"] = self.runCommand(["getVariable", "BBUI_IMAGE_WHITE_PATTERN"]) or ""
params["image_black_pattern"] = self.runCommand(["getVariable", "BBUI_IMAGE_BLACK_PATTERN"]) or ""
return params

@@ -34,7 +34,7 @@ class PackageListModel(gtk.TreeStore):
providing convenience functions to access gtk.TreeModel subclasses which
provide filtered views of the data.
"""
(COL_NAME, COL_VER, COL_REV, COL_RNM, COL_SEC, COL_SUM, COL_RDEP, COL_RPROV, COL_SIZE, COL_BINB, COL_INC, COL_FADE_INC, COL_FONT) = range(13)
(COL_NAME, COL_VER, COL_REV, COL_RNM, COL_SEC, COL_SUM, COL_RDEP, COL_RPROV, COL_SIZE, COL_BINB, COL_INC, COL_FADE_INC) = range(12)

__gsignals__ = {
"package-selection-changed" : (gobject.SIGNAL_RUN_LAST,
@@ -42,8 +42,6 @@ class PackageListModel(gtk.TreeStore):
()),
}

__toolchain_required_packages__ = ["packagegroup-core-standalone-sdk-target", "packagegroup-core-standalone-sdk-target-dbg"]

def __init__(self):

self.contents = None
@@ -65,8 +63,7 @@ class PackageListModel(gtk.TreeStore):
gobject.TYPE_STRING,
gobject.TYPE_STRING,
gobject.TYPE_BOOLEAN,
gobject.TYPE_BOOLEAN,
gobject.TYPE_STRING)
gobject.TYPE_BOOLEAN)


"""
@@ -145,12 +142,6 @@ class PackageListModel(gtk.TreeStore):
self.pkg_path = {}
self.rprov_pkg = {}

def getpkgvalue(pkgdict, key, pkgname, defaultval = None):
value = pkgdict.get('%s_%s' % (key, pkgname), None)
if not value:
value = pkgdict.get(key, defaultval)
return value

for pkginfo in pkginfolist:
pn = pkginfo['PN']
pv = pkginfo['PV']
@@ -163,24 +154,25 @@ class PackageListModel(gtk.TreeStore):
self.COL_INC, False)
self.pn_path[pn] = self.get_path(pniter)

# PKG is always present
pkg = pkginfo['PKG']
pkgv = getpkgvalue(pkginfo, 'PKGV', pkg)
pkgr = getpkgvalue(pkginfo, 'PKGR', pkg)
# PKGSIZE is artificial, will always be overridden with the package name if present
pkgsize = pkginfo.get('PKGSIZE_%s' % pkg, "0")
# PKG_%s is the renamed version
pkg_rename = pkginfo.get('PKG_%s' % pkg, "")
# The rest may be overridden or not
section = getpkgvalue(pkginfo, 'SECTION', pkg, "")
summary = getpkgvalue(pkginfo, 'SUMMARY', pkg, "")
rdep = getpkgvalue(pkginfo, 'RDEPENDS', pkg, "")
rrec = getpkgvalue(pkginfo, 'RRECOMMENDS', pkg, "")
rprov = getpkgvalue(pkginfo, 'RPROVIDES', pkg, "")
pkgv = pkginfo['PKGV']
pkgr = pkginfo['PKGR']
pkgsize = pkginfo['PKGSIZE_%s' % pkg] if 'PKGSIZE_%s' % pkg in pkginfo.keys() else "0"
pkg_rename = pkginfo['PKG_%s' % pkg] if 'PKG_%s' % pkg in pkginfo.keys() else ""
section = pkginfo['SECTION_%s' % pkg] if 'SECTION_%s' % pkg in pkginfo.keys() else ""
summary = pkginfo['SUMMARY_%s' % pkg] if 'SUMMARY_%s' % pkg in pkginfo.keys() else ""
rdep = pkginfo['RDEPENDS_%s' % pkg] if 'RDEPENDS_%s' % pkg in pkginfo.keys() else ""
rrec = pkginfo['RRECOMMENDS_%s' % pkg] if 'RRECOMMENDS_%s' % pkg in pkginfo.keys() else ""
rprov = pkginfo['RPROVIDES_%s' % pkg] if 'RPROVIDES_%s' % pkg in pkginfo.keys() else ""
for i in rprov.split():
self.rprov_pkg[i] = pkg

allow_empty = getpkgvalue(pkginfo, 'ALLOW_EMPTY', pkg, "")
if 'ALLOW_EMPTY_%s' % pkg in pkginfo.keys():
allow_empty = pkginfo['ALLOW_EMPTY_%s' % pkg]
elif 'ALLOW_EMPTY' in pkginfo.keys():
allow_empty = pkginfo['ALLOW_EMPTY']
else:
allow_empty = ""

if pkgsize == "0" and not allow_empty:
continue
@@ -195,7 +187,7 @@ class PackageListModel(gtk.TreeStore):
self.COL_SEC, section, self.COL_SUM, summary,
self.COL_RDEP, rdep + ' ' + rrec,
self.COL_RPROV, rprov, self.COL_SIZE, size,
self.COL_BINB, "", self.COL_INC, False, self.COL_FONT, '10')
self.COL_BINB, "", self.COL_INC, False)

"""
Check whether the item at item_path is included or not
@@ -337,13 +329,13 @@ class PackageListModel(gtk.TreeStore):
set_selected_packages(), some packages will not be set included.
Return the un-set packages list.
"""
def set_selected_packages(self, packagelist, user_selected=False):
def set_selected_packages(self, packagelist):
left = []
binb = 'User Selected' if user_selected else ''
for pn in packagelist:
if pn in self.pkg_path.keys():
path = self.pkg_path[pn]
self.include_item(item_path=path, binb=binb)
self.include_item(item_path=path,
binb="User Selected")
else:
left.append(pn)

@@ -359,7 +351,7 @@ class PackageListModel(gtk.TreeStore):
while child_it:
if self.get_value(child_it, self.COL_INC):
binb = self.get_value(child_it, self.COL_BINB)
if binb == "User Selected":
if not binb or binb == "User Selected":
name = self.get_value(child_it, self.COL_NAME)
packagelist.append(name)
child_it = self.iter_next(child_it)
@@ -397,7 +389,7 @@ class PackageListModel(gtk.TreeStore):
child_it = self.iter_next(child_it)
it = self.iter_next(it)

return list(set(packagelist + self.__toolchain_required_packages__));
return packagelist
"""
Return the selected package size, unit is B.
"""
@@ -461,7 +453,7 @@ class RecipeListModel(gtk.ListStore):
"""
(COL_NAME, COL_DESC, COL_LIC, COL_GROUP, COL_DEPS, COL_BINB, COL_TYPE, COL_INC, COL_IMG, COL_INSTALL, COL_PN, COL_FADE_INC) = range(12)

__custom_image__ = "Create your own image"
__dummy_image__ = "Create your own image"

__gsignals__ = {
"recipe-selection-changed" : (gobject.SIGNAL_RUN_LAST,
@@ -526,24 +518,17 @@ class RecipeListModel(gtk.ListStore):
val2 = model.get_value(iter2, RecipeListModel.COL_INC)
return ((val1 == True) and (val2 == False))

def include_item_sort_func(self, model, iter1, iter2):
val1 = model.get_value(iter1, RecipeListModel.COL_INC)
val2 = model.get_value(iter2, RecipeListModel.COL_INC)
return ((val1 == False) and (val2 == True))

"""
Create, if required, and return a filtered gtk.TreeModelSort
containing only the items which are items specified by filter
"""
def tree_model(self, filter, excluded_items_ahead=False, included_items_ahead=True):
def tree_model(self, filter, excluded_items_ahead=False):
model = self.filter_new()
model.set_visible_func(self.tree_model_filter, filter)

sort = gtk.TreeModelSort(model)
if excluded_items_ahead:
sort.set_default_sort_func(self.exclude_item_sort_func)
elif included_items_ahead:
sort.set_default_sort_func(self.include_item_sort_func)
else:
sort.set_sort_column_id(RecipeListModel.COL_NAME, gtk.SORT_ASCENDING)
sort.set_default_sort_func(None)
@@ -577,13 +562,12 @@ class RecipeListModel(gtk.ListStore):
self.clear()

# dummy image for prompt
self.set(self.append(), self.COL_NAME, self.__custom_image__,
self.COL_DESC, "Use 'Edit image' to customize recipes and packages " \
"to be included in your image ",
self.set(self.append(), self.COL_NAME, self.__dummy_image__,
self.COL_DESC, "",
self.COL_LIC, "", self.COL_GROUP, "",
self.COL_DEPS, "", self.COL_BINB, "",
self.COL_TYPE, "image", self.COL_INC, False,
self.COL_IMG, False, self.COL_INSTALL, "", self.COL_PN, self.__custom_image__)
self.COL_IMG, False, self.COL_INSTALL, "", self.COL_PN, self.__dummy_image__)

for item in event_model["pn"]:
name = item
@@ -595,8 +579,8 @@ class RecipeListModel(gtk.ListStore):

depends = event_model["depends"].get(item, []) + event_model["rdepends-pn"].get(item, [])

if ('packagegroup.bbclass' in " ".join(inherits)):
atype = 'packagegroup'
if ('task-' in name):
atype = 'task'
elif ('image.bbclass' in " ".join(inherits)):
if name != "hob-image":
atype = 'image'
@@ -631,6 +615,22 @@ class RecipeListModel(gtk.ListStore):
def path_included(self, item_path):
return self[item_path][self.COL_INC]

"""
Append a certain image into the combobox
"""
def image_list_append(self, name, deps, install):
# check whether a certain image is there
if not name or self.find_path_for_item(name):
return
it = self.append()
self.set(it, self.COL_NAME, name, self.COL_DESC, "",
self.COL_LIC, "", self.COL_GROUP, "",
self.COL_DEPS, deps, self.COL_BINB, "",
self.COL_TYPE, "image", self.COL_INC, False,
self.COL_IMG, False, self.COL_INSTALL, install,
self.COL_PN, name)
self.pn_path[name] = self.get_path(it)

"""
Add this item, and any of its dependencies, to the image contents
"""
@@ -672,10 +672,6 @@ class RecipeListModel(gtk.ListStore):
self[dep_path][self.COL_BINB] = ', '.join(dep_bin).lstrip(', ')
elif not dep_included:
self.include_item(dep_path, binb=item_name, image_contents=image_contents)
dep_bin = self[item_path][self.COL_BINB].split(', ')
if self[item_path][self.COL_NAME] in dep_bin:
dep_bin.remove(self[item_path][self.COL_NAME])
self[item_path][self.COL_BINB] = ', '.join(dep_bin).lstrip(', ')

def exclude_item(self, item_path):
if not self.path_included(item_path):

@@ -38,7 +38,6 @@ class HobPage (gtk.VBox):
self.title = "Hob -- Image Creator"
else:
self.title = title
self.title_label = gtk.Label()

self.box_group_area = gtk.VBox(False, 12)
self.box_group_area.set_size_request(self.builder_width - 73 - 73, self.builder_height - 88 - 15 - 15)
@@ -47,9 +46,6 @@ class HobPage (gtk.VBox):
self.group_align.add(self.box_group_area)
self.box_group_area.set_homogeneous(False)

def set_title(self, title):
self.title = title
self.title_label.set_markup("<span size='x-large'>%s</span>" % self.title)

def add_onto_top_bar(self, widget = None, padding = 0):
# the top button occupies 1/7 of the page height
@@ -62,9 +58,9 @@ class HobPage (gtk.VBox):

hbox = gtk.HBox()

self.title_label = gtk.Label()
self.title_label.set_markup("<span size='x-large'>%s</span>" % self.title)
hbox.pack_start(self.title_label, expand=False, fill=False, padding=20)
label = gtk.Label()
label.set_markup("<span size='x-large'>%s</span>" % self.title)
hbox.pack_start(label, expand=False, fill=False, padding=20)

if widget:
# add the widget in the event box

@@ -23,7 +23,6 @@ import os
import os.path
import sys
import pango, pangocairo
import cairo
import math

from bb.ui.crumbs.hobcolor import HobColors
@@ -63,6 +62,34 @@ class hic:
ICON_INDI_TICK_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/tick.png'))
ICON_INDI_INFO_FILE = os.path.join(HOB_ICON_BASE_DIR, ('indicators/info.png'))

class hcc:

SUPPORTED_IMAGE_TYPES = {
"jffs2" : ["jffs2"],
"sum.jffs2" : ["sum.jffs2"],
"cramfs" : ["cramfs"],
"ext2" : ["ext2"],
"ext2.gz" : ["ext2.gz"],
"ext2.bz2" : ["ext2.bz2"],
"ext3" : ["ext3"],
"ext3.gz" : ["ext3.gz"],
"ext2.lzma" : ["ext2.lzma"],
"btrfs" : ["btrfs"],
"live" : ["hddimg", "iso"],
"squashfs" : ["squashfs"],
"squashfs-lzma" : ["squashfs-lzma"],
"ubi" : ["ubi"],
"tar" : ["tar"],
"tar.gz" : ["tar.gz"],
"tar.bz2" : ["tar.bz2"],
"tar.xz" : ["tar.xz"],
"cpio" : ["cpio"],
"cpio.gz" : ["cpio.gz"],
"cpio.xz" : ["cpio.xz"],
"vmdk" : ["vmdk"],
"cpio.lzma" : ["cpio.lzma"],
}

class HobViewTable (gtk.VBox):
"""
A VBox to contain the table for different recipe views and package view
@@ -92,7 +119,6 @@ class HobViewTable (gtk.VBox):
self.table_tree.set_headers_clickable(True)
self.table_tree.set_enable_search(True)
self.table_tree.set_rules_hint(True)
self.table_tree.set_enable_tree_lines(True)
self.table_tree.get_selection().set_mode(gtk.SELECTION_SINGLE)
self.toggle_columns = []
self.table_tree.connect("row-activated", self.row_activated_cb)
@@ -114,8 +140,6 @@ class HobViewTable (gtk.VBox):
cell = gtk.CellRendererText()
col.pack_start(cell, True)
col.set_attributes(cell, text=column['col_id'])
if 'col_t_id' in column.keys():
col.add_attribute(cell, 'font', column['col_t_id'])
elif column['col_style'] == 'check toggle':
cell = HobCellRendererToggle()
cell.set_property('activatable', True)
@@ -125,8 +149,6 @@ class HobViewTable (gtk.VBox):
col.pack_end(cell, True)
col.set_attributes(cell, active=column['col_id'])
self.toggle_columns.append(column['col_name'])
if 'col_group' in column.keys():
col.set_cell_data_func(cell, self.set_group_number_cb)
elif column['col_style'] == 'radio toggle':
cell = gtk.CellRendererToggle()
cell.set_property('activatable', True)
@@ -140,11 +162,10 @@ class HobViewTable (gtk.VBox):
cell = gtk.CellRendererText()
col.pack_start(cell, True)
col.set_cell_data_func(cell, self.display_binb_cb, column['col_id'])
if 'col_t_id' in column.keys():
col.add_attribute(cell, 'font', column['col_t_id'])

scroll = gtk.ScrolledWindow()
scroll.set_policy(gtk.POLICY_NEVER, gtk.POLICY_ALWAYS)
scroll.set_shadow_type(gtk.SHADOW_IN)
scroll.add(self.table_tree)
self.pack_start(scroll, True, True, 0)

@@ -153,14 +174,8 @@ class HobViewTable (gtk.VBox):
# Just display the first item
if binb:
bin = binb.split(', ')
total_no = len(bin)
if total_no > 1 and bin[0] == "User Selected":
present_binb = bin[1] + ' (+' + str(total_no) + ')'
else:
present_binb = bin[0] + ' (+' + str(total_no) + ')'
cell.set_property('text', present_binb)
else:
cell.set_property('text', "")
cell.set_property('text', bin[0])

return True

def set_model(self, tree_model):
@@ -189,15 +204,6 @@ class HobViewTable (gtk.VBox):
def stop_cell_fadeinout_cb(self, ctrl, cell, tree):
self.emit("cell-fadeinout-stopped", ctrl, cell, tree)

def set_group_number_cb(self, col, cell, model, iter):
if model and (model.iter_parent(iter) == None):
cell.cell_attr["number_of_children"] = model.iter_n_children(iter)
else:
cell.cell_attr["number_of_children"] = 0

def connect_group_selection(self, cb_func):
self.table_tree.get_selection().connect("changed", cb_func)

"""
A method to calculate a softened value for the colour of widget when in the
provided state.
@@ -219,7 +225,7 @@ def soften_color(widget, state=gtk.STATE_NORMAL):
color.blue = color.blue * blend + style.base[state].blue * (1.0 - blend)
return color.to_string()

class BaseHobButton(gtk.Button):
class HobButton(gtk.Button):
"""
A gtk.Button subclass which follows the visual design of Hob for primary
action buttons
@@ -233,33 +239,24 @@ class BaseHobButton(gtk.Button):
@staticmethod
def style_button(button):
style = button.get_style()
style = gtk.rc_get_style_by_paths(gtk.settings_get_default(), 'gtk-button', 'gtk-button', gobject.TYPE_NONE)
button_color = gtk.gdk.Color(HobColors.ORANGE)
button.modify_bg(gtk.STATE_NORMAL, button_color)
button.modify_bg(gtk.STATE_PRELIGHT, button_color)
button.modify_bg(gtk.STATE_SELECTED, button_color)

button.set_flags(gtk.CAN_DEFAULT)
button.grab_default()

# label = "<span size='x-large'><b>%s</b></span>" % gobject.markup_escape_text(button.get_label())
label = button.get_label()
label = "<span size='x-large'><b>%s</b></span>" % gobject.markup_escape_text(button.get_label())
button.set_label(label)
button.child.set_use_markup(True)

class HobButton(BaseHobButton):
"""
A gtk.Button subclass which follows the visual design of Hob for primary
action buttons

label: the text to display as the button's label
"""
def __init__(self, label):
BaseHobButton.__init__(self, label)
HobButton.style_button(self)

class HobAltButton(BaseHobButton):
class HobAltButton(gtk.Button):
"""
A gtk.Button subclass which has no relief, and so is more discrete
"""
def __init__(self, label):
BaseHobButton.__init__(self, label)
gtk.Button.__init__(self, label)
HobAltButton.style_button(self)

"""
@@ -285,6 +282,14 @@ class HobAltButton(BaseHobButton):
button.set_label("<span size='large' color='%s'><b>%s</b></span>" % (colour, gobject.markup_escape_text(button.text)))
button.child.set_use_markup(True)

@staticmethod
def style_button(button):
button.text = button.get_label()
button.connect("state-changed", HobAltButton.desensitise_on_state_change_cb)
HobAltButton.set_text(button)
button.child.set_use_markup(True)
button.set_relief(gtk.RELIEF_NONE)

class HobImageButton(gtk.Button):
"""
A gtk.Button with an icon and two rows of text, the second of which is
@@ -337,8 +342,7 @@ class HobInfoButton(gtk.EventBox):
def __init__(self, tip_markup, parent=None):
gtk.EventBox.__init__(self)
self.image = gtk.Image()
self.image.set_from_file(
hic.ICON_INFO_DISPLAY_FILE)
self.image.set_from_file(hic.ICON_INFO_DISPLAY_FILE)
self.image.show()
self.add(self.image)

@@ -376,95 +380,363 @@ class HobInfoButton(gtk.EventBox):
def mouse_out_cb(self, widget, event):
self.image.set_from_file(hic.ICON_INFO_DISPLAY_FILE)

class HobIndicator(gtk.DrawingArea):
def __init__(self, count):
gtk.DrawingArea.__init__(self)
# Set no window for transparent background
self.set_has_window(False)
self.set_size_request(38,38)
# We need to pass through button clicks
self.add_events(gtk.gdk.BUTTON_PRESS_MASK | gtk.gdk.BUTTON_RELEASE_MASK)
class HobTabBar(gtk.DrawingArea):
__gsignals__ = {
"blank-area-changed" : (gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(gobject.TYPE_INT,
gobject.TYPE_INT,
gobject.TYPE_INT,
gobject.TYPE_INT,)),

self.connect('expose-event', self.expose)
"tab-switched" : (gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(gobject.TYPE_INT,)),
}

self.count = count
self.color = HobColors.GRAY

def expose(self, widget, event):
if self.count and self.count > 0:
ctx = widget.window.cairo_create()

x, y, w, h = self.allocation

ctx.set_operator(cairo.OPERATOR_OVER)
ctx.set_source_color(gtk.gdk.color_parse(self.color))
ctx.translate(w/2, h/2)
ctx.arc(x, y, min(w,h)/2 - 2, 0, 2*math.pi)
ctx.fill_preserve()

layout = self.create_pango_layout(str(self.count))
textw, texth = layout.get_pixel_size()
x = (w/2)-(textw/2) + x
y = (h/2) - (texth/2) + y
ctx.move_to(x, y)
self.window.draw_layout(self.style.light_gc[gtk.STATE_NORMAL], int(x), int(y), layout)

def set_count(self, count):
self.count = count

def set_active(self, active):
if active:
self.color = HobColors.DEEP_RED
else:
self.color = HobColors.GRAY

class HobTabLabel(gtk.HBox):
def __init__(self, text, count=0):
gtk.HBox.__init__(self, False, 0)
self.indicator = HobIndicator(count)
self.indicator.show()
self.pack_end(self.indicator, False, False)
self.lbl = gtk.Label(text)
self.lbl.set_alignment(0.0, 0.5)
self.lbl.show()
self.pack_end(self.lbl, True, True, 6)

def set_count(self, count):
self.indicator.set_count(count)

def set_active(self, active=True):
self.indicator.set_active(active)

class HobNotebook(gtk.Notebook):
def __init__(self):
gtk.Notebook.__init__(self)
self.set_property('homogeneous', True)
gtk.DrawingArea.__init__(self)
self.children = []

self.pages = []
self.tab_width = 140
self.tab_height = 52
self.tab_x = 10
self.tab_y = 0

self.width = 500
self.height = 53
self.tab_w_ratio = 140 * 1.0/500
self.tab_h_ratio = 52 * 1.0/53
self.set_size_request(self.width, self.height)

self.current_child = None
self.font = self.get_style().font_desc
self.font.set_size(pango.SCALE * 13)
self.update_children_text_layout_and_bg_color()

self.blank_rectangle = None
self.tab_pressed = False

self.set_property('can-focus', True)
self.set_events(gtk.gdk.EXPOSURE_MASK | gtk.gdk.POINTER_MOTION_MASK |
gtk.gdk.BUTTON1_MOTION_MASK | gtk.gdk.BUTTON_PRESS_MASK |
gtk.gdk.BUTTON_RELEASE_MASK)

self.connect("expose-event", self.on_draw)
self.connect("button-press-event", self.button_pressed_cb)
self.connect("button-release-event", self.button_released_cb)
self.connect("query-tooltip", self.query_tooltip_cb)
self.show_all()

def button_released_cb(self, widget, event):
self.tab_pressed = False
self.queue_draw()

def button_pressed_cb(self, widget, event):
if event.type == gtk.gdk._2BUTTON_PRESS:
return

result = False
if self.is_focus() or event.type == gtk.gdk.BUTTON_PRESS:
x, y = event.get_coords()
# check which tab be clicked
for child in self.children:
if (child["x"] < x) and (x < child["x"] + self.tab_width) \
and (child["y"] < y) and (y < child["y"] + self.tab_height):
self.current_child = child
result = True
self.grab_focus()
break

# check the blank area is focus in or not
if (self.blank_rectangle) and (self.blank_rectangle.x > 0) and (self.blank_rectangle.y > 0):
if (self.blank_rectangle.x < x) and (x < self.blank_rectangle.x + self.blank_rectangle.width) \
and (self.blank_rectangle.y < y) and (y < self.blank_rectangle.y + self.blank_rectangle.height):
self.grab_focus()

if result == True:
page = self.current_child["toggled_page"]
self.emit("tab-switched", page)
self.tab_pressed = True
self.queue_draw()

def update_children_size(self):
# calculate the size of tabs
self.tab_width = int(self.width * self.tab_w_ratio)
self.tab_height = int(self.height * self.tab_h_ratio)
for i, child in enumerate(self.children):
child["x"] = self.tab_x + i * self.tab_width
child["y"] = self.tab_y

if self.blank_rectangle:
self.resize_blank_rectangle()

def resize_blank_rectangle(self):
width = self.width - self.tab_width * len(self.children) - self.tab_x
x = self.tab_x + self.tab_width * len(self.children)
hpadding = vpadding = 5
self.blank_rectangle = self.set_blank_size(x + hpadding, self.tab_y + vpadding,
width - 2 * hpadding, self.tab_height - 2 * vpadding)

def update_children_text_layout_and_bg_color(self):
style = self.get_style().copy()
color = style.base[gtk.STATE_NORMAL]
for child in self.children:
pangolayout = self.create_pango_layout(child["title"])
pangolayout.set_font_description(self.font)
child["title_layout"] = pangolayout
child["r"] = color.red
child["g"] = color.green
child["b"] = color.blue

def append_tab_child(self, title, page, tooltip=""):
num = len(self.children) + 1
self.tab_width = self.tab_width * len(self.children) / num

i = 0
for i, child in enumerate(self.children):
child["x"] = self.tab_x + i * self.tab_width
i += 1

x = self.tab_x + i * self.tab_width
y = self.tab_y
pangolayout = self.create_pango_layout(title)
pangolayout.set_font_description(self.font)
color = self.style.base[gtk.STATE_NORMAL]
new_one = {
"x" : x,
"y" : y,
"r" : color.red,
"g" : color.green,
"b" : color.blue,
"title_layout" : pangolayout,
"toggled_page" : page,
"title" : title,
"indicator_show" : False,
"indicator_number" : 0,
"tooltip_markup" : tooltip,
}
self.children.append(new_one)
if tooltip and (not self.props.has_tooltip):
self.props.has_tooltip = True
# set the default current child
if not self.current_child:
self.current_child = new_one

def on_draw(self, widget, event):
cr = widget.window.cairo_create()

self.width = self.allocation.width
self.height = self.allocation.height

self.update_children_size()

self.draw_background(cr)
self.draw_toggled_tab(cr)

for child in self.children:
if child["indicator_show"] == True:
self.draw_indicator(cr, child)

self.draw_tab_text(cr)

def draw_background(self, cr):
style = self.get_style()

if self.is_focus():
cr.set_source_color(style.base[gtk.STATE_SELECTED])
else:
cr.set_source_color(style.base[gtk.STATE_NORMAL])

y = 6
h = self.height - 6 - 1
gap = 1

w = self.children[0]["x"]
cr.set_source_color(gtk.gdk.color_parse(HobColors.GRAY))
cr.rectangle(0, y, w - gap, h) # start rectangle
cr.fill()

cr.set_source_color(style.base[gtk.STATE_NORMAL])
cr.rectangle(w - gap, y, w, h) #first gap
cr.fill()

w = self.tab_width
for child in self.children:
x = child["x"]
cr.set_source_color(gtk.gdk.color_parse(HobColors.GRAY))
cr.rectangle(x, y, w - gap, h) # tab rectangle
cr.fill()
cr.set_source_color(style.base[gtk.STATE_NORMAL])
cr.rectangle(x + w - gap, y, w, h) # gap
cr.fill()

cr.set_source_color(gtk.gdk.color_parse(HobColors.GRAY))
cr.rectangle(x + w, y, self.width - x - w, h) # last rectangle
cr.fill()

def draw_tab_text(self, cr):
style = self.get_style()

for child in self.children:
pangolayout = child["title_layout"]
if pangolayout:
fontw, fonth = pangolayout.get_pixel_size()
# center pos
off_x = (self.tab_width - fontw) / 2
off_y = (self.tab_height - fonth) / 2
x = child["x"] + off_x
y = child["y"] + off_y
if not child == self.current_child:
self.window.draw_layout(self.style.fg_gc[gtk.STATE_NORMAL], int(x), int(y), pangolayout, gtk.gdk.Color(HobColors.WHITE))
else:
self.window.draw_layout(self.style.fg_gc[gtk.STATE_NORMAL], int(x), int(y), pangolayout)

def draw_toggled_tab(self, cr):
if not self.current_child:
return
x = self.current_child["x"]
y = self.current_child["y"]
width = self.tab_width
height = self.tab_height
style = self.get_style()
color = style.base[gtk.STATE_NORMAL]

r = height / 10
if self.tab_pressed == True:
for xoff, yoff, c1, c2 in [(1, 0, HobColors.SLIGHT_DARK, HobColors.DARK), (2, 0, HobColors.GRAY, HobColors.LIGHT_GRAY)]:
cr.set_source_color(gtk.gdk.color_parse(c1))
cr.move_to(x + xoff, y + height + yoff)
cr.line_to(x + xoff, r + yoff)
cr.arc(x + r + xoff, y + r + yoff, r, math.pi, 1.5*math.pi)
cr.move_to(x + r + xoff, y + yoff)
cr.line_to(x + width - r + xoff, y + yoff)
cr.arc(x + width - r + xoff, y + r + yoff, r, 1.5*math.pi, 2*math.pi)
cr.stroke()
cr.set_source_color(gtk.gdk.color_parse(c2))
cr.move_to(x + width + xoff, r + yoff)
cr.line_to(x + width + xoff, y + height + yoff)
cr.line_to(x + xoff, y + height + yoff)
cr.stroke()
x = x + 2
y = y + 2
cr.set_source_rgba(color.red, color.green, color.blue, 1)
cr.move_to(x + r, y)
cr.line_to(x + width - r , y)
cr.arc(x + width - r, y + r, r, 1.5*math.pi, 2*math.pi)
cr.move_to(x + width, r)
cr.line_to(x + width, y + height)
cr.line_to(x, y + height)
cr.line_to(x, r)
cr.arc(x + r, y + r, r, math.pi, 1.5*math.pi)
cr.fill()

def draw_indicator(self, cr, child):
text = ("%d" % child["indicator_number"])
layout = self.create_pango_layout(text)
layout.set_font_description(self.font)
textw, texth = layout.get_pixel_size()
# draw the back round area
tab_x = child["x"]
tab_y = child["y"]
dest_w = int(32 * self.tab_w_ratio)
dest_h = int(32 * self.tab_h_ratio)
if dest_h < self.tab_height:
dest_w = dest_h
# x position is offset(tab_width*3/4 - icon_width/2) + start_pos(tab_x)
x = tab_x + self.tab_width * 3/4 - dest_w/2
y = tab_y + self.tab_height/2 - dest_h/2

r = min(dest_w, dest_h)/2
if not child == self.current_child:
color = cr.set_source_color(gtk.gdk.color_parse(HobColors.DEEP_RED))
else:
color = cr.set_source_color(gtk.gdk.color_parse(HobColors.GRAY))
# check round back area can contain the text or not
back_round_can_contain_width = float(2 * r * 0.707)
if float(textw) > back_round_can_contain_width:
xoff = (textw - int(back_round_can_contain_width)) / 2
cr.move_to(x + r - xoff, y + r + r)
cr.arc((x + r - xoff), (y + r), r, 0.5*math.pi, 1.5*math.pi)
cr.fill() # left half round
cr.rectangle((x + r - xoff), y, 2 * xoff, 2 * r)
cr.fill() # center rectangle
cr.arc((x + r + xoff), (y + r), r, 1.5*math.pi, 0.5*math.pi)
cr.fill() # right half round
else:
cr.arc((x + r), (y + r), r, 0, 2*math.pi)
cr.fill()
# draw the number text
x = x + (dest_w/2)-(textw/2)
y = y + (dest_h/2) - (texth/2)
cr.move_to(x, y)
self.window.draw_layout(self.style.fg_gc[gtk.STATE_NORMAL], int(x), int(y), layout, gtk.gdk.Color(HobColors.WHITE))

def show_indicator_icon(self, child, number):
child["indicator_show"] = True
child["indicator_number"] = number
self.queue_draw()

def hide_indicator_icon(self, child):
child["indicator_show"] = False
self.queue_draw()

def set_blank_size(self, x, y, w, h):
if not self.blank_rectangle or self.blank_rectangle.x != x or self.blank_rectangle.width != w:
self.emit("blank-area-changed", x, y, w, h)

return gtk.gdk.Rectangle(x, y, w, h)

def query_tooltip_cb(self, widget, x, y, keyboardtip, tooltip):
if keyboardtip or (not tooltip):
return False
# check which tab be clicked
for child in self.children:
if (child["x"] < x) and (x < child["x"] + self.tab_width) \
and (child["y"] < y) and (y < child["y"] + self.tab_height):
tooltip.set_markup(child["tooltip_markup"])
return True

return False

class HobNotebook(gtk.VBox):

def __init__(self):
gtk.VBox.__init__(self, False, 0)

self.notebook = gtk.Notebook()
self.notebook.set_property('homogeneous', True)
self.notebook.set_property('show-tabs', False)

self.tabbar = HobTabBar()
self.tabbar.connect("tab-switched", self.tab_switched_cb)
self.notebook.connect("page-added", self.page_added_cb)
self.notebook.connect("page-removed", self.page_removed_cb)

self.search = None
self.search_name = ""

self.connect("switch-page", self.page_changed_cb)
self.tb = gtk.Table(1, 100, False)
self.hbox= gtk.HBox(False, 0)
self.hbox.pack_start(self.tabbar, True, True)
self.tb.attach(self.hbox, 0, 100, 0, 1)

self.pack_start(self.tb, False, False)
self.pack_start(self.notebook)

self.show_all()

def page_changed_cb(self, nb, page, page_num):
for p, lbl in enumerate(self.pages):
if p == page_num:
lbl.set_active()
else:
lbl.set_active(False)

def append_page(self, child, tab_label, tab_tooltip=None):
label = HobTabLabel(tab_label)
if tab_tooltip:
label.set_tooltip_text(tab_tooltip)
label.set_active(False)
self.pages.append(label)
gtk.Notebook.append_page(self, child, label)
def append_page(self, child, tab_label):
self.notebook.set_current_page(self.notebook.append_page(child, tab_label))

def set_entry(self, name="Search:"):
for child in self.tb.get_children():
if child:
self.tb.remove(child)

hbox_entry = gtk.HBox(False, 0)
hbox_entry.show()

self.search = gtk.Entry()
self.search_name = name
style = self.search.get_style()
@@ -475,20 +747,59 @@ class HobNotebook(gtk.Notebook):
self.search.set_icon_from_stock(gtk.ENTRY_ICON_SECONDARY, gtk.STOCK_CLEAR)
self.search.connect("icon-release", self.set_search_entry_clear_cb)
self.search.show()
self.align = gtk.Alignment(xalign=1.0, yalign=0.7)
self.align.add(self.search)
self.align.show()
hbox_entry.pack_end(self.align, False, False)
self.tabbar.resize_blank_rectangle()

self.tb.attach(hbox_entry, 75, 100, 0, 1, xpadding=5)
self.tb.attach(self.hbox, 0, 100, 0, 1)

self.tabbar.connect("blank-area-changed", self.blank_area_resize_cb)
self.search.connect("focus-in-event", self.set_search_entry_editable_cb)
self.search.connect("focus-out-event", self.set_search_entry_reset_cb)
self.set_action_widget(self.search, gtk.PACK_END)

self.tb.show()

def show_indicator_icon(self, title, number):
for child in self.pages:
if child.lbl.get_label() == title:
child.set_count(number)
for child in self.tabbar.children:
if child["toggled_page"] == -1:
continue
if child["title"] == title:
self.tabbar.show_indicator_icon(child, number)

def hide_indicator_icon(self, title):
for child in self.pages:
if child.lbl.get_label() == title:
child.set_count(0)
for child in self.tabbar.children:
if child["toggled_page"] == -1:
continue
if child["title"] == title:
self.tabbar.hide_indicator_icon(child)

def tab_switched_cb(self, widget, page):
self.notebook.set_current_page(page)

def page_added_cb(self, notebook, notebook_child, page):
if not notebook:
return
title = notebook.get_tab_label_text(notebook_child)
label = notebook.get_tab_label(notebook_child)
tooltip_markup = label.get_tooltip_markup()
if not title:
return
for child in self.tabbar.children:
if child["title"] == title:
child["toggled_page"] = page
return
self.tabbar.append_tab_child(title, page, tooltip_markup)

def page_removed_cb(self, notebook, notebook_child, page, title=""):
for child in self.tabbar.children:
if child["title"] == title:
child["toggled_page"] = -1

def blank_area_resize_cb(self, widget, request_x, request_y, request_width, request_height):
self.search.set_size_request(request_width, request_height)

def set_search_entry_editable_cb(self, search, event):
search.set_editable(True)
@@ -508,14 +819,7 @@ class HobNotebook(gtk.Notebook):
self.reset_entry(search)

def set_search_entry_clear_cb(self, search, icon_pos, event):
if search.get_editable() == True:
search.set_text("")

def set_page(self, title):
for child in self.pages:
if child.lbl.get_label() == title:
child.grab_focus()
self.set_current_page(self.page_num(child))
self.reset_entry(search)

class HobWarpCellRendererText(gtk.CellRendererText):
def __init__(self, col_number):
@@ -753,7 +1057,7 @@ class HobCellRendererPixbuf(gtk.CellRendererPixbuf):
if self.control.is_active():
self.control.on_draw_pixbuf_cb(pix, window.cairo_create(), x, y, w, h, True)
else:
self.control.start_run(200, 0, 0, 1000, 150, tree)
self.control.start_run(200, 0, 0, 1000, 200, tree)
else:
self.control.remove_running_cell_area(cell_area)
self.control.on_draw_pixbuf_cb(pix, window.cairo_create(), x, y, w, h, False)
@@ -780,17 +1084,11 @@ class HobCellRendererToggle(gtk.CellRendererToggle):
gtk.CellRendererToggle.__init__(self)
self.ctrl = HobCellRendererController(is_draw_row=True)
self.ctrl.running_mode = self.ctrl.MODE_ONE_SHORT
self.cell_attr = {"fadeout": False, "number_of_children": 0}
self.cell_attr = {"fadeout": False}

def do_render(self, window, widget, background_area, cell_area, expose_area, flags):
if (not self.ctrl) or (not widget):
return

if flags & gtk.CELL_RENDERER_SELECTED:
state = gtk.STATE_SELECTED
else:
state = gtk.STATE_NORMAL

if self.ctrl.is_active():
path = widget.get_path_at_pos(cell_area.x + cell_area.width/2, cell_area.y + cell_area.height/2)
# sometimes the parameters of cell_area will be a negative number,such as pull up down the scroll bar
@@ -799,23 +1097,14 @@ class HobCellRendererToggle(gtk.CellRendererToggle):
path = path[0]
if path in self.ctrl.running_cell_areas:
cr = window.cairo_create()
color = widget.get_style().base[state]
color = gtk.gdk.Color(HobColors.WHITE)

row_x, _, row_width, _ = widget.get_visible_rect()
border_y = self.get_property("ypad")
self.ctrl.on_draw_fadeinout_cb(cr, color, row_x, cell_area.y - border_y, row_width, \
cell_area.height + border_y * 2, self.cell_attr["fadeout"])
# draw number of a group
if self.cell_attr["number_of_children"]:
text = "%d pkg" % self.cell_attr["number_of_children"]
pangolayout = widget.create_pango_layout(text)
textw, texth = pangolayout.get_pixel_size()
x = cell_area.x + (cell_area.width/2) - (textw/2)
y = cell_area.y + (cell_area.height/2) - (texth/2)

widget.style.paint_layout(window, state, True, cell_area, widget, "checkbox", x, y, pangolayout)
else:
return gtk.CellRendererToggle.do_render(self, window, widget, background_area, cell_area, expose_area, flags)
return gtk.CellRendererToggle.do_render(self, window, widget, background_area, cell_area, expose_area, flags)

'''delay: normally delay time is 1000ms
cell_list: whilch cells need to be render

@@ -22,7 +22,6 @@

import gtk
import glib
import re
from bb.ui.crumbs.progressbar import HobProgressBar
from bb.ui.crumbs.hobcolor import HobColors
from bb.ui.crumbs.hobwidget import hic, HobImageButton, HobInfoButton, HobAltButton, HobButton
@@ -34,9 +33,6 @@ from bb.ui.crumbs.hobpages import HobPage
|
||||
#
|
||||
class ImageConfigurationPage (HobPage):
|
||||
|
||||
__dummy_machine__ = "--select a machine--"
|
||||
__dummy_image__ = "--select a base image--"
|
||||
|
||||
def __init__(self, builder):
|
||||
super(ImageConfigurationPage, self).__init__(builder, "Image configuration")
|
||||
|
||||
@@ -135,9 +131,8 @@ class ImageConfigurationPage (HobPage):
|
||||
self._pack_components(pack_config_build_button = True)
|
||||
self.set_config_machine_layout(show_progress_bar = False)
|
||||
self.set_config_baseimg_layout()
|
||||
self.set_rcppkg_layout()
|
||||
self.show_all()
|
||||
if self.builder.recipe_model.get_selected_image() == self.builder.recipe_model.__custom_image__:
|
||||
self.just_bake_button.hide()
|
||||
|
||||
def create_config_machine(self):
|
||||
self.machine_title = gtk.Label()
|
||||
@@ -152,6 +147,7 @@ class ImageConfigurationPage (HobPage):
|
||||
self.machine_title_desc.set_markup(mark)
|
||||
|
||||
self.machine_combo = gtk.combo_box_new_text()
|
||||
self.machine_combo.set_wrap_width(1)
|
||||
self.machine_combo.connect("changed", self.machine_combo_changed_cb)
|
||||
|
||||
icon_file = hic.ICON_LAYERS_DISPLAY_FILE
|
||||
@@ -167,12 +163,13 @@ class ImageConfigurationPage (HobPage):
|
||||
markup += "dev-manual.html#understanding-and-using-layers\">reference manual</a>."
|
||||
self.layer_info_icon = HobInfoButton(markup, self.get_parent())
|
||||
|
||||
# self.progress_box = gtk.HBox(False, 6)
|
||||
self.progress_box = gtk.HBox(False, 6)
|
||||
self.progress_bar = HobProgressBar()
|
||||
# self.progress_box.pack_start(self.progress_bar, expand=True, fill=True)
|
||||
self.progress_box.pack_start(self.progress_bar, expand=True, fill=True)
|
||||
self.stop_button = HobAltButton("Stop")
|
||||
self.stop_button.connect("clicked", self.stop_button_clicked_cb)
|
||||
# self.progress_box.pack_end(stop_button, expand=False, fill=False)
|
||||
self.progress_box.pack_end(self.stop_button, expand=False, fill=False)
|
||||
|
||||
self.machine_separator = gtk.HSeparator()
|
||||
|
||||
def set_config_machine_layout(self, show_progress_bar = False):
|
||||
@@ -182,9 +179,7 @@ class ImageConfigurationPage (HobPage):
|
||||
self.gtable.attach(self.layer_button, 14, 36, 7, 12)
|
||||
self.gtable.attach(self.layer_info_icon, 36, 40, 7, 11)
|
||||
if show_progress_bar:
|
||||
#self.gtable.attach(self.progress_box, 0, 40, 15, 18)
|
||||
self.gtable.attach(self.progress_bar, 0, 37, 15, 18)
|
||||
self.gtable.attach(self.stop_button, 37, 40, 15, 18, 0, 0)
|
||||
self.gtable.attach(self.progress_box, 0, 40, 15, 19)
|
||||
self.gtable.attach(self.machine_separator, 0, 40, 13, 14)
|
||||
|
||||
def create_config_baseimg(self):
|
||||
@@ -201,21 +196,28 @@ class ImageConfigurationPage (HobPage):
|
||||
self.image_title_desc.set_markup(mark)
|
||||
|
||||
self.image_combo = gtk.combo_box_new_text()
|
||||
self.image_combo.set_wrap_width(1)
|
||||
self.image_combo_id = self.image_combo.connect("changed", self.image_combo_changed_cb)
|
||||
|
||||
self.image_desc = gtk.Label()
|
||||
self.image_desc.set_alignment(0.0, 0.5)
|
||||
self.image_desc.set_size_request(256, -1)
|
||||
self.image_desc.set_justify(gtk.JUSTIFY_LEFT)
|
||||
self.image_desc.set_line_wrap(True)
|
||||
|
||||
# button to view recipes
|
||||
icon_file = hic.ICON_RCIPE_DISPLAY_FILE
|
||||
hover_file = hic.ICON_RCIPE_HOVER_FILE
|
||||
self.view_adv_configuration_button = HobImageButton("Advanced configuration",
|
||||
"Select image types, package formats, etc",
|
||||
icon_file, hover_file)
|
||||
self.view_adv_configuration_button.connect("clicked", self.view_adv_configuration_button_clicked_cb)
|
||||
self.view_recipes_button = HobImageButton("View recipes",
|
||||
"Add/remove recipes and tasks",
|
||||
icon_file, hover_file)
|
||||
self.view_recipes_button.connect("clicked", self.view_recipes_button_clicked_cb)
|
||||
|
||||
# button to view packages
|
||||
icon_file = hic.ICON_PACKAGES_DISPLAY_FILE
|
||||
hover_file = hic.ICON_PACKAGES_HOVER_FILE
self.view_packages_button = HobImageButton("View packages",
"Add/remove previously built packages",
icon_file, hover_file)
self.view_packages_button.connect("clicked", self.view_packages_button_clicked_cb)

self.image_separator = gtk.HSeparator()

@@ -223,27 +225,32 @@ class ImageConfigurationPage (HobPage):
self.gtable.attach(self.image_title, 0, 40, 15, 17)
self.gtable.attach(self.image_title_desc, 0, 40, 18, 22)
self.gtable.attach(self.image_combo, 0, 12, 23, 26)
self.gtable.attach(self.image_desc, 0, 12, 27, 33)
self.gtable.attach(self.view_adv_configuration_button, 14, 36, 23, 28)
self.gtable.attach(self.image_desc, 13, 38, 23, 28)
self.gtable.attach(self.image_separator, 0, 40, 35, 36)

def set_rcppkg_layout(self):
self.gtable.attach(self.view_recipes_button, 0, 20, 28, 33)
self.gtable.attach(self.view_packages_button, 20, 40, 28, 33)

def create_config_build_button(self):
# Create the "Build packages" and "Build image" buttons at the bottom
button_box = gtk.HBox(False, 6)

# create button "Build image"
self.just_bake_button = HobButton("Build image")
#self.just_bake_button.set_size_request(205, 49)
self.just_bake_button.set_tooltip_text("Build target image")
self.just_bake_button.connect("clicked", self.just_bake_button_clicked_cb)
button_box.pack_end(self.just_bake_button, expand=False, fill=False)
just_bake_button = HobButton("Build image")
just_bake_button.set_size_request(205, 49)
just_bake_button.set_tooltip_text("Build target image")
just_bake_button.connect("clicked", self.just_bake_button_clicked_cb)
button_box.pack_end(just_bake_button, expand=False, fill=False)

# create button "Edit Image"
self.edit_image_button = HobAltButton("Edit image")
#self.edit_image_button.set_size_request(205, 49)
self.edit_image_button.set_tooltip_text("Edit target image")
self.edit_image_button.connect("clicked", self.edit_image_button_clicked_cb)
button_box.pack_end(self.edit_image_button, expand=False, fill=False)
label = gtk.Label(" or ")
button_box.pack_end(label, expand=False, fill=False)

# create button "Build Packages"
build_packages_button = HobAltButton("Build packages")
build_packages_button.connect("clicked", self.build_packages_button_clicked_cb)
build_packages_button.set_tooltip_text("Build recipes into packages")
button_box.pack_end(build_packages_button, expand=False, fill=False)

return button_box

@@ -252,18 +259,14 @@ class ImageConfigurationPage (HobPage):

def machine_combo_changed_cb(self, machine_combo):
combo_item = machine_combo.get_active_text()
if not combo_item or combo_item == self.__dummy_machine__:
if not combo_item:
return

# remove __dummy_machine__ item from the store list after first user selection
# because it is no longer valid
combo_store = machine_combo.get_model()
if len(combo_store) and (combo_store[0][0] == self.__dummy_machine__):
machine_combo.remove_text(0)

self.builder.configuration.curr_mach = combo_item
if self.machine_combo_changed_by_manual:
self.builder.configuration.clear_selection()
self.builder.configuration.selected_image = None
self.builder.configuration.selected_recipes = []
self.builder.configuration.selected_packages = []
# reset machine_combo_changed_by_manual
self.machine_combo_changed_by_manual = True

@@ -271,13 +274,13 @@ class ImageConfigurationPage (HobPage):
self.builder.populate_recipe_package_info_async()

def update_machine_combo(self):
all_machines = [self.__dummy_machine__] + self.builder.parameters.all_machines
all_machines = self.builder.parameters.all_machines

model = self.machine_combo.get_model()
model.clear()
for machine in all_machines:
self.machine_combo.append_text(machine)
self.machine_combo.set_active(0)
self.machine_combo.set_active(-1)

def switch_machine_combo(self):
self.machine_combo_changed_by_manual = False
@@ -288,15 +291,10 @@ class ImageConfigurationPage (HobPage):
self.machine_combo.set_active(active)
return
active += 1
self.machine_combo.set_active(-1)

if model[0][0] != self.__dummy_machine__:
self.machine_combo.insert_text(0, self.__dummy_machine__)

self.machine_combo.set_active(0)

def update_image_desc(self):
def update_image_desc(self, selected_image):
desc = ""
selected_image = self.image_combo.get_active_text()
if selected_image and selected_image in self.builder.recipe_model.pn_path.keys():
image_path = self.builder.recipe_model.pn_path[selected_image]
image_iter = self.builder.recipe_model.get_iter(image_path)
@@ -313,15 +311,9 @@ class ImageConfigurationPage (HobPage):
def image_combo_changed_cb(self, combo):
self.builder.window_sensitive(False)
selected_image = self.image_combo.get_active_text()
if not selected_image or (selected_image == self.__dummy_image__):
if not selected_image:
return

# remove __dummy_image__ item from the store list after first user selection
# because it is no longer valid
combo_store = combo.get_model()
if len(combo_store) and (combo_store[0][0] == self.__dummy_image__):
combo.remove_text(0)

self.builder.customized = False

selected_recipes = []
@@ -329,16 +321,13 @@ class ImageConfigurationPage (HobPage):
image_path = self.builder.recipe_model.pn_path[selected_image]
image_iter = self.builder.recipe_model.get_iter(image_path)
selected_packages = self.builder.recipe_model.get_value(image_iter, self.builder.recipe_model.COL_INSTALL).split()
self.update_image_desc()
self.update_image_desc(selected_image)

self.builder.recipe_model.reset()
self.builder.package_model.reset()

self.show_baseimg_selected()

if selected_image == self.builder.recipe_model.__custom_image__:
self.just_bake_button.hide()

glib.idle_add(self.image_combo_changed_idle_cb, selected_image, selected_recipes, selected_packages)

def _image_combo_connect_signal(self):
@@ -355,63 +344,32 @@ class ImageConfigurationPage (HobPage):
# populate image combo
filter = {RecipeListModel.COL_TYPE : ['image']}
image_model = recipe_model.tree_model(filter)
image_model.set_sort_column_id(recipe_model.COL_NAME, gtk.SORT_ASCENDING)
active = 0
cnt = 1

white_pattern = []
if self.builder.parameters.image_white_pattern:
for i in self.builder.parameters.image_white_pattern.split():
white_pattern.append(re.compile(i))

black_pattern = []
if self.builder.parameters.image_black_pattern:
for i in self.builder.parameters.image_black_pattern.split():
black_pattern.append(re.compile(i))
black_pattern.append(re.compile("hob-image"))
active = -1
cnt = 0

it = image_model.get_iter_first()
self._image_combo_disconnect_signal()
model = self.image_combo.get_model()
model.clear()
# Set an indicator text in the combo store when it is first opened
self.image_combo.append_text(self.__dummy_image__)
# append and set active
while it:
path = image_model.get_path(it)
it = image_model.iter_next(it)
image_name = image_model[path][recipe_model.COL_NAME]
if image_name == self.builder.recipe_model.__custom_image__:
if image_name == self.builder.recipe_model.__dummy_image__:
continue

if black_pattern:
allow = True
for pattern in black_pattern:
if pattern.search(image_name):
allow = False
break
elif white_pattern:
allow = False
for pattern in white_pattern:
if pattern.search(image_name):
allow = True
break
else:
allow = True

if allow:
self.image_combo.append_text(image_name)
if image_name == selected_image:
active = cnt
cnt = cnt + 1

self.image_combo.append_text(self.builder.recipe_model.__custom_image__)
if selected_image == self.builder.recipe_model.__custom_image__:
self.image_combo.append_text(image_name)
if image_name == selected_image:
active = cnt
cnt = cnt + 1
self.image_combo.append_text(self.builder.recipe_model.__dummy_image__)
if selected_image == self.builder.recipe_model.__dummy_image__:
active = cnt

self.image_combo.set_active(-1)
self.image_combo.set_active(active)

if active != 0:
if active != -1:
self.show_baseimg_selected()

self._image_combo_connect_signal()
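The white/black pattern filtering above is plain re.search matching over space-separated pattern lists, so it can be exercised outside the GUI. A minimal sketch of the same allow/deny decision (the pattern strings here are hypothetical stand-ins for the image_white_pattern/image_black_pattern parameters):

import re

def filter_images(image_names, white_spec="", black_spec=""):
    # compile the space-separated pattern lists, as the combo population does
    white = [re.compile(p) for p in white_spec.split()]
    black = [re.compile(p) for p in black_spec.split()]
    allowed = []
    for name in image_names:
        if black:
            # any blacklist hit hides the image
            allow = not any(p.search(name) for p in black)
        elif white:
            # with only a whitelist, an image must match one pattern to show
            allow = any(p.search(name) for p in white)
        else:
            allow = True
        if allow:
            allowed.append(name)
    return allowed

# 'hob-image' is always appended to the blacklist by the code above
print(filter_images(["core-image-minimal", "hob-image"], black_spec="hob-image"))
# -> ['core-image-minimal']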
@@ -419,20 +377,18 @@ class ImageConfigurationPage (HobPage):
def layer_button_clicked_cb(self, button):
# Create a layer selection dialog
self.builder.show_layer_selection_dialog()

def view_adv_configuration_button_clicked_cb(self, button):
# Create an advanced settings dialog
response, settings_changed = self.builder.show_adv_settings_dialog()
if not response:
return
if settings_changed:
self.builder.reparse_post_adv_settings()

def view_recipes_button_clicked_cb(self, button):
self.builder.show_recipes()

def view_packages_button_clicked_cb(self, button):
self.builder.show_packages()

def just_bake_button_clicked_cb(self, button):
self.builder.just_bake()

def edit_image_button_clicked_cb(self, button):
self.builder.show_recipes()
def build_packages_button_clicked_cb(self, button):
self.builder.build_packages()

def template_button_clicked_cb(self, button):
response, path = self.builder.show_load_template_dialog()
@@ -446,7 +402,7 @@ class ImageConfigurationPage (HobPage):

def settings_button_clicked_cb(self, button):
# Create an advanced settings dialog
response, settings_changed = self.builder.show_simple_settings_dialog()
response, settings_changed = self.builder.show_adv_settings_dialog()
if not response:
return
if settings_changed:

@@ -25,96 +25,34 @@ import gtk
from bb.ui.crumbs.hobcolor import HobColors
from bb.ui.crumbs.hobwidget import hic, HobViewTable, HobAltButton, HobButton
from bb.ui.crumbs.hobpages import HobPage
import subprocess
from bb.ui.crumbs.hig import CrumbsDialog

#
# ImageDetailsPage
#
class ImageDetailsPage (HobPage):

__columns__ = [{
'col_name' : 'Image name',
'col_id' : 0,
'col_style': 'text',
'col_min' : 500,
'col_max' : 500
}, {
'col_name' : 'Image size',
'col_id' : 1,
'col_style': 'text',
'col_min' : 100,
'col_max' : 100
}, {
'col_name' : 'Select',
'col_id' : 2,
'col_style': 'radio toggle',
'col_min' : 100,
'col_max' : 100
}]

class DetailBox (gtk.EventBox):
def __init__(self, widget = None, varlist = None, vallist = None, icon = None, button = None, button2=None, color = HobColors.LIGHT_GRAY):
gtk.EventBox.__init__(self)

# set color
style = self.get_style().copy()
style.bg[gtk.STATE_NORMAL] = self.get_colormap().alloc_color(color, False, False)
self.set_style(style)

self.row = gtk.Table(1, 2, False)
self.row.set_border_width(10)
self.add(self.row)

total_rows = 0
if widget:
total_rows = 10
if varlist and vallist:
# pack the icon and the text on the left
total_rows += len(varlist)
self.table = gtk.Table(total_rows, 20, True)
self.table.set_row_spacings(6)
self.table.set_size_request(100, -1)
self.row.attach(self.table, 0, 1, 0, 1, xoptions=gtk.FILL|gtk.EXPAND, yoptions=gtk.FILL)

colid = 0
rowid = 0
self.line_widgets = {}
if icon:
self.table.attach(icon, colid, colid + 2, 0, 1)
colid = colid + 2
if widget:
self.table.attach(widget, colid, 20, 0, 10)
rowid = 10
if varlist and vallist:
for row in range(rowid, total_rows):
index = row - rowid
self.line_widgets[varlist[index]] = self.text2label(varlist[index], vallist[index])
self.table.attach(self.line_widgets[varlist[index]], colid, 20, row, row + 1)
# pack the button on the right
if button:
self.bbox = gtk.VBox()
self.bbox.pack_start(button, expand=True, fill=False)
if button2:
self.bbox.pack_start(button2, expand=True, fill=False)
self.bbox.set_size_request(150,-1)
self.row.attach(self.bbox, 1, 2, 0, 1, xoptions=gtk.FILL, yoptions=gtk.EXPAND)

def update_line_widgets(self, variable, value):
if len(self.line_widgets) == 0:
return
if not isinstance(self.line_widgets[variable], gtk.Label):
return
self.line_widgets[variable].set_markup(self.format_line(variable, value))

def wrap_line(self, inputs):
# wrap the long text of inputs
wrap_width_chars = 75
outputs = ""
tmps = inputs
less_chars = len(inputs)
while (less_chars - wrap_width_chars) > 0:
less_chars -= wrap_width_chars
outputs += tmps[:wrap_width_chars] + "\n "
tmps = inputs[less_chars:]
outputs += tmps
return outputs

def format_line(self, variable, value):
wraped_value = self.wrap_line(value)
markup = "<span weight=\'bold\'>%s</span>" % variable
markup += "<span weight=\'normal\' foreground=\'#1c1c1c\' font_desc=\'14px\'>%s</span>" % wraped_value
return markup

def text2label(self, variable, value):
# append the name:value to the left box
# such as "Name: hob-core-minimal-variant-2011-12-15-beagleboard"
label = gtk.Label()
label.set_alignment(0.0, 0.5)
label.set_markup(self.format_line(variable, value))
return label

class BuildDetailBox (gtk.EventBox):
def __init__(self, varlist = None, vallist = None, icon = None, color = HobColors.LIGHT_GRAY):
def __init__(self, widget = None, varlist = None, vallist = None, icon = None, button = None, color = HobColors.LIGHT_GRAY):
gtk.EventBox.__init__(self)

# set color
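The wrap_line helper above slices fixed 75-character chunks by hand, and its index arithmetic is easy to get wrong. For comparison only (this is not the code Hob ships), the same effect can be sketched with the standard textwrap module; note textwrap prefers to break at whitespace where it can:

import textwrap

def wrap_line(inputs, wrap_width_chars=75):
    # continuation lines get the same leading spaces the hand-rolled
    # loop appends after each "\n"
    return "\n          ".join(
        textwrap.wrap(inputs, wrap_width_chars, break_long_words=True))

print(wrap_line("x" * 160))   # 75 + 75 + 10 characters over three lines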
@@ -123,30 +61,34 @@ class ImageDetailsPage (HobPage):
self.set_style(style)

self.hbox = gtk.HBox()
self.hbox.set_border_width(10)
self.hbox.set_border_width(15)
self.add(self.hbox)

total_rows = 0
if varlist and vallist:
if widget:
row = 1
elif varlist and vallist:
# pack the icon and the text on the left
total_rows += len(varlist)
self.table = gtk.Table(total_rows, 20, True)
self.table.set_row_spacings(6)
row = len(varlist)
self.table = gtk.Table(row, 20, True)
self.table.set_size_request(100, -1)
self.hbox.pack_start(self.table, expand=True, fill=True, padding=15)

colid = 0
rowid = 0
self.line_widgets = {}
if icon:
self.table.attach(icon, colid, colid + 2, 0, 1)
colid = colid + 2
if varlist and vallist:
for row in range(rowid, total_rows):
index = row - rowid
self.line_widgets[varlist[index]] = self.text2label(varlist[index], vallist[index])
self.table.attach(self.line_widgets[varlist[index]], colid, 20, row, row + 1)

if widget:
self.table.attach(widget, colid, 20, 0, 1)
elif varlist and vallist:
for line in range(0, row):
self.line_widgets[varlist[line]] = self.text2label(varlist[line], vallist[line])
self.table.attach(self.line_widgets[varlist[line]], colid, 20, line, line + 1)

# pack the button on the right
if button:
self.hbox.pack_end(button, expand=False, fill=False)

def update_line_widgets(self, variable, value):
if len(self.line_widgets) == 0:
return
@@ -154,23 +96,9 @@ class ImageDetailsPage (HobPage):
return
self.line_widgets[variable].set_markup(self.format_line(variable, value))

def wrap_line(self, inputs):
# wrap the long text of inputs
wrap_width_chars = 75
outputs = ""
tmps = inputs
less_chars = len(inputs)
while (less_chars - wrap_width_chars) > 0:
less_chars -= wrap_width_chars
outputs += tmps[:wrap_width_chars] + "\n "
tmps = inputs[less_chars:]
outputs += tmps
return outputs

def format_line(self, variable, value):
wraped_value = self.wrap_line(value)
markup = "<span weight=\'bold\'>%s</span>" % variable
markup += "<span weight=\'normal\' foreground=\'#1c1c1c\' font_desc=\'14px\'>%s</span>" % wraped_value
markup += "<span weight=\'normal\' foreground=\'#1c1c1c\' font_desc=\'14px\'>%s</span>" % value
return markup

def text2label(self, variable, value):
@@ -184,7 +112,7 @@ class ImageDetailsPage (HobPage):
def __init__(self, builder):
super(ImageDetailsPage, self).__init__(builder, "Image details")

self.image_store = []
self.image_store = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_BOOLEAN)
self.button_ids = {}
self.details_bottom_buttons = gtk.HBox(False, 6)
self.create_visual_elements()
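Here the page switches image_store from a plain list to a gtk.ListStore whose third, boolean column backs the 'radio toggle' column declared in __columns__. A self-contained sketch of that model shape and the one-row-selected invariant, assuming PyGTK 2.x is installed (the rows are made up):

import pygtk
pygtk.require('2.0')
import gtk
import gobject

# name, printable size, 'Select' flag - the three columns above
store = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_BOOLEAN)
for name, size in [("core-image-minimal.ext3", "8.0 MB"),
                   ("core-image-minimal.tar.bz2", "6.2 MB")]:
    store.set(store.append(), 0, name, 1, size, 2, False)

def select_row(store, target_path):
    # radio semantics: clear every row first, then set the clicked one
    it = store.get_iter_first()
    while it:
        store[store.get_path(it)][2] = False
        it = store.iter_next(it)
    store[target_path][2] = True

select_row(store, (0,))
print([(row[0], row[2]) for row in store])   # only the first row is True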
@@ -229,30 +157,27 @@ class ImageDetailsPage (HobPage):
self.details_bottom_buttons.remove(child)

def show_page(self, step):
self.build_succeeded = (step == self.builder.IMAGE_GENERATED)
build_succeeded = (step == self.builder.IMAGE_GENERATED)
image_addr = self.builder.parameters.image_addr
image_names = self.builder.parameters.image_names
if self.build_succeeded:
if build_succeeded:
machine = self.builder.configuration.curr_mach
base_image = self.builder.recipe_model.get_selected_image()
layers = self.builder.configuration.layers
pkg_num = "%s" % len(self.builder.package_model.get_selected_packages())
log_file = self.builder.current_logfile
else:
pkg_num = "N/A"
log_file = None

# remove
for button_id, button in self.button_ids.items():
button.disconnect(button_id)
self._remove_all_widget()

# repack
self.pack_start(self.details_top_buttons, expand=False, fill=False)
self.pack_start(self.group_align, expand=True, fill=True)

self.build_result = None
if self.build_succeeded and self.builder.current_step == self.builder.IMAGE_GENERATING:
if build_succeeded:
# building is the previous step
icon = gtk.Image()
pixmap_path = hic.ICON_INDI_CONFIRM_FILE
@@ -261,93 +186,49 @@ class ImageDetailsPage (HobPage):
icon.set_from_pixbuf(pix_buffer)
varlist = [""]
vallist = ["Your image is ready"]
self.build_result = self.BuildDetailBox(varlist=varlist, vallist=vallist, icon=icon, color=color)
self.build_result = self.DetailBox(varlist=varlist, vallist=vallist, icon=icon, color=color)
self.box_group_area.pack_start(self.build_result, expand=False, fill=False)

# create the buttons at the bottom first because the buttons are used in apply_button_per_image()
if self.build_succeeded:
if build_succeeded:
self.buttonlist = ["Build new image", "Save as template", "Run image", "Deploy image"]
else: # get to this page from "My images"
self.buttonlist = ["Build new image", "Run image", "Deploy image"]

# Name
self.image_store = []
self.toggled_image = ""
self.image_store.clear()
default_toggled = False
default_image_size = 0
self.num_toggled = 0
i = 0
for image_name in image_names:
image_size = HobPage._size_to_string(os.stat(os.path.join(image_addr, image_name)).st_size)

image_attr = ("run" if (self.test_type_runnable(image_name) and self.test_mach_runnable(image_name)) else \
("deploy" if self.test_deployable(image_name) else ""))
is_toggled = (image_attr != "")

if not self.toggled_image:
if not default_toggled:
default_toggled = (self.test_type_runnable(image_name) and self.test_mach_runnable(image_name)) \
or self.test_deployable(image_name)
if i == (len(image_names) - 1):
is_toggled = True
if is_toggled:
default_toggled = True
self.image_store.set(self.image_store.append(), 0, image_name, 1, image_size, 2, default_toggled)
if default_toggled:
default_image_size = image_size
self.toggled_image = image_name

split_stuff = image_name.split('.')
if "rootfs" in split_stuff:
image_type = image_name[(len(split_stuff[0]) + len(".rootfs") + 1):]
self.create_bottom_buttons(self.buttonlist, image_name)
else:
image_type = image_name[(len(split_stuff[0]) + 1):]

self.image_store.append({'name': image_name,
'type': image_type,
'size': image_size,
'is_toggled': is_toggled,
'action_attr': image_attr,})

self.image_store.set(self.image_store.append(), 0, image_name, 1, image_size, 2, False)
i = i + 1
self.num_toggled += is_toggled

is_runnable = self.create_bottom_buttons(self.buttonlist, self.toggled_image)

# Generated image files info
varlist = ["Name: ", "Files created: ", "Directory: "]
vallist = []

vallist.append(image_name.split('.')[0])
vallist.append(', '.join(fileitem['type'] for fileitem in self.image_store))
vallist.append(image_addr)

image_table = HobViewTable(self.__columns__)
image_table.set_model(self.image_store)
image_table.connect("toggled", self.toggled_cb)
view_files_button = HobAltButton("View files")
view_files_button.connect("clicked", self.view_files_clicked_cb, image_addr)
view_files_button.set_tooltip_text("Open the directory containing the image files")
open_log_button = None
if log_file:
open_log_button = HobAltButton("Open log")
open_log_button.connect("clicked", self.open_log_clicked_cb, log_file)
open_log_button.set_tooltip_text("Open the build's log file")
self.image_detail = self.DetailBox(varlist=varlist, vallist=vallist, button=view_files_button, button2=open_log_button)
self.box_group_area.pack_start(self.image_detail, expand=False, fill=True)

# The default kernel box for the qemu images
self.sel_kernel = ""
self.kernel_detail = None
if 'qemu' in image_name:
self.sel_kernel = self.get_kernel_file_name()

# varlist = ["Kernel: "]
# vallist = []
# vallist.append(self.sel_kernel)

# change_kernel_button = HobAltButton("Change")
# change_kernel_button.connect("clicked", self.change_kernel_cb)
# change_kernel_button.set_tooltip_text("Change qemu kernel file")
# self.kernel_detail = self.DetailBox(varlist=varlist, vallist=vallist, button=change_kernel_button)
# self.box_group_area.pack_start(self.kernel_detail, expand=True, fill=True)
self.image_detail = self.DetailBox(widget=image_table, button=view_files_button)
self.box_group_area.pack_start(self.image_detail, expand=True, fill=True)

# Machine, Base image and Layers
layer_num_limit = 15
varlist = ["Machine: ", "Base image: ", "Layers: "]
vallist = []
self.setting_detail = None
if self.build_succeeded:
if build_succeeded:
vallist.append(machine)
vallist.append(base_image)
i = 0
@@ -371,40 +252,29 @@ class ImageDetailsPage (HobPage):
edit_config_button.set_tooltip_text("Edit machine, base image and recipes")
edit_config_button.connect("clicked", self.edit_config_button_clicked_cb)
self.setting_detail = self.DetailBox(varlist=varlist, vallist=vallist, button=edit_config_button)
self.box_group_area.pack_start(self.setting_detail, expand=True, fill=True)
self.box_group_area.pack_start(self.setting_detail, expand=False, fill=False)

# Packages included, and Total image size
varlist = ["Packages included: ", "Total image size: "]
vallist = []
vallist.append(pkg_num)
vallist.append(default_image_size)
if self.build_succeeded:
if build_succeeded:
edit_packages_button = HobAltButton("Edit packages")
edit_packages_button.set_tooltip_text("Edit the packages included in your image")
edit_packages_button.connect("clicked", self.edit_packages_button_clicked_cb)
else: # get to this page from "My images"
edit_packages_button = None
self.package_detail = self.DetailBox(varlist=varlist, vallist=vallist, button=edit_packages_button)
self.box_group_area.pack_start(self.package_detail, expand=True, fill=True)
self.box_group_area.pack_start(self.package_detail, expand=False, fill=False)

# pack the buttons at the bottom, at this time they are already created.
if self.build_succeeded:
self.box_group_area.pack_end(self.details_bottom_buttons, expand=False, fill=False)
else: # for "My images" page
self.details_separator = gtk.HSeparator()
self.box_group_area.pack_start(self.details_separator, expand=False, fill=False)
self.box_group_area.pack_start(self.details_bottom_buttons, expand=False, fill=False)
self.box_group_area.pack_end(self.details_bottom_buttons, expand=False, fill=False)

self.show_all()
if self.kernel_detail and (not is_runnable):
self.kernel_detail.hide()

def view_files_clicked_cb(self, button, image_addr):
subprocess.call("xdg-open /%s" % image_addr, shell=True)

def open_log_clicked_cb(self, button, log_file):
if log_file:
os.system("xdg-open /%s" % log_file)
os.system("xdg-open /%s" % image_addr)

def refresh_package_detail_box(self, image_size):
self.package_detail.update_line_widgets("Total image size: ", image_size)
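Both file-opening callbacks above build a shell command by string formatting ("xdg-open /%s"). As an aside, the same action can be sketched with an argument list, which sidesteps shell quoting if a path ever contains spaces (illustration only; the file itself uses os.system and subprocess.call with shell strings):

import subprocess

def open_in_file_manager(path):
    # each argv element is passed as-is: no shell, no quoting pitfalls
    subprocess.call(["xdg-open", path])

open_in_file_manager("/tmp")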
@@ -426,8 +296,6 @@ class ImageDetailsPage (HobPage):
return mach_runnable

def test_deployable(self, image_name):
if self.builder.configuration.curr_mach.startswith("qemu"):
return False
deployable = False
for t in self.builder.parameters.deployable_image_types:
if image_name.endswith(t):
@@ -435,121 +303,49 @@ class ImageDetailsPage (HobPage):
break
return deployable

def get_kernel_file_name(self, kernel_addr=""):
kernel_name = ""

if not kernel_addr:
kernel_addr = self.builder.parameters.image_addr

files = [f for f in os.listdir(kernel_addr) if f[0] <> '.']
for check_file in files:
if check_file.endswith(".bin"):
name_splits = check_file.split(".")[0]
if self.builder.parameters.kernel_image_type in name_splits.split("-"):
kernel_name = check_file
break

return kernel_name

def show_builded_images_dialog(self, widget, primary_action=""):
title = primary_action if primary_action else "Your built images"
dialog = CrumbsDialog(title, self.builder,
gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT)
dialog.set_border_width(12)

label = gtk.Label()
label.set_use_markup(True)
label.set_alignment(0.0, 0.5)
label.set_padding(12,0)
if primary_action == "Run image":
label.set_markup("<span font_desc='12'>Select the image file you want to run:</span>")
elif primary_action == "Deploy image":
label.set_markup("<span font_desc='12'>Select the image file you want to deploy:</span>")
else:
label.set_markup("<span font_desc='12'>Select the image file you want to %s</span>" % primary_action)
dialog.vbox.pack_start(label, expand=False, fill=False)

# filter created images by action attribute (deploy or run)
action_attr = ""
action_images = []
for fileitem in self.image_store:
action_attr = fileitem['action_attr']
if (action_attr == 'run' and primary_action == "Run image") \
or (action_attr == 'deploy' and primary_action == "Deploy image"):
action_images.append(fileitem)

# pack the corresponding 'runnable' or 'deploy' radio buttons if there is more than one file.
# By design, a single build result is assumed not to contain both 'deploy' and
# 'runnable' files at the same time.
curr_row = 0
rows = (len(action_images)) if len(action_images) < 10 else 10
table = gtk.Table(rows, 10, True)
table.set_row_spacings(6)
table.set_col_spacing(0, 12)
table.set_col_spacing(5, 12)

sel_parent_btn = None
for fileitem in action_images:
sel_btn = gtk.RadioButton(sel_parent_btn, fileitem['type'])
sel_parent_btn = sel_btn if not sel_parent_btn else sel_parent_btn
sel_btn.set_active(fileitem['is_toggled'])
sel_btn.connect('toggled', self.table_selected_cb, fileitem)
if curr_row < 10:
table.attach(sel_btn, 0, 4, curr_row, curr_row + 1, xpadding=24)
else:
table.attach(sel_btn, 5, 9, curr_row - 10, curr_row - 9, xpadding=24)
curr_row += 1

dialog.vbox.pack_start(table, expand=False, fill=False, padding=6)

button = dialog.add_button("Cancel", gtk.RESPONSE_CANCEL)
HobAltButton.style_button(button)

if primary_action:
button = dialog.add_button(primary_action, gtk.RESPONSE_YES)
HobButton.style_button(button)

dialog.show_all()

response = dialog.run()
dialog.destroy()

if response != gtk.RESPONSE_YES:
def toggled_cb(self, table, cell, path, columnid, tree):
model = tree.get_model()
if not model:
return
iter = model.get_iter_first()
while iter:
rowpath = model.get_path(iter)
model[rowpath][columnid] = False
iter = model.iter_next(iter)

for fileitem in self.image_store:
if fileitem['is_toggled']:
if fileitem['action_attr'] == 'run':
self.builder.runqemu_image(fileitem['name'], self.sel_kernel)
elif fileitem['action_attr'] == 'deploy':
self.builder.deploy_image(fileitem['name'])
model[path][columnid] = True
self.refresh_package_detail_box(model[path][1])

def table_selected_cb(self, tbutton, image):
image['is_toggled'] = tbutton.get_active()
if image['is_toggled']:
self.toggled_image = image['name']
image_name = model[path][0]

def change_kernel_cb(self, widget):
kernel_path = self.builder.show_load_kernel_dialog()
if kernel_path and self.kernel_detail:
import os.path
self.sel_kernel = os.path.basename(kernel_path)
markup = self.kernel_detail.format_line("Kernel: ", self.sel_kernel)
label = ((self.kernel_detail.get_children()[0]).get_children()[0]).get_children()[0]
label.set_markup(markup)
# remove
for button_id, button in self.button_ids.items():
button.disconnect(button_id)
self._remove_all_widget()
# repack
self.pack_start(self.details_top_buttons, expand=False, fill=False)
self.pack_start(self.group_align, expand=True, fill=True)
if self.build_result:
self.box_group_area.pack_start(self.build_result, expand=False, fill=False)
self.box_group_area.pack_start(self.image_detail, expand=True, fill=True)
if self.setting_detail:
self.box_group_area.pack_start(self.setting_detail, expand=False, fill=False)
self.box_group_area.pack_start(self.package_detail, expand=False, fill=False)
self.create_bottom_buttons(self.buttonlist, image_name)
self.box_group_area.pack_end(self.details_bottom_buttons, expand=False, fill=False)
self.show_all()

def create_bottom_buttons(self, buttonlist, image_name):
# Create the buttons at the bottom
created = False
packed = False
self.button_ids = {}
is_runnable = False

# create button "Deploy image"
name = "Deploy image"
if name in buttonlist and self.test_deployable(image_name):
deploy_button = HobButton('Deploy image')
#deploy_button.set_size_request(205, 49)
deploy_button.set_size_request(205, 49)
deploy_button.set_tooltip_text("Burn a live image to a USB drive or flash memory")
deploy_button.set_flags(gtk.CAN_DEFAULT)
button_id = deploy_button.connect("clicked", self.deploy_button_clicked_cb)
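The dialog above forms one radio group by passing the first gtk.RadioButton it creates as the group argument of every later one. A minimal sketch of that grouping pattern, assuming PyGTK 2.x; the image types listed are hypothetical:

import pygtk
pygtk.require('2.0')
import gtk

choices = [".ext3", ".tar.bz2", ".hddimg"]   # hypothetical image types
group = None
buttons = []
for text in choices:
    btn = gtk.RadioButton(group, text)   # first button anchors the group
    if group is None:
        group = btn
    buttons.append(btn)

buttons[0].set_active(True)
print([b.get_label() for b in buttons if b.get_active()])   # exactly one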
@@ -562,15 +358,15 @@ class ImageDetailsPage (HobPage):
if name in buttonlist and self.test_type_runnable(image_name) and self.test_mach_runnable(image_name):
if created == True:
# separator
#label = gtk.Label(" or ")
#self.details_bottom_buttons.pack_end(label, expand=False, fill=False)
label = gtk.Label(" or ")
self.details_bottom_buttons.pack_end(label, expand=False, fill=False)

# create button "Run image"
run_button = HobAltButton("Run image")
else:
# create button "Run image" as the primary button
run_button = HobButton("Run image")
#run_button.set_size_request(205, 49)
run_button.set_size_request(205, 49)
run_button.set_flags(gtk.CAN_DEFAULT)
packed = True
run_button.set_tooltip_text("Start up an image with qemu emulator")
@@ -578,22 +374,25 @@ class ImageDetailsPage (HobPage):
self.button_ids[button_id] = run_button
self.details_bottom_buttons.pack_end(run_button, expand=False, fill=False)
created = True
is_runnable = True

if not packed:
box = gtk.HBox(False, 6)
box.show()
subbox = gtk.HBox(False, 0)
subbox.set_size_request(205, 49)
subbox.show()
box.add(subbox)
self.details_bottom_buttons.pack_end(box, False, False)

name = "Save as template"
if name in buttonlist:
if created == True:
# separator
#label = gtk.Label(" or ")
#self.details_bottom_buttons.pack_end(label, expand=False, fill=False)
label = gtk.Label(" or ")
self.details_bottom_buttons.pack_end(label, expand=False, fill=False)

# create button "Save as template"
save_button = HobAltButton("Save as template")
else:
save_button = HobButton("Save as template")
#save_button.set_size_request(205, 49)
save_button.set_flags(gtk.CAN_DEFAULT)
packed = True
# create button "Save as template"
save_button = HobAltButton("Save as template")
save_button.set_tooltip_text("Save the image configuration for reuse")
button_id = save_button.connect("clicked", self.save_button_clicked_cb)
self.button_ids[button_id] = save_button
@@ -603,39 +402,34 @@ class ImageDetailsPage (HobPage):
name = "Build new image"
if name in buttonlist:
# create button "Build new image"
if packed:
build_new_button = HobAltButton("Build new image")
else:
build_new_button = HobButton("Build new image")
build_new_button.set_flags(gtk.CAN_DEFAULT)
#build_new_button.set_size_request(205, 49)
self.details_bottom_buttons.pack_end(build_new_button, expand=False, fill=False)
build_new_button = HobAltButton("Build new image")
build_new_button.set_tooltip_text("Create a new image from scratch")
button_id = build_new_button.connect("clicked", self.build_new_button_clicked_cb)
self.button_ids[button_id] = build_new_button
self.details_bottom_buttons.pack_start(build_new_button, expand=False, fill=False)

return is_runnable
def _get_selected_image(self):
image_name = ""
iter = self.image_store.get_iter_first()
while iter:
path = self.image_store.get_path(iter)
if self.image_store[path][2]:
image_name = self.image_store[path][0]
break
iter = self.image_store.iter_next(iter)

return image_name

def save_button_clicked_cb(self, button):
self.builder.show_save_template_dialog()

def deploy_button_clicked_cb(self, button):
if self.toggled_image:
if self.num_toggled > 1:
self.set_sensitive(False)
self.show_builded_images_dialog(None, "Deploy image")
self.set_sensitive(True)
else:
self.builder.deploy_image(self.toggled_image)
image_name = self._get_selected_image()
self.builder.deploy_image(image_name)

def run_button_clicked_cb(self, button):
if self.toggled_image:
if self.num_toggled > 1:
self.set_sensitive(False)
self.show_builded_images_dialog(None, "Run image")
self.set_sensitive(True)
else:
self.builder.runqemu_image(self.toggled_image, self.sel_kernel)
image_name = self._get_selected_image()
self.builder.runqemu_image(image_name)

def build_new_button_clicked_cb(self, button):
self.builder.initiate_new_build_async()
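_get_selected_image above is a straightforward TreeModel walk: advance row by row until the boolean 'Select' column is set. The same walk, runnable on its own (assuming PyGTK 2.x, with made-up rows):

import pygtk
pygtk.require('2.0')
import gtk
import gobject

store = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_BOOLEAN)
store.set(store.append(), 0, "img.ext3", 1, "8 MB", 2, False)
store.set(store.append(), 0, "img.hddimg", 1, "9 MB", 2, True)

def get_selected_image(store):
    image_name = ""
    it = store.get_iter_first()
    while it:
        path = store.get_path(it)
        if store[path][2]:            # boolean 'Select' column
            image_name = store[path][0]
            break
        it = store.iter_next(it)
    return image_name

print(get_selected_image(store))      # img.hddimg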
@@ -658,7 +452,7 @@ class ImageDetailsPage (HobPage):

def settings_button_clicked_cb(self, button):
# Create an advanced settings dialog
response, settings_changed = self.builder.show_simple_settings_dialog()
response, settings_changed = self.builder.show_adv_settings_dialog()
if not response:
return
if settings_changed:

@@ -34,8 +34,7 @@ class PackageSelectionPage (HobPage):

pages = [
{
'name' : 'Included packages',
'tooltip' : 'The packages currently included for your image',
'name' : 'Included',
'filter' : { PackageListModel.COL_INC : [True] },
'columns' : [{
'col_name' : 'Package name',
@@ -44,13 +43,6 @@ class PackageSelectionPage (HobPage):
'col_min' : 100,
'col_max' : 300,
'expand' : 'True'
}, {
'col_name' : 'Size',
'col_id' : PackageListModel.COL_SIZE,
'col_style': 'text',
'col_min' : 100,
'col_max' : 300,
'expand' : 'True'
}, {
'col_name' : 'Brought in by',
'col_id' : PackageListModel.COL_BINB,
@@ -58,6 +50,13 @@ class PackageSelectionPage (HobPage):
'col_min' : 100,
'col_max' : 350,
'expand' : 'True'
}, {
'col_name' : 'Size',
'col_id' : PackageListModel.COL_SIZE,
'col_style': 'text',
'col_min' : 100,
'col_max' : 300,
'expand' : 'True'
}, {
'col_name' : 'Included',
'col_id' : PackageListModel.COL_INC,
@@ -67,7 +66,6 @@ class PackageSelectionPage (HobPage):
}]
}, {
'name' : 'All packages',
'tooltip' : 'All packages that have been built',
'filter' : {},
'columns' : [{
'col_name' : 'Package name',
@@ -92,12 +90,9 @@ class PackageSelectionPage (HobPage):
}]
}
]

(INCLUDED,
ALL) = range(2)

def __init__(self, builder):
super(PackageSelectionPage, self).__init__(builder, "Edit packages")
super(PackageSelectionPage, self).__init__(builder, "Packages")

# set invisible members
self.recipe_model = self.builder.recipe_model
@@ -106,16 +101,13 @@ class PackageSelectionPage (HobPage):
# create visual elements
self.create_visual_elements()

def included_clicked_cb(self, button):
self.ins.set_current_page(self.INCLUDED)

def create_visual_elements(self):
self.label = gtk.Label("Packages included: 0\nSelected packages size: 0 MB")
self.eventbox = self.add_onto_top_bar(self.label, 73)
self.pack_start(self.eventbox, expand=False, fill=False)
self.pack_start(self.group_align, expand=True, fill=True)

# set visible members
# set visiable members
self.ins = HobNotebook()
self.tables = [] # we need to modify table when the dialog is shown
# append the tab
@@ -125,37 +117,35 @@ class PackageSelectionPage (HobPage):
filter = page['filter']
tab.set_model(self.package_model.tree_model(filter))
tab.connect("toggled", self.table_toggled_cb, page['name'])
if page['name'] == "Included packages":
if page['name'] == "Included":
tab.connect("button-release-event", self.button_click_cb)
tab.connect("cell-fadeinout-stopped", self.after_fadeout_checkin_include)
self.ins.append_page(tab, page['name'], page['tooltip'])
label = gtk.Label(page['name'])
self.ins.append_page(tab, label)
self.tables.append(tab)

self.ins.set_entry("Search packages:")
# set the search entry for each table
for tab in self.tables:
search_tip = "Enter a package name to find it"
self.ins.search.set_tooltip_text(search_tip)
self.ins.search.props.has_tooltip = True
tab.set_search_entry(0, self.ins.search)

# add all into the dialog
self.box_group_area.pack_start(self.ins, expand=True, fill=True)

self.button_box = gtk.HBox(False, 6)
self.box_group_area.pack_start(self.button_box, expand=False, fill=False)
button_box = gtk.HBox(False, 6)
self.box_group_area.pack_start(button_box, expand=False, fill=False)

self.build_image_button = HobButton('Build image')
#self.build_image_button.set_size_request(205, 49)
self.build_image_button.set_size_request(205, 49)
self.build_image_button.set_tooltip_text("Build target image")
self.build_image_button.set_flags(gtk.CAN_DEFAULT)
self.build_image_button.grab_default()
self.build_image_button.connect("clicked", self.build_image_clicked_cb)
self.button_box.pack_end(self.build_image_button, expand=False, fill=False)
button_box.pack_end(self.build_image_button, expand=False, fill=False)

self.back_button = HobAltButton('Cancel')
self.back_button = HobAltButton("<< Back to image configuration")
self.back_button.connect("clicked", self.back_button_clicked_cb)
self.button_box.pack_end(self.back_button, expand=False, fill=False)
button_box.pack_start(self.back_button, expand=False, fill=False)

def button_click_cb(self, widget, event):
path, col = widget.table_tree.get_cursor()
@@ -165,34 +155,11 @@ class PackageSelectionPage (HobPage):
if binb:
self.builder.show_binb_dialog(binb)

def open_log_clicked_cb(self, button, log_file):
if log_file:
os.system("xdg-open /%s" % log_file)

def show_page(self, log_file):
children = self.button_box.get_children() or []
for child in children:
self.button_box.remove(child)
# re-pack the buttons as requested; add the 'open log' button if the build succeeded
self.button_box.pack_end(self.build_image_button, expand=False, fill=False)
if log_file:
open_log_button = HobAltButton("Open log")
open_log_button.connect("clicked", self.open_log_clicked_cb, log_file)
open_log_button.set_tooltip_text("Open the build's log file")
self.button_box.pack_end(open_log_button, expand=False, fill=False)
self.button_box.pack_end(self.back_button, expand=False, fill=False)
self.show_all()

def build_image_clicked_cb(self, button):
self.builder.build_image()

def back_button_clicked_cb(self, button):
if self.builder.previous_step == self.builder.IMAGE_GENERATED:
self.builder.restore_initial_selected_packages()
self.refresh_selection()
self.builder.show_image_details()
else:
self.builder.show_configuration()
self.builder.show_configuration()

def _expand_all(self):
for tab in self.tables:
@@ -216,15 +183,15 @@ class PackageSelectionPage (HobPage):
image_total_size += (51200 * 1024)
image_total_size_str = HobPage._size_to_string(image_total_size)

self.label.set_label("Packages included: %s\nSelected packages size: %s\nTotal image size: %s" %
self.label.set_text("Packages included: %s\nSelected packages size: %s\nTotal image size: %s" %
(selected_packages_num, selected_packages_size_str, image_total_size_str))
self.ins.show_indicator_icon("Included packages", selected_packages_num)
self.ins.show_indicator_icon("Included", selected_packages_num)

def toggle_item_idle_cb(self, path, view_tree, cell, pagename):
if not self.package_model.path_included(path):
self.package_model.include_item(item_path=path, binb="User Selected")
else:
if pagename == "Included packages":
if pagename == "Included":
self.pre_fadeout_checkout_include(view_tree)
self.package_model.exclude_item(item_path=path)
self.render_fadeout(view_tree, cell)
@@ -234,7 +201,7 @@ class PackageSelectionPage (HobPage):
self.refresh_selection()
if not self.builder.customized:
self.builder.customized = True
self.builder.configuration.selected_image = self.recipe_model.__custom_image__
self.builder.configuration.selected_image = self.recipe_model.__dummy_image__
self.builder.rcppkglist_populated()

self.builder.window_sensitive(True)
@@ -280,7 +247,3 @@ class PackageSelectionPage (HobPage):
def after_fadeout_checkin_include(self, table, ctrl, cell, tree):
tree.set_model(self.package_model.tree_model(self.pages[0]['filter']))
tree.expand_all()

def set_packages_curr_tab(self, curr_page):
self.ins.set_current_page(curr_page)


@@ -35,7 +35,7 @@ class PersistentTooltip(gtk.Window):

markup: some Pango text markup to display in the tooltip
"""
def __init__(self, markup, parent_win=None):
def __init__(self, markup):
gtk.Window.__init__(self, gtk.WINDOW_POPUP)

# Inherit the system theme for a tooltip
@@ -76,10 +76,6 @@ class PersistentTooltip(gtk.Window):
# Ensure a reasonable minimum size
self.set_geometry_hints(self, 100, 50)

# Set this window as a transient window for the parent (main) window
if parent_win:
self.set_transient_for(parent_win)
self.set_destroy_with_parent(True)
# Draw our label and close buttons
hbox = gtk.HBox(False, 0)
hbox.show()
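The hunk above adds a parent_win argument so the tooltip is marked transient for its parent and torn down with it. Those two calls are standard gtk.Window API; a minimal sketch, assuming PyGTK 2.x:

import pygtk
pygtk.require('2.0')
import gtk

main_win = gtk.Window(gtk.WINDOW_TOPLEVEL)
popup = gtk.Window(gtk.WINDOW_POPUP)

# keep the popup stacked above its parent and destroy it with the parent
popup.set_transient_for(main_win)
popup.set_destroy_with_parent(True)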
@@ -125,17 +121,11 @@ class PersistentTooltip(gtk.Window):
style.fg[gtk.STATE_NORMAL] = gtk.gdk.color_parse(val)
self.label.set_style(style)
break # we only care about the tooltip_fg_color

self.label.set_markup(markup)
self.label.show()
bin.add(self.label)
hbox.pack_end(bin, True, True, 6)

# add the original URL display for user reference
if 'a href' in markup:
hbox.set_tooltip_text(self.get_markup_url(markup))
hbox.show()

self.connect("key-press-event", self._catch_esc_cb)

"""
@@ -171,16 +161,3 @@ class PersistentTooltip(gtk.Window):
def hide(self):
self.shown = False
gtk.Window.hide(self)

"""
Called to get the hyperlink URL from markup text.
"""
def get_markup_url(self, markup):
url = "http:"
if markup and type(markup) == str:
s = markup
if 'http:' in s:
import re
url = re.search('(http:[^,\\ "]+)', s).group(0)

return url

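get_markup_url above pulls the first http: link out of the markup with one re.search. The same expression can be exercised directly; this sketch also guards the non-match case, which the original leaves to the 'http:' in s pre-check:

import re

def get_markup_url(markup):
    url = "http:"
    if markup and isinstance(markup, str):
        m = re.search('(http:[^,\\ "]+)', markup)
        if m:            # guard the non-match case explicitly
            url = m.group(0)
    return url

markup = '<span><a href="http://www.yoctoproject.org/documentation">docs</a></span>'
print(get_markup_url(markup))   # http://www.yoctoproject.org/documentation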
@@ -11,9 +11,6 @@ class ProgressBar(gtk.Dialog):
self.vbox.pack_start(self.progress)
self.show_all()

def set_text(self, msg):
self.progress.set_text(msg)

def update(self, x, y):
self.progress.set_fraction(float(x)/float(y))
self.progress.set_text("%2d %%" % (x*100/y))

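One detail worth noting in update() above: the fraction uses float(x)/float(y), while the percent label uses x*100/y, which floors under Python 2. A quick check:

x, y = 1, 3
print(float(x) / float(y))         # 0.333... - what set_fraction() receives
print("%2d %%" % (x * 100 / y))    # '33 %' under Python 2 (floor division)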
@@ -33,10 +33,10 @@ from bb.ui.crumbs.hobpages import HobPage
class RecipeSelectionPage (HobPage):
pages = [
{
'name' : 'Included recipes',
'name' : 'Included',
'tooltip' : 'The recipes currently included for your image',
'filter' : { RecipeListModel.COL_INC : [True],
RecipeListModel.COL_TYPE : ['recipe', 'packagegroup'] },
RecipeListModel.COL_TYPE : ['recipe', 'task'] },
'columns' : [{
'col_name' : 'Recipe name',
'col_id' : RecipeListModel.COL_NAME,
@@ -44,13 +44,6 @@ class RecipeSelectionPage (HobPage):
'col_min' : 100,
'col_max' : 400,
'expand' : 'True'
}, {
'col_name' : 'Group',
'col_id' : RecipeListModel.COL_GROUP,
'col_style': 'text',
'col_min' : 100,
'col_max' : 300,
'expand' : 'True'
}, {
'col_name' : 'Brought in by',
'col_id' : RecipeListModel.COL_BINB,
@@ -58,6 +51,13 @@ class RecipeSelectionPage (HobPage):
'col_min' : 100,
'col_max' : 500,
'expand' : 'True'
}, {
'col_name' : 'Group',
'col_id' : RecipeListModel.COL_GROUP,
'col_style': 'text',
'col_min' : 100,
'col_max' : 300,
'expand' : 'True'
}, {
'col_name' : 'Included',
'col_id' : RecipeListModel.COL_INC,
@@ -67,7 +67,7 @@ class RecipeSelectionPage (HobPage):
}]
}, {
'name' : 'All recipes',
'tooltip' : 'All recipes in your configured layers',
'tooltip' : 'All recipes available in the Yocto Project',
'filter' : { RecipeListModel.COL_TYPE : ['recipe'] },
'columns' : [{
'col_name' : 'Recipe name',
@@ -76,13 +76,6 @@ class RecipeSelectionPage (HobPage):
'col_min' : 100,
'col_max' : 400,
'expand' : 'True'
}, {
'col_name' : 'Group',
'col_id' : RecipeListModel.COL_GROUP,
'col_style': 'text',
'col_min' : 100,
'col_max' : 400,
'expand' : 'True'
}, {
'col_name' : 'License',
'col_id' : RecipeListModel.COL_LIC,
@@ -90,6 +83,13 @@ class RecipeSelectionPage (HobPage):
'col_min' : 100,
'col_max' : 400,
'expand' : 'True'
}, {
'col_name' : 'Group',
'col_id' : RecipeListModel.COL_GROUP,
'col_style': 'text',
'col_min' : 100,
'col_max' : 400,
'expand' : 'True'
}, {
'col_name' : 'Included',
'col_id' : RecipeListModel.COL_INC,
@@ -98,16 +98,23 @@ class RecipeSelectionPage (HobPage):
'col_max' : 100
}]
}, {
'name' : 'Package Groups',
'tooltip' : 'All package groups in your configured layers',
'filter' : { RecipeListModel.COL_TYPE : ['packagegroup'] },
'name' : 'Tasks',
'tooltip' : 'All tasks available in the Yocto Project',
'filter' : { RecipeListModel.COL_TYPE : ['task'] },
'columns' : [{
'col_name' : 'Package group name',
'col_name' : 'Task name',
'col_id' : RecipeListModel.COL_NAME,
'col_style': 'text',
'col_min' : 100,
'col_max' : 400,
'expand' : 'True'
}, {
'col_name' : 'Description',
'col_id' : RecipeListModel.COL_DESC,
'col_style': 'text',
'col_min' : 100,
'col_max' : 400,
'expand' : 'True'
}, {
'col_name' : 'Included',
'col_id' : RecipeListModel.COL_INC,
@@ -117,29 +124,23 @@ class RecipeSelectionPage (HobPage):
}]
}
]

(INCLUDED,
ALL,
TASKS) = range(3)

def __init__(self, builder = None):
super(RecipeSelectionPage, self).__init__(builder, "Step 1 of 2: Edit recipes")
super(RecipeSelectionPage, self).__init__(builder, "Recipes")

# set invisible members
# set invisiable members
self.recipe_model = self.builder.recipe_model

# create visual elements
self.create_visual_elements()

def included_clicked_cb(self, button):
self.ins.set_current_page(self.INCLUDED)

def create_visual_elements(self):
self.eventbox = self.add_onto_top_bar(None, 73)
self.label = gtk.Label()
self.eventbox = self.add_onto_top_bar(self.label, 73)
self.pack_start(self.eventbox, expand=False, fill=False)
self.pack_start(self.group_align, expand=True, fill=True)

# set visible members
# set visiable members
self.ins = HobNotebook()
self.tables = [] # we need to modify the table when the dialog is shown
# append the tabs in order
@@ -149,10 +150,13 @@ class RecipeSelectionPage (HobPage):
filter = page['filter']
tab.set_model(self.recipe_model.tree_model(filter))
tab.connect("toggled", self.table_toggled_cb, page['name'])
if page['name'] == "Included recipes":
if page['name'] == "Included":
tab.connect("button-release-event", self.button_click_cb)
tab.connect("cell-fadeinout-stopped", self.after_fadeout_checkin_include)
self.ins.append_page(tab, page['name'], page['tooltip'])
label = gtk.Label(page['name'])
label.set_selectable(False)
label.set_tooltip_text(page['tooltip'])
self.ins.append_page(tab, label)
self.tables.append(tab)

self.ins.set_entry("Search recipes:")
@@ -170,16 +174,16 @@ class RecipeSelectionPage (HobPage):
self.box_group_area.pack_end(button_box, expand=False, fill=False)

self.build_packages_button = HobButton('Build packages')
#self.build_packages_button.set_size_request(205, 49)
self.build_packages_button.set_size_request(205, 49)
self.build_packages_button.set_tooltip_text("Build selected recipes into packages")
self.build_packages_button.set_flags(gtk.CAN_DEFAULT)
self.build_packages_button.grab_default()
self.build_packages_button.connect("clicked", self.build_packages_clicked_cb)
button_box.pack_end(self.build_packages_button, expand=False, fill=False)

self.back_button = HobAltButton('Cancel')
self.back_button = HobAltButton("<< Back to image configuration")
self.back_button.connect("clicked", self.back_button_clicked_cb)
button_box.pack_end(self.back_button, expand=False, fill=False)
button_box.pack_start(self.back_button, expand=False, fill=False)

def button_click_cb(self, widget, event):
path, col = widget.table_tree.get_cursor()
@@ -198,13 +202,14 @@ class RecipeSelectionPage (HobPage):
def refresh_selection(self):
self.builder.configuration.selected_image = self.recipe_model.get_selected_image()
_, self.builder.configuration.selected_recipes = self.recipe_model.get_selected_recipes()
self.ins.show_indicator_icon("Included recipes", len(self.builder.configuration.selected_recipes))
self.label.set_text("Recipes included: %s" % len(self.builder.configuration.selected_recipes))
self.ins.show_indicator_icon("Included", len(self.builder.configuration.selected_recipes))

def toggle_item_idle_cb(self, path, view_tree, cell, pagename):
if not self.recipe_model.path_included(path):
self.recipe_model.include_item(item_path=path, binb="User Selected", image_contents=False)
else:
if pagename == "Included recipes":
if pagename == "Included":
self.pre_fadeout_checkout_include(view_tree)
self.recipe_model.exclude_item(item_path=path)
self.render_fadeout(view_tree, cell)
@@ -214,7 +219,7 @@ class RecipeSelectionPage (HobPage):
self.refresh_selection()
if not self.builder.customized:
self.builder.customized = True
self.builder.configuration.selected_image = self.recipe_model.__custom_image__
self.builder.configuration.selected_image = self.recipe_model.__dummy_image__
self.builder.rcppkglist_populated()

self.builder.window_sensitive(True)
@@ -236,7 +241,7 @@ class RecipeSelectionPage (HobPage):
# Check out a model based on the column COL_FADE_INC, which saves
# the previous state of column COL_INC before exclude_item runs
filter = { RecipeListModel.COL_FADE_INC : [True],
RecipeListModel.COL_TYPE : ['recipe', 'packagegroup'] }
RecipeListModel.COL_TYPE : ['recipe', 'task'] }
new_model = self.recipe_model.tree_model(filter, excluded_items_ahead=True)
tree.set_model(new_model)

@@ -258,6 +263,3 @@ class RecipeSelectionPage (HobPage):

def after_fadeout_checkin_include(self, table, ctrl, cell, tree):
tree.set_model(self.recipe_model.tree_model(self.pages[0]['filter']))

def set_recipe_curr_tab(self, curr_page):
self.ins.set_current_page(curr_page)

@@ -76,21 +76,12 @@ class RunningBuild (gobject.GObject):
'build-complete' : (gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
()),
'build-aborted' : (gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
()),
'task-started' : (gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT,)),
'log-error' : (gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
()),
'no-provider' : (gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT,)),
'log' : (gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(gobject.TYPE_STRING, gobject.TYPE_PYOBJECT,)),
}
pids_to_task = {}
tasks_to_iter = {}
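RunningBuild declares its signals in a __gsignals__ table (flags, return type, argument types) and raises them with emit(). A self-contained sketch of the same machinery using two of the signals above, assuming the PyGTK-era gobject module:

import gobject

class MiniBuild(gobject.GObject):
    __gsignals__ = {
        # name -> (flags, return type, argument types), as in the table above
        'build-succeeded' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, ()),
        'log'             : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
                             (gobject.TYPE_STRING, gobject.TYPE_PYOBJECT,)),
    }
    def __init__(self):
        gobject.GObject.__init__(self)

def on_log(obj, kind, payload):
    print("%s: %s" % (kind, payload))

build = MiniBuild()
build.connect("log", on_log)
build.emit("log", "info", {"task": "do_compile"})   # -> info: {'task': 'do_compile'}
build.emit("build-succeeded")                       # emitting with no handler is fine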
@@ -99,7 +90,6 @@ class RunningBuild (gobject.GObject):
gobject.GObject.__init__ (self)
self.model = RunningBuildModel()
self.sequential = sequential
self.buildaborted = False

def reset (self):
self.pids_to_task.clear()
@@ -129,8 +119,6 @@ class RunningBuild (gobject.GObject):
parent = self.tasks_to_iter[(package, task)]

if(isinstance(event, logging.LogRecord)):
if event.taskpid == 0 or event.levelno > logging.INFO:
self.emit("log", "handle", event)
# FIXME: this is a hack! More info in Yocto #1433
# http://bugzilla.pokylinux.org/show_bug.cgi?id=1433, temporarily
# mask the error message as it's not informative for the user.
@@ -216,7 +204,6 @@ class RunningBuild (gobject.GObject):
self.tasks_to_iter[(package, task)] = i

elif isinstance(event, bb.build.TaskBase):
self.emit("log", "info", event._message)
current = self.tasks_to_iter[(package, task)]
parent = self.tasks_to_iter[(package, None)]

@@ -284,9 +271,7 @@ class RunningBuild (gobject.GObject):
0))

# Emit the appropriate signal depending on the number of failures
if self.buildaborted:
self.emit ("build-aborted")
elif (failures >= 1):
if (failures >= 1):
self.emit ("build-failed")
else:
self.emit ("build-succeeded")
@@ -298,11 +283,7 @@ class RunningBuild (gobject.GObject):
if pbar:
pbar.set_text(event.msg)

elif isinstance(event, bb.event.DiskFull):
self.buildaborted = True

elif isinstance(event, bb.command.CommandFailed):
self.emit("log", "error", "Command execution failed: %s" % (event.error))
if event.error.startswith("Exited with"):
# If the command fails with an exit code we're done, emit the
# generic signal for the UI to notify the user
@@ -330,24 +311,7 @@ class RunningBuild (gobject.GObject):
elif isinstance(event, bb.event.ParseCompleted) and pbar:
pbar.hide()
#using runqueue events as many as possible to update the progress bar
elif isinstance(event, bb.runqueue.runQueueTaskFailed):
self.emit("log", "error", "Task %s (%s) failed with exit code '%s'" % (event.taskid, event.taskstring, event.exitcode))
elif isinstance(event, bb.runqueue.sceneQueueTaskFailed):
self.emit("log", "warn", "Setscene task %s (%s) failed with exit code '%s' - real task will be run instead" \
% (event.taskid, event.taskstring, event.exitcode))
elif isinstance(event, (bb.runqueue.runQueueTaskStarted, bb.runqueue.sceneQueueTaskStarted)):
if isinstance(event, bb.runqueue.sceneQueueTaskStarted):
self.emit("log", "info", "Running setscene task %d of %d (%s)" % \
(event.stats.completed + event.stats.active + event.stats.failed + 1,
event.stats.total, event.taskstring))
else:
if event.noexec:
tasktype = 'noexec task'
else:
tasktype = 'task'
self.emit("log", "info", "Running %s %s of %s (ID: %s, %s)" % \
(tasktype, event.stats.completed + event.stats.active + event.stats.failed + 1,
event.stats.total, event.taskid, event.taskstring))
message = {}
message["eventname"] = bb.event.getName(event)
num_of_completed = event.stats.completed + event.stats.failed
@@ -356,52 +320,6 @@ class RunningBuild (gobject.GObject):
message["title"] = ""
message["task"] = event.taskstring
self.emit("task-started", message)
elif isinstance(event, bb.event.MultipleProviders):
self.emit("log", "info", "multiple providers are available for %s%s (%s)" \
% (event._is_runtime and "runtime " or "", event._item, ", ".join(event._candidates)))
self.emit("log", "info", "consider defining a PREFERRED_PROVIDER entry to match %s" % (event._item))
elif isinstance(event, bb.event.NoProvider):
msg = ""
if event._runtime:
r = "R"
else:
r = ""
if event._dependees:
msg = "Nothing %sPROVIDES '%s' (but %s %sDEPENDS on or otherwise requires it)\n" % (r, event._item, ", ".join(event._dependees), r)
else:
msg = "Nothing %sPROVIDES '%s'\n" % (r, event._item)
if event._reasons:
for reason in event._reasons:
msg += ("%s\n" % reason)
self.emit("no-provider", msg)
self.emit("log", "error", msg)
elif isinstance(event, bb.event.LogExecTTY):
icon = "dialog-warning"
color = HobColors.WARNING
if self.sequential or not parent:
tree_add = self.model.append
else:
tree_add = self.model.prepend
tree_add(parent,
(None,
package,
task,
event.msg,
icon,
color,
0))
else:
if not isinstance(event, (bb.event.BuildBase,
bb.event.StampUpdate,
bb.event.ConfigParsed,
bb.event.RecipeParsed,
bb.event.RecipePreFinalise,
bb.runqueue.runQueueEvent,
bb.runqueue.runQueueExitWait,
bb.event.OperationStarted,
bb.event.OperationCompleted,
bb.event.OperationProgress)):
self.emit("log", "error", "Unknown event: %s" % (event.error if hasattr(event, 'error') else 'error'))

return


@@ -1,85 +0,0 @@
#!/usr/bin/env python
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2012 Intel Corporation
#
# Authored by Bogdan Marinescu <bogdan.a.marinescu@intel.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import gtk, gobject
from bb.ui.crumbs.progressbar import HobProgressBar
from bb.ui.crumbs.hobwidget import hic
from bb.ui.crumbs.hobpages import HobPage

#
# SanityCheckPage
#
class SanityCheckPage (HobPage):

def __init__(self, builder):
super(SanityCheckPage, self).__init__(builder)
self.running = False
self.create_visual_elements()
self.show_all()

def make_label(self, text, bold=True):
label = gtk.Label()
label.set_alignment(0.0, 0.5)
mark = "<span %s>%s</span>" % (self.span_tag('x-large', 'bold') if bold else self.span_tag('medium'), text)
label.set_markup(mark)
return label

def start(self):
if not self.running:
self.running = True
gobject.timeout_add(100, self.timer_func)

def stop(self):
self.running = False

def is_running(self):
return self.running

def timer_func(self):
self.progress_bar.pulse()
return self.running

def create_visual_elements(self):
# Table'd layout. 'rows' and 'cols' give the table size
rows, cols = 30, 50
self.table = gtk.Table(rows, cols, True)
self.pack_start(self.table, expand=False, fill=False)
sx, sy = 2, 2
# 'info' icon
image = gtk.Image()
image.set_from_file(hic.ICON_INFO_DISPLAY_FILE)
self.table.attach(image, sx, sx + 2, sy, sy + 3 )
image.show()
# 'Checking' message
label = self.make_label('Hob is checking for correct build system setup')
self.table.attach(label, sx + 2, cols, sy, sy + 3, xpadding=5 )
label.show()
# 'Shouldn't take long' message.
label = self.make_label("The check shouldn't take long.", False)
self.table.attach(label, sx + 2, cols, sy + 3, sy + 4, xpadding=5)
label.show()
# Progress bar
self.progress_bar = HobProgressBar()
self.table.attach(self.progress_bar, sx + 2, cols - 3, sy + 5, sy + 7, xpadding=5)
self.progress_bar.show()
# All done
self.table.show()

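The deleted SanityCheckPage drives its progress bar from a 100 ms gobject timer: gobject.timeout_add keeps calling the callback for as long as it returns True, so timer_func returning self.running doubles as the stop switch. The idiom in isolation (gtk.ProgressBar standing in for HobProgressBar; PulsePage is an illustrative name):

import gtk, gobject

class PulsePage(object):
    def __init__(self):
        self.running = False
        self.progress_bar = gtk.ProgressBar()

    def start(self):
        if not self.running:
            self.running = True
            # call timer_func every 100 ms until it returns False
            gobject.timeout_add(100, self.timer_func)

    def stop(self):
        self.running = False

    def timer_func(self):
        self.progress_bar.pulse()   # bounce the bar; no known endpoint
        return self.running         # returning False cancels the timeout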
@@ -101,19 +101,7 @@ class HobTemplateFile(ConfigFile):
return self.dictionary[var]
else:
return ""

def getVersion(self):
contents = ConfigFile.readFile(self)

pattern = "^\s*(\S+)\s*=\s*(\".*?\")"

for line in contents:
match = re.search(pattern, line)
if match:
if match.group(1) == "VERSION":
return match.group(2).strip('"')
return None


def load(self):
contents = ConfigFile.readFile(self)
self.dictionary.clear()
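The getVersion pattern above matches simple NAME = "value" assignments and keeps the quoted value in group 2; a quick standalone illustration with the same regular expression:

import re

pattern = "^\s*(\S+)\s*=\s*(\".*?\")"
line = 'VERSION = "1.2"'
match = re.search(pattern, line)
if match and match.group(1) == "VERSION":
    print(match.group(2).strip('"'))   # -> 1.2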
@@ -137,7 +125,7 @@ class RecipeFile(ConfigFile):

class TemplateMgr(gobject.GObject):

__gLocalVars__ = ["MACHINE", "PACKAGE_CLASSES", "DISTRO", "DL_DIR", "SSTATE_DIR", "SSTATE_MIRRORS", "PARALLEL_MAKE", "BB_NUMBER_THREADS", "CONF_VERSION"]
__gLocalVars__ = ["MACHINE", "PACKAGE_CLASSES", "DISTRO", "DL_DIR", "SSTATE_DIR", "SSTATE_MIRROR", "PARALLEL_MAKE", "BB_NUMBER_THREADS", "CONF_VERSION"]
__gBBLayersVars__ = ["BBLAYERS", "LCONF_VERSION"]
__gRecipeVars__ = ["DEPENDS", "IMAGE_INSTALL"]

@@ -148,27 +136,11 @@ class TemplateMgr(gobject.GObject):
self.local_conf = None
self.image_bb = None

@classmethod
def convert_to_template_pathfilename(cls, filename, path):
return "%s/%s%s%s" % (path, "template-", filename, ".hob")

@classmethod
def convert_to_bblayers_pathfilename(cls, filename, path):
return "%s/%s%s%s" % (path, "bblayers-", filename, ".conf")

@classmethod
def convert_to_local_pathfilename(cls, filename, path):
return "%s/%s%s%s" % (path, "local-", filename, ".conf")

@classmethod
def convert_to_image_pathfilename(cls, filename, path):
return "%s/%s%s%s" % (path, "hob-image-", filename, ".bb")

def open(self, filename, path):
self.template_hob = HobTemplateFile(TemplateMgr.convert_to_template_pathfilename(filename, path))
self.bblayers_conf = ConfigFile(TemplateMgr.convert_to_bblayers_pathfilename(filename, path))
self.local_conf = ConfigFile(TemplateMgr.convert_to_local_pathfilename(filename, path))
self.image_bb = RecipeFile(TemplateMgr.convert_to_image_pathfilename(filename, path))
self.template_hob = HobTemplateFile("%s/%s%s%s" % (path, "template-", filename, ".hob"))
self.bblayers_conf = ConfigFile("%s/%s%s%s" % (path, "bblayers-", filename, ".conf"))
self.local_conf = ConfigFile("%s/%s%s%s" % (path, "local-", filename, ".conf"))
self.image_bb = RecipeFile("%s/%s%s%s" % (path, "hob-image-", filename, ".bb"))

def setVar(self, var, val):
if var in TemplateMgr.__gLocalVars__:
@@ -186,9 +158,6 @@ class TemplateMgr(gobject.GObject):
self.image_bb.save()
self.template_hob.save()

def getVersion(self, path):
return HobTemplateFile(path).getVersion()

def load(self, path):
self.template_hob = HobTemplateFile(path)
self.dictionary = self.template_hob.load()

@@ -22,7 +22,6 @@
# bitbake which will allow more flexibility.

import os
import bb

def which_terminal():
term = bb.utils.which(os.environ["PATH"], "xterm")

@@ -24,7 +24,7 @@ import threading
import xmlrpclib
import bb
import bb.event
from bb.ui.crumbs.progressbar import HobProgressBar
from bb.ui.crumbs.progress import ProgressBar

# Package Model
(COL_PKG_NAME) = (0)
@@ -198,23 +198,17 @@ class gtkthread(threading.Thread):

def main(server, eventHandler):
try:
cmdline, error = server.runCommand(["getCmdLineAction"])
if error:
print("Error getting bitbake commandline: %s" % error)
return 1
elif not cmdline:
print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
return 1
elif not cmdline or cmdline[0] != "generateDotGraph":
cmdline = server.runCommand(["getCmdLineAction"])
if cmdline and not cmdline['action']:
print(cmdline['msg'])
return
elif not cmdline or (cmdline['action'] and cmdline['action'][0] != "generateDotGraph"):
print("This UI is only compatible with the -g option")
return 1
ret, error = server.runCommand(["generateDepTreeEvent", cmdline[1], cmdline[2]])
if error:
print("Error running command '%s': %s" % (cmdline, error))
return 1
elif ret != True:
print("Error running command '%s': returned %s" % (cmdline, ret))
return 1
return
ret = server.runCommand(["generateDepTreeEvent", cmdline['action'][1], cmdline['action'][2]])
if ret != True:
print("Couldn't run command! %s" % ret)
return
except xmlrpclib.Fault as x:
print("XMLRPC Fault getting commandline:\n %s" % x)
return
@@ -226,13 +220,8 @@ def main(server, eventHandler):

gtk.gdk.threads_enter()
dep = DepExplorer()
bardialog = gtk.Dialog(parent=dep,
flags=gtk.DIALOG_MODAL|gtk.DIALOG_DESTROY_WITH_PARENT)
bardialog.set_default_size(400, 50)
pbar = HobProgressBar()
bardialog.vbox.pack_start(pbar)
bardialog.show_all()
bardialog.connect("delete-event", gtk.main_quit)
pbar = ProgressBar(dep)
pbar.connect("delete-event", gtk.main_quit)
gtk.gdk.threads_leave()

progress_total = 0
@@ -240,9 +229,7 @@ def main(server, eventHandler):
try:
event = eventHandler.waitEvent(0.25)
if gtkthread.quit.isSet():
_, error = server.runCommand(["stateStop"])
if error:
print('Unable to cleanly stop: %s' % error)
server.runCommand(["stateStop"])
break

if event is None:
@@ -251,20 +238,19 @@ def main(server, eventHandler):
if isinstance(event, bb.event.CacheLoadStarted):
progress_total = event.total
gtk.gdk.threads_enter()
bardialog.set_title("Loading Cache")
pbar.update(0)
pbar.set_title("Loading Cache")
pbar.update(0, progress_total)
gtk.gdk.threads_leave()

if isinstance(event, bb.event.CacheLoadProgress):
x = event.current
gtk.gdk.threads_enter()
pbar.update(x * 1.0 / progress_total)
pbar.set_title('')
pbar.update(x, progress_total)
gtk.gdk.threads_leave()
continue

if isinstance(event, bb.event.CacheLoadCompleted):
bardialog.hide()
pbar.hide()
continue

if isinstance(event, bb.event.ParseStarted):
@@ -272,21 +258,19 @@ def main(server, eventHandler):
if progress_total == 0:
continue
gtk.gdk.threads_enter()
pbar.update(0)
bardialog.set_title("Processing recipes")

pbar.set_title("Processing recipes")
pbar.update(0, progress_total)
gtk.gdk.threads_leave()

if isinstance(event, bb.event.ParseProgress):
x = event.current
gtk.gdk.threads_enter()
pbar.update(x * 1.0 / progress_total)
pbar.set_title('')
pbar.update(x, progress_total)
gtk.gdk.threads_leave()
continue

if isinstance(event, bb.event.ParseCompleted):
bardialog.hide()
pbar.hide()
continue

if isinstance(event, bb.event.DepTreeGenerated):
@@ -318,13 +302,9 @@ def main(server, eventHandler):
break
if shutdown == 1:
print("\nSecond Keyboard Interrupt, stopping...\n")
_, error = server.runCommand(["stateStop"])
if error:
print('Unable to cleanly stop: %s' % error)
server.runCommand(["stateStop"])
if shutdown == 0:
print("\nKeyboard Interrupt, closing down...\n")
_, error = server.runCommand(["stateShutdown"])
if error:
print('Unable to cleanly shutdown: %s' % error)
server.runCommand(["stateShutdown"])
shutdown = shutdown + 1
pass

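The Ctrl-C handling above follows the two-stage shutdown convention shared by the knotty, ncurses and depexp UIs: the first interrupt requests a graceful "stateShutdown", the second forces "stateStop". A schematic, runnable rendering of the loop (FakeServer stands in for the BitBake command connection and is not part of this tree; runCommand is assumed to return a (result, error) pair, as in the newer code):

import time

class FakeServer(object):
    # illustrative stand-in for the BitBake server connection
    def runCommand(self, cmd):
        print("server <- %s" % cmd)
        return None, None

server = FakeServer()
shutdown = 0
while True:
    try:
        time.sleep(0.25)               # stands in for eventHandler.waitEvent()
    except KeyboardInterrupt:
        if shutdown == 1:              # second Ctrl-C: force a hard stop
            _, error = server.runCommand(["stateStop"])
            if error:
                print('Unable to cleanly stop: %s' % error)
            break                      # sketch exits here; the real loop keeps polling
        if shutdown == 0:              # first Ctrl-C: ask for graceful shutdown
            _, error = server.runCommand(["stateShutdown"])
            if error:
                print('Unable to cleanly shutdown: %s' % error)
        shutdown = shutdown + 1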
@@ -80,19 +80,16 @@ def main (server, eventHandler):
running_build.connect ("build-failed", running_build_failed_cb)

try:
cmdline, error = server.runCommand(["getCmdLineAction"])
if error:
print("Error getting bitbake commandline: %s" % error)
return 1
elif not cmdline:
cmdline = server.runCommand(["getCmdLineAction"])
if not cmdline:
print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
return 1
ret, error = server.runCommand(cmdline)
if error:
print("Error running command '%s': %s" % (cmdline, error))
elif not cmdline['action']:
print(cmdline['msg'])
return 1
elif ret != True:
print("Error running command '%s': returned %s" % (cmdline, ret))
ret = server.runCommand(cmdline['action'])
if ret != True:
print("Couldn't get default commandline! %s" % ret)
return 1
except xmlrpclib.Fault as x:
print("XMLRPC Fault getting commandline:\n %s" % x)

@@ -30,7 +30,7 @@ try:
pygtk.require('2.0') # to be certain we don't have gtk+ 1.x !?!
gtkver = gtk.gtk_version
pygtkver = gtk.pygtk_version
if gtkver < (2, 20, 0) or pygtkver < (2, 21, 0):
if gtkver < (2, 18, 0) or pygtkver < (2, 16, 0):
sys.exit("%s,\nYou have Gtk+ %s and PyGtk %s." % (requirements,
".".join(map(str, gtkver)),
".".join(map(str, pygtkver))))


(binary image updated: 4.0 KiB before, 4.5 KiB after)
(binary image updated: 4.1 KiB before, 4.5 KiB after)
@@ -25,12 +25,7 @@ import sys
import xmlrpclib
import logging
import progressbar
import signal
import bb.msg
import time
import fcntl
import struct
import copy
from bb.ui import uihelper

logger = logging.getLogger("BitBake")
@@ -42,21 +37,8 @@ class BBProgress(progressbar.ProgressBar):
widgets = [progressbar.Percentage(), ' ', progressbar.Bar(), ' ',
progressbar.ETA()]

try:
self._resize_default = signal.getsignal(signal.SIGWINCH)
except:
self._resize_default = None
progressbar.ProgressBar.__init__(self, maxval, [self.msg + ": "] + widgets)

def _handle_resize(self, signum, frame):
progressbar.ProgressBar._handle_resize(self, signum, frame)
if self._resize_default:
self._resize_default(signum, frame)
def finish(self):
progressbar.ProgressBar.finish(self)
if self._resize_default:
signal.signal(signal.SIGWINCH, self._resize_default)

class NonInteractiveProgress(object):
fobj = sys.stdout

@@ -88,152 +70,46 @@ def pluralise(singular, plural, qty):
else:
return plural % qty


class InteractConsoleLogFilter(logging.Filter):
def __init__(self, tf, format):
self.tf = tf
self.format = format

def filter(self, record):
if record.levelno == self.format.NOTE and (record.msg.startswith("Running") or record.msg.startswith("recipe ")):
return False
self.tf.clearFooter()
return True

class TerminalFilter(object):
columns = 80

def sigwinch_handle(self, signum, frame):
self.columns = self.getTerminalColumns()
if self._sigwinch_default:
self._sigwinch_default(signum, frame)

def getTerminalColumns(self):
def ioctl_GWINSZ(fd):
try:
cr = struct.unpack('hh', fcntl.ioctl(fd, self.termios.TIOCGWINSZ, '1234'))
except:
return None
return cr
cr = ioctl_GWINSZ(sys.stdout.fileno())
if not cr:
try:
fd = os.open(os.ctermid(), os.O_RDONLY)
cr = ioctl_GWINSZ(fd)
os.close(fd)
except:
pass
if not cr:
try:
cr = (env['LINES'], env['COLUMNS'])
except:
cr = (25, 80)
return cr[1]

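getTerminalColumns above probes three sources in order: a TIOCGWINSZ ioctl on stdout, the same ioctl on the controlling terminal, and finally the LINES/COLUMNS environment (the bare env lookup in the original only works if a module aliased os.environ; the bare except swallows the resulting NameError, falling through to the 25x80 default). A standalone version that reads os.environ explicitly:

import fcntl, os, struct, sys, termios

def terminal_columns():
    # minimal sketch of the same probing order
    def ioctl_gwinsz(fd):
        try:
            return struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'))
        except Exception:
            return None
    cr = ioctl_gwinsz(sys.stdout.fileno())
    if not cr:
        try:
            fd = os.open(os.ctermid(), os.O_RDONLY)   # controlling terminal
            cr = ioctl_gwinsz(fd)
            os.close(fd)
        except Exception:
            pass
    if not cr:
        try:
            cr = (int(os.environ['LINES']), int(os.environ['COLUMNS']))
        except Exception:
            cr = (25, 80)                             # conservative default
    return cr[1]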
def __init__(self, main, helper, console, format):
self.main = main
self.helper = helper
self.cuu = None
self.stdinbackup = None
self.interactive = sys.stdout.isatty()
self.footer_present = False
self.lastpids = []

if not self.interactive:
return

try:
import curses
except ImportError:
sys.exit("FATAL: The knotty ui could not load the required curses python module.")

import termios
self.curses = curses
self.termios = termios
try:
fd = sys.stdin.fileno()
self.stdinbackup = termios.tcgetattr(fd)
new = copy.deepcopy(self.stdinbackup)
new[3] = new[3] & ~termios.ECHO
termios.tcsetattr(fd, termios.TCSADRAIN, new)
curses.setupterm()
self.ed = curses.tigetstr("ed")
if self.ed:
self.cuu = curses.tigetstr("cuu")
try:
self._sigwinch_default = signal.getsignal(signal.SIGWINCH)
signal.signal(signal.SIGWINCH, self.sigwinch_handle)
except:
pass
self.columns = self.getTerminalColumns()
except:
self.cuu = None
console.addFilter(InteractConsoleLogFilter(self, format))

def clearFooter(self):
if self.footer_present:
lines = self.footer_present
sys.stdout.write(self.curses.tparm(self.cuu, lines))
sys.stdout.write(self.curses.tparm(self.ed))
self.footer_present = False
return

def updateFooter(self):
if not self.cuu:
if not main.shutdown or not self.helper.needUpdate:
return

activetasks = self.helper.running_tasks
failedtasks = self.helper.failed_tasks
runningpids = self.helper.running_pids
if self.footer_present and (self.lastcount == self.helper.tasknumber_current) and (self.lastpids == runningpids):
return
if self.footer_present:
self.clearFooter()
if not self.helper.tasknumber_total or self.helper.tasknumber_current == self.helper.tasknumber_total:

if len(runningpids) == 0:
return

self.helper.getTasks()

tasks = []
for t in runningpids:
tasks.append("%s (pid %s)" % (activetasks[t]["title"], t))

if self.main.shutdown:
content = "Waiting for %s running tasks to finish:" % len(activetasks)
elif not len(activetasks):
content = "No currently running tasks (%s of %s)" % (self.helper.tasknumber_current, self.helper.tasknumber_total)
if main.shutdown:
print("Waiting for %s running tasks to finish:" % len(activetasks))
else:
content = "Currently %s running tasks (%s of %s):" % (len(activetasks), self.helper.tasknumber_current, self.helper.tasknumber_total)
print content
lines = 1 + int(len(content) / (self.columns + 1))
print("Currently %s running tasks (%s of %s):" % (len(activetasks), self.helper.tasknumber_current, self.helper.tasknumber_total))
for tasknum, task in enumerate(tasks):
content = "%s: %s" % (tasknum, task)
print content
lines = lines + 1 + int(len(content) / (self.columns + 1))
self.footer_present = lines
self.lastpids = runningpids[:]
self.lastcount = self.helper.tasknumber_current
print("%s: %s" % (tasknum, task))

def finish(self):
if self.stdinbackup:
fd = sys.stdin.fileno()
self.termios.tcsetattr(fd, self.termios.TCSADRAIN, self.stdinbackup)
return

def main(server, eventHandler, tf = TerminalFilter):

# Get values of variables which control our output
includelogs, error = server.runCommand(["getVariable", "BBINCLUDELOGS"])
if error:
logger.error("Unable to get the value of BBINCLUDELOGS variable: %s" % error)
return 1
loglines, error = server.runCommand(["getVariable", "BBINCLUDELOGS_LINES"])
if error:
logger.error("Unable to get the value of BBINCLUDELOGS_LINES variable: %s" % error)
return 1
consolelogfile, error = server.runCommand(["getVariable", "BB_CONSOLELOG"])
if error:
logger.error("Unable to get the value of BB_CONSOLELOG variable: %s" % error)
return 1

if sys.stdin.isatty() and sys.stdout.isatty():
log_exec_tty = True
else:
log_exec_tty = False
includelogs = server.runCommand(["getVariable", "BBINCLUDELOGS"])
loglines = server.runCommand(["getVariable", "BBINCLUDELOGS_LINES"])
consolelogfile = server.runCommand(["getVariable", "BB_CONSOLELOG"])

helper = uihelper.BBUIHelper()

@@ -243,29 +119,25 @@ def main(server, eventHandler, tf = TerminalFilter):
console.setFormatter(format)
logger.addHandler(console)
if consolelogfile:
bb.utils.mkdirhier(os.path.dirname(consolelogfile))
consolelog = logging.FileHandler(consolelogfile)
bb.msg.addDefaultlogFilter(consolelog)
consolelog.setFormatter(format)
logger.addHandler(consolelog)

try:
cmdline, error = server.runCommand(["getCmdLineAction"])
if error:
logger.error("Unable to get bitbake commandline arguments: %s" % error)
return 1
elif not cmdline:
cmdline = server.runCommand(["getCmdLineAction"])
if not cmdline:
print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
return 1
ret, error = server.runCommand(cmdline)
if error:
logger.error("Command '%s' failed: %s" % (cmdline, error))
elif not cmdline['action']:
print(cmdline['msg'])
return 1
elif ret != True:
logger.error("Command '%s' failed: returned %s" % (cmdline, ret))
ret = server.runCommand(cmdline['action'])
if ret != True:
print("Couldn't get default commandline! %s" % ret)
return 1
except xmlrpclib.Fault as x:
logger.error("XMLRPC Fault getting commandline:\n %s" % x)
print("XMLRPC Fault getting commandline:\n %s" % x)
return 1

parseprogress = None
@@ -292,20 +164,6 @@ def main(server, eventHandler, tf = TerminalFilter):
if not main.shutdown:
main.shutdown = 1

if isinstance(event, bb.event.LogExecTTY):
if log_exec_tty:
tries = event.retries
while tries:
print "Trying to run: %s" % event.prog
if os.system(event.prog) == 0:
break
time.sleep(event.sleep_delay)
tries -= 1
if tries:
continue
logger.warn(event.msg)
continue

if isinstance(event, logging.LogRecord):
if event.levelno >= format.ERROR:
errors = errors + 1
@@ -449,8 +307,7 @@ def main(server, eventHandler, tf = TerminalFilter):
bb.runqueue.runQueueExitWait,
bb.event.OperationStarted,
bb.event.OperationCompleted,
bb.event.OperationProgress,
bb.event.DiskFull)):
bb.event.OperationProgress)):
continue

logger.error("Unknown event: %s", event)
@@ -461,19 +318,14 @@ def main(server, eventHandler, tf = TerminalFilter):
if ioerror.args[0] == 4:
pass
except KeyboardInterrupt:
import time
termfilter.clearFooter()
if main.shutdown == 1:
print("\nSecond Keyboard Interrupt, stopping...\n")
_, error = server.runCommand(["stateStop"])
if error:
logger.error("Unable to cleanly stop: %s" % error)
server.runCommand(["stateStop"])
if main.shutdown == 0:
print("\nKeyboard Interrupt, closing down...\n")
interrupted = True
_, error = server.runCommand(["stateShutdown"])
if error:
logger.error("Unable to cleanly shutdown: %s" % error)
print("\nKeyboard Interrupt, closing down...\n")
server.runCommand(["stateShutdown"])
main.shutdown = main.shutdown + 1
pass


109
bitbake/lib/bb/ui/knotty2.py
Normal file
@@ -0,0 +1,109 @@
#
# BitBake (No)TTY UI Implementation (v2)
#
# Handling output to TTYs or files (no TTY)
#
# Copyright (C) 2012 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

from bb.ui import knotty
import logging
import sys
logger = logging.getLogger("BitBake")

class InteractConsoleLogFilter(logging.Filter):
def __init__(self, tf, format):
self.tf = tf
self.format = format

def filter(self, record):
if record.levelno == self.format.NOTE and (record.msg.startswith("Running") or record.msg.startswith("package ")):
return False
self.tf.clearFooter()
return True

class TerminalFilter2(object):
def __init__(self, main, helper, console, format):
self.main = main
self.helper = helper
self.cuu = None
self.stdinbackup = None
self.interactive = sys.stdout.isatty()
self.footer_present = False
self.lastpids = []

if not self.interactive:
return

import curses
import termios
import copy
self.curses = curses
self.termios = termios
try:
fd = sys.stdin.fileno()
self.stdinbackup = termios.tcgetattr(fd)
new = copy.deepcopy(self.stdinbackup)
new[3] = new[3] & ~termios.ECHO
termios.tcsetattr(fd, termios.TCSADRAIN, new)
curses.setupterm()
self.ed = curses.tigetstr("ed")
if self.ed:
self.cuu = curses.tigetstr("cuu")
except:
self.cuu = None
console.addFilter(InteractConsoleLogFilter(self, format))

def clearFooter(self):
if self.footer_present:
lines = self.footer_present
sys.stdout.write(self.curses.tparm(self.cuu, lines))
sys.stdout.write(self.curses.tparm(self.ed))
self.footer_present = False

def updateFooter(self):
if not self.cuu:
return
activetasks = self.helper.running_tasks
failedtasks = self.helper.failed_tasks
runningpids = self.helper.running_pids
if self.footer_present and (self.lastpids == runningpids):
return
if self.footer_present:
self.clearFooter()
if not activetasks:
return
lines = 1
tasks = []
for t in runningpids:
tasks.append("%s (pid %s)" % (activetasks[t]["title"], t))

if self.main.shutdown:
print("Waiting for %s running tasks to finish:" % len(activetasks))
else:
print("Currently %s running tasks (%s of %s):" % (len(activetasks), self.helper.tasknumber_current, self.helper.tasknumber_total))
for tasknum, task in enumerate(tasks):
print("%s: %s" % (tasknum, task))
lines = lines + 1
self.footer_present = lines
self.lastpids = runningpids[:]

def finish(self):
if self.stdinbackup:
fd = sys.stdin.fileno()
self.termios.tcsetattr(fd, self.termios.TCSADRAIN, self.stdinbackup)

def main(server, eventHandler):
knotty.main(server, eventHandler, TerminalFilter2)
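clearFooter/updateFooter above implement the redraw-in-place status footer: "cuu" (cursor up N lines) jumps back over the footer printed last time and "ed" (erase to end of display) wipes it before reprinting. A minimal sketch of that terminfo trick (Python 2 as used throughout this tree; assumes an interactive terminal with both capabilities):

import curses, sys, time

curses.setupterm()
cuu = curses.tigetstr("cuu")    # move cursor up N lines (parametrised)
ed = curses.tigetstr("ed")      # erase from cursor to end of display

def repaint(prev_lines, new_lines):
    if prev_lines:
        sys.stdout.write(curses.tparm(cuu, prev_lines))  # back over old footer
        sys.stdout.write(curses.tparm(ed))               # erase it
    for line in new_lines:
        print(line)
    return len(new_lines)

footer = 0
for i in range(1, 4):
    footer = repaint(footer, ["Currently %d running tasks:" % i])
    time.sleep(0.5)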
@@ -47,13 +47,7 @@

from __future__ import division
import logging
import os, sys, itertools, time, subprocess

try:
import curses
except ImportError:
sys.exit("FATAL: The ncurses ui could not load the required curses python module.")

import os, sys, curses, itertools, time
import bb
import xmlrpclib
from bb import ui
@@ -236,18 +230,15 @@ class NCursesUI:
shutdown = 0

try:
cmdline, error = server.runCommand(["getCmdLineAction"])
cmdline = server.runCommand(["getCmdLineAction"])
if not cmdline:
print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
return
elif error:
print("Error getting bitbake commandline: %s" % error)
elif not cmdline['action']:
print(cmdline['msg'])
return
ret, error = server.runCommand(cmdline)
if error:
print("Error running command '%s': %s" % (cmdline, error))
return
elif ret != True:
ret = server.runCommand(cmdline['action'])
if ret != True:
print("Couldn't get default commandline! %s" % ret)
return
except xmlrpclib.Fault as x:
@@ -295,7 +286,7 @@ class NCursesUI:
# bb.error("log data follows (%s)" % logfile)
# number_of_lines = data.getVar("BBINCLUDELOGS_LINES", d)
# if number_of_lines:
# subprocess.call('tail -n%s %s' % (number_of_lines, logfile), shell=True)
# os.system('tail -n%s %s' % (number_of_lines, logfile))
# else:
# f = open(logfile, "r")
# while True:
@@ -321,8 +312,6 @@ class NCursesUI:
if isinstance(event, bb.cooker.CookerExit):
exitflag = True

if isinstance(event, bb.event.LogExecTTY):
mw.appendText('WARN: ' + event.msg + '\n')
if helper.needUpdate:
activetasks, failedtasks = helper.getTasks()
taw.erase()
@@ -348,14 +337,10 @@ class NCursesUI:
exitflag = True
if shutdown == 1:
mw.appendText("Second Keyboard Interrupt, stopping...\n")
_, error = server.runCommand(["stateStop"])
if error:
print("Unable to cleanly stop: %s" % error)
server.runCommand(["stateStop"])
if shutdown == 0:
mw.appendText("Keyboard Interrupt, closing down...\n")
_, error = server.runCommand(["stateShutdown"])
if error:
print("Unable to cleanly shutdown: %s" % error)
server.runCommand(["stateShutdown"])
shutdown = shutdown + 1
pass


@@ -48,7 +48,7 @@ class BBUIHelper:
self.running_pids.remove(event.pid)
self.failed_tasks.append( { 'title' : "%s %s" % (event._package, event._task)})
self.needUpdate = True
if isinstance(event, bb.runqueue.runQueueTaskStarted) or isinstance(event, bb.runqueue.sceneQueueTaskStarted):
if isinstance(event, bb.runqueue.runQueueTaskStarted):
self.tasknumber_current = event.stats.completed + event.stats.active + event.stats.failed + 1
self.tasknumber_total = event.stats.total


@@ -26,12 +26,14 @@ import logging
import bb
import bb.msg
import multiprocessing
import fcntl
from commands import getstatusoutput
from contextlib import contextmanager

logger = logging.getLogger("BitBake.Util")

# Version comparison
separators = ".-"

# Context used in better_exec, eval
_context = {
"os": os,
@@ -46,18 +48,15 @@ def explode_version(s):
while (s != ''):
if s[0] in string.digits:
m = numeric_regexp.match(s)
r.append((0, int(m.group(1))))
r.append(int(m.group(1)))
s = m.group(2)
continue
if s[0] in string.letters:
m = alpha_regexp.match(s)
r.append((1, m.group(1)))
r.append(m.group(1))
s = m.group(2)
continue
if s[0] == '~':
r.append((-1, s[0]))
else:
r.append((2, s[0]))
r.append(s[0])
s = s[1:]
return r

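The hunk above reverts explode_version from typed (weight, value) tuples back to a flat mix of ints and strings. The typed scheme is what makes '~' prereleases sort below a bare version: '~' tokens weigh -1, digits 0, letters 1, other separators 2, and an exhausted list pads with (0, None). A condensed, runnable re-creation of the tuple form for intuition (the real function uses module-level numeric_regexp/alpha_regexp):

import re

numeric = re.compile('^(\d+)(.*)$')
alpha = re.compile('^([a-zA-Z]+)(.*)$')

def explode(s):
    # typed tokens: '~' -> -1, digit -> 0, letter -> 1, separator -> 2
    r = []
    while s:
        m = numeric.match(s)
        if m:
            r.append((0, int(m.group(1)))); s = m.group(2); continue
        m = alpha.match(s)
        if m:
            r.append((1, m.group(1))); s = m.group(2); continue
        r.append((-1, s[0]) if s[0] == '~' else (2, s[0]))
        s = s[1:]
    return r

print(explode("1.2~rc1"))
# [(0, 1), (2, '.'), (0, 2), (-1, '~'), (1, 'rc'), (0, 1)]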
@@ -78,25 +77,33 @@ def split_version(s):
def vercmp_part(a, b):
va = explode_version(a)
vb = explode_version(b)
sa = False
sb = False
while True:
if va == []:
(oa, ca) = (0, None)
ca = None
else:
(oa, ca) = va.pop(0)
ca = va.pop(0)
if vb == []:
(ob, cb) = (0, None)
cb = None
else:
(ob, cb) = vb.pop(0)
if (oa, ca) == (0, None) and (ob, cb) == (0, None):
cb = vb.pop(0)
if ca == None and cb == None:
return 0
if oa < ob:

if isinstance(ca, basestring):
sa = ca in separators
if isinstance(cb, basestring):
sb = cb in separators
if sa and not sb:
return -1
elif oa > ob:
if not sa and sb:
return 1
elif ca < cb:

if ca > cb:
return 1
if ca < cb:
return -1
elif ca > cb:
return 1

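vercmp_part then walks the two token lists in lockstep, comparing weights before values; the (0, None) padding is what the hunk's (oa, ca) pairs encode when one list runs out. Building on the explode() sketch above, a compact version showing the resulting ordering (Python 2 comparison semantics, matching the original code):

def vercmp_part_sketch(a, b):
    va, vb = explode(a), explode(b)    # explode() as sketched earlier
    while True:
        oa, ca = va.pop(0) if va else (0, None)   # pad exhausted side
        ob, cb = vb.pop(0) if vb else (0, None)
        if (oa, ca) == (0, None) and (ob, cb) == (0, None):
            return 0
        if oa < ob: return -1
        if oa > ob: return 1
        if ca < cb: return -1
        if ca > cb: return 1

assert vercmp_part_sketch("1.2~rc1", "1.2") == -1  # '~' (weight -1) < padding
assert vercmp_part_sketch("1.2.1", "1.2") == 1     # separator (2, '.') > padding
assert vercmp_part_sketch("1.2", "1.2") == 0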
def vercmp(ta, tb):
(ea, va, ra) = ta
@@ -109,10 +116,130 @@ def vercmp(ta, tb):
r = vercmp_part(ra, rb)
return r

def vercmp_string(a, b):
ta = split_version(a)
tb = split_version(b)
return vercmp(ta, tb)
_package_weights_ = {"pre":-2, "p":0, "alpha":-4, "beta":-3, "rc":-1} # dicts are unordered
_package_ends_ = ["pre", "p", "alpha", "beta", "rc", "cvs", "bk", "HEAD" ] # so we need ordered list

def relparse(myver):
"""Parses the last elements of a version number into a triplet, that can
later be compared.
"""

number = 0
p1 = 0
p2 = 0
mynewver = myver.split('_')
if len(mynewver) == 2:
# a number with a _package_weights_ suffix
number = float(mynewver[0])
match = 0
for x in _package_ends_:
elen = len(x)
if mynewver[1][:elen] == x:
match = 1
p1 = _package_weights_[x]
try:
p2 = float(mynewver[1][elen:])
except:
p2 = 0
break
if not match:
# normal number or number with letter at end
divider = len(myver)-1
if myver[divider:] not in "1234567890":
# letter at end
p1 = ord(myver[divider:])
number = float(myver[0:divider])
else:
number = float(myver)
else:
# normal number or number with letter at end
divider = len(myver)-1
if myver[divider:] not in "1234567890":
# letter at end
p1 = ord(myver[divider:])
number = float(myver[0:divider])
else:
number = float(myver)
return [number, p1, p2]

__vercmp_cache__ = {}

def vercmp_string(val1, val2):
"""This takes two version strings and returns an integer to tell you whether
the versions are the same, val1>val2 or val2>val1.
"""

# quick short-circuit
if val1 == val2:
return 0
valkey = val1 + " " + val2

# cache lookup
try:
return __vercmp_cache__[valkey]
try:
return - __vercmp_cache__[val2 + " " + val1]
except KeyError:
pass
except KeyError:
pass

# consider 1_p2 vc 1.1
# after expansion will become (1_p2,0) vc (1,1)
# then 1_p2 is compared with 1 before 0 is compared with 1
# to solve the bug we need to convert it to (1,0_p2)
# by splitting _prepart part and adding it back _after_expansion

val1_prepart = val2_prepart = ''
if val1.count('_'):
val1, val1_prepart = val1.split('_', 1)
if val2.count('_'):
val2, val2_prepart = val2.split('_', 1)

# replace '-' by '.'
# FIXME: Is it needed? can val1/2 contain '-'?

val1 = val1.split("-")
if len(val1) == 2:
val1[0] = val1[0] + "." + val1[1]
val2 = val2.split("-")
if len(val2) == 2:
val2[0] = val2[0] + "." + val2[1]

val1 = val1[0].split('.')
val2 = val2[0].split('.')

# add back decimal point so that .03 does not become "3" !
for x in xrange(1, len(val1)):
if val1[x][0] == '0' :
val1[x] = '.' + val1[x]
for x in xrange(1, len(val2)):
if val2[x][0] == '0' :
val2[x] = '.' + val2[x]

# extend version numbers
if len(val2) < len(val1):
val2.extend(["0"]*(len(val1)-len(val2)))
elif len(val1) < len(val2):
val1.extend(["0"]*(len(val2)-len(val1)))

# add back _prepart tails
if val1_prepart:
val1[-1] += '_' + val1_prepart
if val2_prepart:
val2[-1] += '_' + val2_prepart
# The above code will extend version numbers out so they
# have the same number of digits.
for x in xrange(0, len(val1)):
cmp1 = relparse(val1[x])
cmp2 = relparse(val2[x])
for y in xrange(0, 3):
myret = cmp1[y] - cmp2[y]
if myret != 0:
__vercmp_cache__[valkey] = myret
return myret
__vercmp_cache__[valkey] = 0
return 0

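The restored legacy comparator works differently: relparse collapses each dotted component into a [number, suffix-weight, suffix-number] triplet using the _package_weights_ table, and vercmp_string compares those triplets component by component, memoising results in __vercmp_cache__. A worked trace under the mapping shown above:

# relparse("1")     -> [1.0, 0, 0]
# relparse("2b")    -> [2.0, ord('b'), 0]   # trailing letter raises the middle slot
# relparse("1_rc2") -> [1.0, -1, 2.0]       # 'rc' weighs -1, so any _rc sorts lower
#
# hence vercmp_string("1.0_rc2", "1.0") < 0   # "0_rc2" vs "0": -1 - 0 = -1
# and   vercmp_string("1.0b", "1.0a") > 0     # ord('b') - ord('a') = 1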
def explode_deps(s):
"""
@@ -138,7 +265,7 @@ def explode_deps(s):
#r[-1] += ' ' + ' '.join(j)
return r

def explode_dep_versions2(s):
def explode_dep_versions(s):
"""
Take an RDEPENDS style string of format:
"DEPEND1 (optional version) DEPEND2 (optional version) ..."
@@ -147,70 +274,24 @@ def explode_dep_versions2(s):
r = {}
l = s.replace(",", "").split()
lastdep = None
lastcmp = ""
lastver = ""
incmp = False
inversion = False
for i in l:
if i[0] == '(':
incmp = True
i = i[1:].strip()
if not i:
continue

if incmp:
incmp = False
inversion = True
# This list is based on behavior and supported comparisons from deb, opkg and rpm.
#
# Even though =<, <<, ==, !=, =>, and >> may not be supported,
# we list each possibly valid item.
# The build system is responsible for validation of what it supports.
if i.startswith(('<=', '=<', '<<', '==', '!=', '>=', '=>', '>>')):
lastcmp = i[0:2]
i = i[2:]
elif i.startswith(('<', '>', '=')):
lastcmp = i[0:1]
i = i[1:]
else:
# This is an unsupported case!
lastcmp = (i or "")
i = ""
i.strip()
if not i:
continue
lastver = i[1:] or ""
#j = []
elif inversion and i.endswith(')'):
inversion = False
lastver = lastver + " " + (i[:-1] or "")
r[lastdep] = lastver
elif not inversion:
r[i] = None
lastdep = i
lastver = ""
elif inversion:
lastver = lastver + " " + i

if inversion:
if i.endswith(')'):
i = i[:-1] or ""
inversion = False
if lastver and i:
lastver += " "
if i:
lastver += i
if lastdep not in r:
r[lastdep] = []
r[lastdep].append(lastcmp + " " + lastver)
continue

#if not inversion:
lastdep = i
lastver = ""
lastcmp = ""
if not (i in r and r[i]):
r[lastdep] = []

return r

def explode_dep_versions(s):
r = explode_dep_versions2(s)
for d in r:
if not r[d]:
r[d] = None
continue
if len(r[d]) > 1:
bb.warn("explode_dep_versions(): Item %s appeared in dependency string '%s' multiple times with different values. explode_dep_versions cannot cope with this." % (d, s))
r[d] = r[d][0]
return r

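The shape difference between the two APIs is worth spelling out: explode_dep_versions2 keeps a list of constraints per dependency, while the restored explode_dep_versions keeps one string per dependency (hence the wrapper's warning when an item carries several constraints). Expected results, sketched from the code above (dict ordering may vary on this Python):

# explode_dep_versions2("foo (>= 1.0) bar") would give:
#   {'foo': ['>= 1.0'], 'bar': []}
# the single-value wrapper flattens that to:
#   {'foo': '>= 1.0', 'bar': None}
# and join_deps({'foo': '>= 1.0', 'bar': None}) round-trips to
#   "foo (>= 1.0), bar"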
def join_deps(deps, commasep=True):
@@ -220,11 +301,7 @@ def join_deps(deps, commasep=True):
result = []
for dep in deps:
if deps[dep]:
if isinstance(deps[dep], list):
for v in deps[dep]:
result.append(dep + " (" + v + ")")
else:
result.append(dep + " (" + deps[dep] + ")")
result.append(dep + " (" + deps[dep] + ")")
else:
result.append(dep)
if commasep:
@@ -266,23 +343,20 @@ def better_compile(text, file, realfile, mode = "exec"):
for line in body:
logger.error(line)

e = bb.BBHandledException(e)
raise e
raise

def better_exec(code, context, text = None, realfile = "<code>"):
def better_exec(code, context, text, realfile = "<code>"):
"""
Similar to better_compile, better_exec will
print the lines that are responsible for the
error.
"""
import bb.parse
if not text:
text = code
if not hasattr(code, "co_filename"):
code = better_compile(code, realfile, realfile)
try:
exec(code, _context, context)
except Exception as e:
except Exception:
(t, value, tb) = sys.exc_info()

if t in [bb.parse.SkipPackage, bb.build.FuncFailed]:
@@ -307,32 +381,22 @@ def better_exec(code, context, text = None, realfile = "<code>"):

logger.error("The code that was being executed was:")
_print_trace(textarray, linefailed)
logger.error("[From file: '%s', lineno: %s, function: %s]", tbextract[0][0], tbextract[0][1], tbextract[0][2])
logger.error("(file: '%s', lineno: %s, function: %s)", tbextract[0][0], tbextract[0][1], tbextract[0][2])

# See if this is a function we constructed and has calls back into other functions in
# "text". If so, try and improve the context of the error by diving down the trace
level = 0
nexttb = tb.tb_next
while nexttb is not None and (level+1) < len(tbextract):
while nexttb is not None:
if tbextract[level][0] == tbextract[level+1][0] and tbextract[level+1][2] == tbextract[level][0]:
_print_trace(textarray, tbextract[level+1][1])
logger.error("[From file: '%s', lineno: %s, function: %s]", tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2])
elif "d" in context and tbextract[level+1][2]:
d = context["d"]
functionname = tbextract[level+1][2]
text = d.getVar(functionname, True)
if text:
_print_trace(text.split('\n'), tbextract[level+1][1])
logger.error("[From file: '%s', lineno: %s, function: %s]", tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2])
else:
break
logger.error("(file: '%s', lineno: %s, function: %s)", tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2])
else:
break
nexttb = tb.tb_next
level = level + 1

e = bb.BBHandledException(e)
raise e
raise

def simple_exec(code, context):
exec(code, _context, context)
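better_compile/better_exec are wrappers around the built-in compile() and exec; the _context dict added at the top of the file is the globals namespace handed to executed code. The core mechanism, reduced to a runnable sketch with the error reporting stripped out (run_snippet is an illustrative name, not a function in this tree):

import os

_context = {"os": os}   # globals offered to executed code

def run_snippet(text, context, realfile="<code>"):
    # compile first so syntax errors carry the pseudo-filename
    code = compile(text, realfile, "exec")
    exec(code, _context, context)   # _context = globals, context = locals

d = {}
run_snippet("greeting = 'hello from ' + os.name", d)
print(d["greeting"])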
@@ -486,6 +550,8 @@ def preserved_envvars():
'BB_PRESERVE_ENV',
'BB_ENV_WHITELIST',
'BB_ENV_EXTRAWHITE',
'LANG',
'_',
]
return v + preserved_envvars_exported() + preserved_envvars_exported_interactive()

@@ -783,8 +849,6 @@ def which(path, item, direction = 0):
for p in paths:
next = os.path.join(p, item)
if os.path.exists(next):
if not os.path.isabs(next):
next = os.path.abspath(next)
return next

return ""
@@ -816,7 +880,3 @@ def contains(variable, checkvalues, truevalue, falsevalue, d):

def cpu_count():
return multiprocessing.cpu_count()

def nonblockingfd(fd):
fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)


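nonblockingfd above ORs O_NONBLOCK into a descriptor's status flags so reads fail fast with EAGAIN instead of blocking; callers can apply it to, for example, event pipes. A self-contained demonstration on a fresh pipe:

import fcntl, os

def nonblockingfd(fd):
    fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)

r, w = os.pipe()
nonblockingfd(r)
try:
    os.read(r, 1)                      # nothing written yet
except OSError as e:
    print("would block: %s" % e)       # EAGAIN instead of hanging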
@@ -266,20 +266,17 @@ def is_local_special(host, port):
else:
return False

class PRServiceConfigError(Exception):
pass

def auto_start(d):
global singleton
if (not d.getVar('PRSERV_HOST', True)) or (not d.getVar('PRSERV_PORT', True)):
return
return True

if is_local_special(d.getVar('PRSERV_HOST', True), int(d.getVar('PRSERV_PORT', True))) and not singleton:
import bb.utils
cachedir = (d.getVar("PERSISTENT_DIR", True) or d.getVar("CACHE", True))
if not cachedir:
logger.critical("Please set the 'PERSISTENT_DIR' or 'CACHE' variable")
raise PRServiceConfigError
sys.exit(1)
bb.utils.mkdirhier(cachedir)
dbfile = os.path.join(cachedir, "prserv.sqlite3")
logfile = os.path.join(cachedir, "prserv.log")
@@ -295,7 +292,7 @@ def auto_start(d):
return PRServerConnection(host,port).ping()
except Exception:
logger.critical("PRservice %s:%d not available" % (host, port))
raise PRServiceConfigError
return False

def auto_shutdown(d=None):
global singleton

@@ -1,15 +1,12 @@
# This is a single Makefile to handle all generated Yocto Project documents.
# The Makefile needs to live in the documents directory and all figures used
# in any manuals must be .PNG files and live in the individual book's figures
# directory as well as in the figures directory for the mega-manual.
# Note that the figures for the Yocto Project Development Manual
# differ depending on the BRANCH being built.
# directory. Note that the figures for the Yocto Project Development Manual
# differ between the 'master' and 'edison' branches.
#
# The Makefile has these targets:
#
# pdf: generates a PDF version of a manual. Not valid for the Quick Start
# or the mega-manual (single, large HTML file comprised of all
# Yocto Project manuals).
# html: generates an HTML version of a manual.
# tarball: creates a tarball for the doc files.
# validate: validates
@@ -17,22 +14,18 @@
# clean: removes files
#
# The Makefile generates an HTML and PDF version of every document except the
# Yocto Project Quick Start and the single, HTML mega-manual, which is comprised
# of all the individual Yocto Project manuals. These two manuals are in HTML
# form only. The variable DOC indicates the folder name for a given manual. The
# variable VER represents the distro version of the Yocto Release for which the
# manuals are being generated. The variable BRANCH is used to indicate the
# branch (edison or denzil) and is used only when DOC=dev-manual or
# DOC=mega-manual. If you do not specify a BRANCH, the default branch used
# will be for the latest Yocto Project release. If you build for either
# edison or denzil, you must use BRANCH. You do not need to use BRANCH for
# any release beyond denzil.
# Yocto Project Quick Start. The Quick Start is in HTML form only. The variable
# DOC is used to indicate the folder name for a given manual. The variable
# VER represents the distro version of the Yocto Release for which the manuals
# are being generated. The variable BRANCH is used to indicate the 'edison'
# branch and is used only when DOC=dev-manual (making the YP Development
# Manual).
#
# To build a manual, you must invoke Makefile with the DOC argument. If you
# are going to publish the manual, then you must invoke Makefile with both the
# DOC and the VER argument. Furthermore, if you are building or publishing
# the edison or denzil versions of the Yocto Project Development Manual or
# the mega-manual, you must also use the BRANCH argument.
# To build the HTML and PDF versions of the manual you must invoke the Makefile
# with the DOC argument. If you are going to publish the manual then you
# must invoke the Makefile with both the DOC and the VER argument.
# If you are building the 'edison' version of the YP Development Manual then
# you must use the DOC and BRANCH arguments.
#
# Examples:
#
@@ -40,43 +33,39 @@
# make DOC=yocto-project-qs
# make pdf DOC=poky-ref-manual
# make DOC=dev-manual BRANCH=edison
# make DOC=mega-manual BRANCH=denzil
#
# The first example generates the HTML and PDF versions of the BSP Guide.
# The second example generates the HTML version only of the Quick Start. Note that
# the Quick Start only has an HTML version available. The third example generates
# both the PDF and HTML versions of the Yocto Project Reference Manual. The
# fourth example generates both the PDF and HTML 'edison' versions of the YP
# Development Manual. The last example generates the HTML version of the
# mega-manual and uses the 'denzil' branch when choosing figures for the
# tarball of figures. Any example that does not use the BRANCH argument
# builds the current version of the manual set.
# last example generates both the PDF and HTML 'edison' versions of the YP
# Development Manual.
#
# Use the publish target to push the generated manuals to the Yocto Project
# website. All files needed for the manual's HTML form are pushed as well as the
# PDF version (if applicable).
# Examples:
#
# make publish DOC=bsp-guide VER=1.3
# make publish DOC=adt-manual VER=1.3
# make publish DOC=bsp-guide VER=1.2
# make publish DOC=adt-manual VER=1.2
# make publish DOC=dev-manual VER=1.1.1 BRANCH=edison
# make publish DOC=dev-manual VER=1.2 BRANCH=denzil
# make publish DOC=dev-manual VER=1.2
#
# The first example publishes the 1.3 version of both the PDF and HTML versions of
# the BSP Guide. The second example publishes the 1.3 version of both the PDF and
# The first example publishes the 1.2 version of both the PDF and HTML versions of
# the BSP Guide. The second example publishes the 1.2 version of both the PDF and
# HTML versions of the ADT Manual. The third example publishes the PDF and HTML
# 'edison' versions of the YP Development Manual. The fourth example publishes
# the PDF and HTML 'denzil' versions of the YP Development Manual.
# 'edison' versions of the YP Development Manual. Finally, the last example publishes
# the PDF and HTML 'master' versions of the YP Development Manual.
#

ifeq ($(DOC),bsp-guide)
XSLTOPTS = --stringparam html.stylesheet bsp-style.css \
XSLTOPTS = --stringparam html.stylesheet style.css \
--stringparam chapter.autolabel 1 \
--stringparam section.autolabel 1 \
--stringparam section.label.includes.component.label 1 \
--xinclude
ALLPREQ = html pdf tarball
TARFILES = bsp-style.css bsp-guide.html bsp-guide.pdf figures/bsp-title.png
TARFILES = style.css bsp-guide.html bsp-guide.pdf figures/bsp-title.png
MANUALS = $(DOC)/$(DOC).html $(DOC)/$(DOC).pdf
FIGURES = figures
STYLESHEET = $(DOC)/*.css
@@ -84,7 +73,7 @@ STYLESHEET = $(DOC)/*.css
endif

ifeq ($(DOC),dev-manual)
XSLTOPTS = --stringparam html.stylesheet dev-style.css \
XSLTOPTS = --stringparam html.stylesheet style.css \
--stringparam chapter.autolabel 1 \
--stringparam section.autolabel 1 \
--stringparam section.label.includes.component.label 1 \
@@ -93,12 +82,11 @@ ALLPREQ = html pdf tarball
#
# Note that the tarfile might produce the "Cannot stat: No such file or directory" error
# message for .PNG files that are not present when building a particular branch. The
# list of files is all-inclusive for all branches. Note, if you don't provide a BRANCH
# option, it defaults to the latest stuff. This would be appropriate for "master" branch.
# list of files is all-inclusive for all branches.
#

ifeq ($(BRANCH),edison)
TARFILES = dev-style.css dev-manual.html dev-manual.pdf \
TARFILES = style.css dev-manual.html dev-manual.pdf \
figures/app-dev-flow.png figures/bsp-dev-flow.png figures/dev-title.png \
figures/git-workflow.png figures/index-downloads.png figures/kernel-dev-flow.png \
figures/kernel-example-repos-edison.png \
@@ -106,21 +94,15 @@ TARFILES = dev-style.css dev-manual.html dev-manual.pdf \
figures/kernel-overview-3-edison.png \
figures/source-repos.png figures/yp-download.png \
figures/wip.png
else ifeq ($(BRANCH),denzil)
TARFILES = dev-style.css dev-manual.html dev-manual.pdf \
else
TARFILES = style.css dev-manual.html dev-manual.pdf \
figures/app-dev-flow.png figures/bsp-dev-flow.png figures/dev-title.png \
figures/git-workflow.png figures/index-downloads.png figures/kernel-dev-flow.png \
figures/kernel-example-repos-denzil.png \
figures/kernel-example-repos.png \
figures/kernel-overview-1.png figures/kernel-overview-2.png \
figures/kernel-overview-3-denzil.png \
figures/kernel-overview-3.png \
figures/source-repos.png figures/yp-download.png \
figures/wip.png
else
TARFILES = dev-style.css dev-manual.html dev-manual.pdf \
figures/app-dev-flow.png figures/bsp-dev-flow.png figures/dev-title.png \
figures/git-workflow.png figures/index-downloads.png figures/kernel-dev-flow.png \
figures/kernel-overview-1.png figures/kernel-overview-2-generic.png \
figures/source-repos.png figures/yp-download.png
endif

MANUALS = $(DOC)/$(DOC).html $(DOC)/$(DOC).pdf
@@ -130,76 +112,24 @@ STYLESHEET = $(DOC)/*.css
endif

ifeq ($(DOC),yocto-project-qs)
XSLTOPTS = --stringparam html.stylesheet qs-style.css \
XSLTOPTS = --stringparam html.stylesheet style.css \
--xinclude
ALLPREQ = html tarball
TARFILES = yocto-project-qs.html qs-style.css figures/yocto-environment.png figures/building-an-image.png figures/using-a-pre-built-image.png figures/yocto-project-transp.png
TARFILES = yocto-project-qs.html style.css figures/yocto-environment.png figures/building-an-image.png figures/using-a-pre-built-image.png figures/yocto-project-transp.png
MANUALS = $(DOC)/$(DOC).html
FIGURES = figures
STYLESHEET = $(DOC)/*.css
endif

ifeq ($(DOC),mega-manual)
XSLTOPTS = --stringparam html.stylesheet mega-style.css \
--stringparam chapter.autolabel 1 \
--stringparam section.autolabel 1 \
--stringparam section.label.includes.component.label 1 \
--xinclude
ALLPREQ = html tarball

ifeq ($(BRANCH),edison)
TARFILES = mega-manual.html mega-style.css figures/yocto-environment.png figures/building-an-image.png \
figures/using-a-pre-built-image.png \
figures/poky-title.png \
figures/adt-title.png figures/bsp-title.png \
figures/kernel-title.png figures/kernel-architecture-overview.png \
figures/app-dev-flow.png figures/bsp-dev-flow.png figures/dev-title.png \
figures/git-workflow.png figures/index-downloads.png figures/kernel-dev-flow.png \
figures/kernel-example-repos-edison.png \
figures/kernel-overview-1.png figures/kernel-overview-2.png \
figures/kernel-overview-3-edison.png \
figures/source-repos.png figures/yp-download.png \
figures/wip.png
else ifeq ($(BRANCH),denzil)
TARFILES = mega-manual.html mega-style.css figures/yocto-environment.png figures/building-an-image.png \
figures/using-a-pre-built-image.png \
figures/poky-title.png \
figures/adt-title.png figures/bsp-title.png \
figures/kernel-title.png figures/kernel-architecture-overview.png \
figures/app-dev-flow.png figures/bsp-dev-flow.png figures/dev-title.png \
figures/git-workflow.png figures/index-downloads.png figures/kernel-dev-flow.png \
figures/kernel-example-repos-denzil.png \
figures/kernel-overview-1.png figures/kernel-overview-2.png \
figures/kernel-overview-3-denzil.png \
figures/source-repos.png figures/yp-download.png \
figures/wip.png
else
TARFILES = mega-manual.html mega-style.css figures/yocto-environment.png figures/building-an-image.png \
figures/using-a-pre-built-image.png \
figures/poky-title.png \
figures/adt-title.png figures/bsp-title.png \
figures/kernel-title.png figures/kernel-architecture-overview.png \
figures/app-dev-flow.png figures/bsp-dev-flow.png figures/dev-title.png \
figures/git-workflow.png figures/index-downloads.png figures/kernel-dev-flow.png \
|
||||
figures/kernel-overview-1.png figures/kernel-overview-2-generic.png \
|
||||
figures/source-repos.png figures/yp-download.png
|
||||
endif
|
||||
|
||||
MANUALS = $(DOC)/$(DOC).html
|
||||
FIGURES = figures
|
||||
STYLESHEET = $(DOC)/*.css
|
||||
|
||||
endif
|
||||
|
||||
ifeq ($(DOC),poky-ref-manual)
|
||||
XSLTOPTS = --stringparam html.stylesheet ref-style.css \
|
||||
XSLTOPTS = --stringparam html.stylesheet style.css \
|
||||
--stringparam chapter.autolabel 1 \
|
||||
--stringparam appendix.autolabel A \
|
||||
--stringparam section.autolabel 1 \
|
||||
--stringparam section.label.includes.component.label 1 \
|
||||
--xinclude
|
||||
ALLPREQ = html pdf tarball
|
||||
TARFILES = poky-ref-manual.html ref-style.css figures/poky-title.png
|
||||
TARFILES = poky-ref-manual.html style.css figures/poky-title.png
|
||||
MANUALS = $(DOC)/$(DOC).html $(DOC)/$(DOC).pdf
|
||||
FIGURES = figures
|
||||
STYLESHEET = $(DOC)/*.css
|
||||
@@ -207,28 +137,28 @@ endif
|
||||
|
||||
|
||||
ifeq ($(DOC),adt-manual)
|
||||
XSLTOPTS = --stringparam html.stylesheet adt-style.css \
|
||||
XSLTOPTS = --stringparam html.stylesheet style.css \
|
||||
--stringparam chapter.autolabel 1 \
|
||||
--stringparam appendix.autolabel A \
|
||||
--stringparam section.autolabel 1 \
|
||||
--stringparam section.label.includes.component.label 1 \
|
||||
--xinclude
|
||||
ALLPREQ = html pdf tarball
|
||||
TARFILES = adt-manual.html adt-manual.pdf adt-style.css figures/adt-title.png
|
||||
TARFILES = adt-manual.html adt-manual.pdf style.css figures/adt-title.png
|
||||
MANUALS = $(DOC)/$(DOC).html $(DOC)/$(DOC).pdf
|
||||
FIGURES = figures
|
||||
STYLESHEET = $(DOC)/*.css
|
||||
endif
|
||||
|
||||
ifeq ($(DOC),kernel-manual)
|
||||
XSLTOPTS = --stringparam html.stylesheet kernel-style.css \
|
||||
XSLTOPTS = --stringparam html.stylesheet style.css \
|
||||
--stringparam chapter.autolabel 1 \
|
||||
--stringparam appendix.autolabel A \
|
||||
--stringparam section.autolabel 1 \
|
||||
--stringparam section.label.includes.component.label 1 \
|
||||
--xinclude
|
||||
ALLPREQ = html pdf tarball
|
||||
TARFILES = kernel-manual.html kernel-manual.pdf kernel-style.css figures/kernel-title.png figures/kernel-architecture-overview.png
|
||||
TARFILES = kernel-manual.html kernel-manual.pdf style.css figures/kernel-title.png figures/kernel-architecture-overview.png
|
||||
MANUALS = $(DOC)/$(DOC).html $(DOC)/$(DOC).pdf
|
||||
FIGURES = figures
|
||||
STYLESHEET = $(DOC)/*.css
|
||||
@@ -246,47 +176,17 @@ all: $(ALLPREQ)
|
||||
pdf:
|
||||
ifeq ($(DOC),yocto-project-qs)
|
||||
@echo " "
|
||||
@echo "ERROR: You cannot generate a yocto-project-qs PDF file."
|
||||
@echo "ERROR: You cannot generate a PDF file for the Yocto Project Quick Start"
|
||||
@echo " "
|
||||
|
||||
else ifeq ($(DOC),mega-manual)
|
||||
@echo " "
|
||||
@echo "ERROR: You cannot generate a mega-manual PDF file."
|
||||
@echo " "
|
||||
|
||||
else
|
||||
|
||||
cd $(DOC); ../tools/poky-docbook-to-pdf $(DOC).xml ../template; cd ..
|
||||
endif
|
||||
|
||||
html:
|
||||
ifeq ($(DOC),mega-manual)
|
||||
# See http://www.sagehill.net/docbookxsl/HtmlOutput.html
|
||||
@echo " "
|
||||
@echo "******** Building "$(DOC)
|
||||
@echo " "
|
||||
cd $(DOC); xsltproc $(XSLTOPTS) -o $(DOC).html $(DOC)-customization.xsl $(DOC).xml; cd ..
|
||||
@echo " "
|
||||
@echo "******** Using mega-manual.sed to process external links"
|
||||
@echo " "
|
||||
cd $(DOC); sed -f ../tools/mega-manual.sed < mega-manual.html > mega-output.html; cd ..
|
||||
@echo " "
|
||||
@echo "******** Cleaning up transient file mega-output.html"
|
||||
@echo " "
|
||||
cd $(DOC); rm mega-manual.html; mv mega-output.html mega-manual.html; cd ..
|
||||
else
|
||||
# See http://www.sagehill.net/docbookxsl/HtmlOutput.html
|
||||
@echo " "
|
||||
@echo "******** Building "$(DOC)
|
||||
@echo " "
|
||||
cd $(DOC); xsltproc $(XSLTOPTS) -o $(DOC).html $(DOC)-customization.xsl $(DOC).xml; cd ..
|
||||
endif
|
||||
|
||||
|
||||
tarball: html
|
||||
@echo " "
|
||||
@echo "******** Creating Tarball of document files"
|
||||
@echo " "
|
||||
cd $(DOC); tar -cvzf $(DOC).tgz $(TARFILES); cd ..
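# To spot-check the resulting archive (a sketch; assumes DOC=bsp-guide was built):
#   tar -tzf bsp-guide/bsp-guide.tgz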
|
||||
|
||||
validate:
|
||||
@@ -294,18 +194,8 @@ validate:
|
||||
|
||||
|
||||
publish:
|
||||
@if test -f $(DOC)/$(DOC).html; \
|
||||
then \
|
||||
echo " "; \
|
||||
echo "******** Publishing "$(DOC)".html"; \
|
||||
echo " "; \
|
||||
scp -r $(MANUALS) $(STYLESHEET) www.yoctoproject.org:/srv/www/www.yoctoproject.org-docs/$(VER)/$(DOC); \
|
||||
cd $(DOC); scp -r $(FIGURES) www.yoctoproject.org:/srv/www/www.yoctoproject.org-docs/$(VER)/$(DOC); \
|
||||
else \
|
||||
echo " "; \
|
||||
echo $(DOC)".html missing. Generate the file first then try again."; \
|
||||
echo " "; \
|
||||
fi
|
||||
scp -r $(MANUALS) $(STYLESHEET) www.yoctoproject.org:/srv/www/www.yoctoproject.org-docs/$(VER)/$(DOC)
|
||||
cd $(DOC); scp -r $(FIGURES) www.yoctoproject.org:/srv/www/www.yoctoproject.org-docs/$(VER)/$(DOC)/figures
|
||||
|
||||
clean:
|
||||
rm -f $(MANUALS); rm $(DOC)/$(DOC).*tgz;
|
||||
rm -f $(MANUALS)
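# Typical end-to-end usage (a sketch; any DOC value handled by the ifeq blocks
# above works the same way):
#   make DOC=kernel-manual all      # builds the html, pdf, and tarball prerequisites
#   make DOC=kernel-manual clean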
|
||||
|
||||
@@ -41,22 +41,8 @@ Folders exist for individual manuals as follows:
|
||||
* kernel-manual - The Yocto Project Kernel Architecture and Use Manual
|
||||
* poky-ref-manual - The Yocto Project Reference Manual
|
||||
* yocto-project-qs - The Yocto Project Quick Start
|
||||
* mega-manual - The aggregated manual comprised of all YP manuals and guides
|
||||
|
||||
Each folder is self-contained regarding content and figures. Note that there
|
||||
is a sed file needed to process the links of the mega-manual. The sed file
|
||||
is located in the tools directory. Also note that the figures folder in the
|
||||
mega-manual directory contains duplicates of all the figures found in the
directories for the individual YP manuals and guides.
|
||||
|
||||
If you want to find HTML versions of the Yocto Project manuals on the web,
|
||||
go to http://www.yoctoproject.org and click on the "Documentation" tab. From
|
||||
there you have access to archived documentation from previous releases, current
|
||||
documentation for the latest release, and "Docs in Progress" for the release
|
||||
currently being developed.
|
||||
|
||||
In general, the Yocto Project site (http://www.yoctoproject.org) is a great
|
||||
reference for both information and downloads.
|
||||
Each folder is self-contained regarding content and figures.
|
||||
|
||||
Makefile
|
||||
========
|
||||
@@ -85,10 +71,7 @@ Contains various templates, fonts, and some old PNG files.
|
||||
|
||||
tools
|
||||
=====
|
||||
Contains a tool to convert the DocBook files to PDF format. This folder also
|
||||
contains the mega-manual.sed file, which is used by Makefile to process
|
||||
cross-references from within the manual that normally go to an external
|
||||
manual.
|
||||
Contains a tool to convert the DocBook files to PDF format.
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -8,9 +8,9 @@
|
||||
<para>
|
||||
Recall that earlier the manual discussed how to use an existing toolchain
|
||||
tarball that had been installed into <filename>/opt/poky</filename>,
|
||||
which is outside of the
|
||||
<ulink url='&YOCTO_DOCS_DEV_URL;#build-directory'>Build Directory</ulink>
|
||||
(see the section "<link linkend='using-an-existing-toolchain-tarball'>Using a Cross-Toolchain Tarball)</link>".
|
||||
which is outside of the Yocto Project build tree
|
||||
(see the section "<link linkend='using-an-existing-toolchain-tarball'>Using an Existing
|
||||
Toolchain Tarball)</link>".
|
||||
And, that sourcing your architecture-specific environment setup script
|
||||
initializes a suitable cross-toolchain development environment.
|
||||
During the setup, locations for the compiler, QEMU scripts, QEMU binary,
|
||||
@@ -21,7 +21,7 @@
|
||||
for example, <filename>configure.sh</filename> can find pre-generated
|
||||
test results for tests that need target hardware on which to run.
|
||||
These conditions allow you to easily use the toolchain outside of the
|
||||
OpenEmbedded build environment on both autotools-based projects and
|
||||
Yocto Project build environment on both autotools-based projects and
|
||||
Makefile-based projects.
|
||||
</para>
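<para>
    As a sketch of that sourcing step (the script name varies by target
    architecture; this one assumes an ARM EABI toolchain installed under
    <filename>/opt/poky</filename>):
    <literallayout class='monospaced'>
$ source /opt/poky/1.1/environment-setup-armv5te-poky-linux-gnueabi
    </literallayout>
</para>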
|
||||
|
||||
@@ -32,7 +32,7 @@
|
||||
For an Autotools-based project, you can use the cross-toolchain by just
|
||||
passing the appropriate host option to <filename>configure.sh</filename>.
|
||||
The host option you use is derived from the name of the environment setup
|
||||
script in <filename>/opt/poky</filename> resulting from installation of the
|
||||
script in <filename>/opt/poky</filename> resulting from unpacking the
|
||||
cross-toolchain tarball.
|
||||
For example, the host option for an ARM-based target that uses the GNU EABI
|
||||
is <filename>armv5te-poky-linux-gnueabi</filename>.
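<para>
    A sketch of how that host option is passed (the project being configured is
    illustrative):
    <literallayout class='monospaced'>
$ ./configure --host=armv5te-poky-linux-gnueabi
    </literallayout>
</para>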
|
||||
|
||||
documentation/adt-manual/adt-eclipse.xml (new file, 736 lines)
@@ -0,0 +1,736 @@
|
||||
<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
|
||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd"
|
||||
[<!ENTITY % poky SYSTEM "../poky.ent"> %poky; ] >
|
||||
|
||||
<chapter id='adt-eclipse'>
|
||||
<title>Working Within Eclipse</title>
|
||||
|
||||
<para>
|
||||
The Eclipse IDE is a popular development environment and it fully supports
|
||||
development using the Yocto Project.
|
||||
When you install and configure the Eclipse Yocto Project Plug-in into
|
||||
the Eclipse IDE, you maximize your Yocto Project design experience.
|
||||
Installing and configuring the Plug-in results in an environment that
|
||||
has extensions specifically designed to let you more easily develop software.
|
||||
These extensions allow for cross-compilation, deployment, and execution of
|
||||
your output into a QEMU emulation session.
|
||||
You can also perform cross-debugging and profiling.
|
||||
The environment also supports a suite of tools that allows you to perform
|
||||
remote profiling, tracing, collection of power data, collection of
|
||||
latency data, and collection of performance data.
|
||||
</para>
|
||||
<para>
|
||||
This section describes how to install and configure the Eclipse IDE
|
||||
Yocto Plug-in and how to use it to develop your Yocto Project.
|
||||
</para>
|
||||
|
||||
<section id='setting-up-the-eclipse-ide'>
|
||||
<title>Setting Up the Eclipse IDE</title>
|
||||
|
||||
<para>
|
||||
To develop within the Eclipse IDE, you need to do the following:
|
||||
<orderedlist>
|
||||
<listitem><para>Install the optimal version of the Eclipse IDE.</para></listitem>
|
||||
<listitem><para>Configure the Eclipse IDE.</para></listitem>
|
||||
<listitem><para>Install the Eclipse Yocto Plug-in.</para></listitem>
|
||||
<listitem><para>Configure the Eclipse Yocto Plug-in.</para></listitem>
|
||||
</orderedlist>
|
||||
</para>
|
||||
|
||||
<section id='installing-eclipse-ide'>
|
||||
<title>Installing the Eclipse IDE</title>
|
||||
|
||||
<para>
|
||||
It is recommended that you have the Indigo 3.7.2 version of the
|
||||
Eclipse IDE installed on your development system.
|
||||
If you don’t have this version, you can find it at
|
||||
<ulink url='&ECLIPSE_MAIN_URL;'></ulink>.
|
||||
From that site, choose the Eclipse Classic version particular to your development
|
||||
host.
|
||||
This version contains the Eclipse Platform, the Java Development
|
||||
Tools (JDT), and the Plug-in Development Environment.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Once you have downloaded the tarball, extract it into a clean
|
||||
directory.
|
||||
For example, the following commands unpack and install the Eclipse IDE
|
||||
tarball found in the <filename>Downloads</filename> area
|
||||
into a clean directory using the default name <filename>eclipse</filename>:
|
||||
<literallayout class='monospaced'>
|
||||
$ cd ~
|
||||
$ tar -xzvf ~/Downloads/eclipse-SDK-3.7.2-linux-gtk-x86_64.tar.gz
|
||||
</literallayout>
|
||||
</para>
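<para>
    Once unpacked, you can start the IDE from the new directory (a sketch, assuming
    the default <filename>eclipse</filename> directory name used above):
    <literallayout class='monospaced'>
$ ~/eclipse/eclipse &amp;
    </literallayout>
</para>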
|
||||
|
||||
<para>
|
||||
One issue exists that you need to be aware of regarding the Java
Virtual Machine's garbage collection (GC) process.
|
||||
The GC process does not clean up the permanent generation
|
||||
space (PermGen).
|
||||
This space stores metadata descriptions of classes.
|
||||
The default value is set too small and it could trigger an
|
||||
out-of-memory error such as the following:
|
||||
<literallayout class='monospaced'>
|
||||
java.lang.OutOfMemoryError: PermGen space
|
||||
</literallayout>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
This error causes the application to hang.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
To fix this issue, you can use the <filename>-vmargs</filename>
option when you start Eclipse to increase the size of the permanent generation space:
|
||||
<literallayout class='monospaced'>
|
||||
eclipse -vmargs -XX:PermSize=256M
|
||||
</literallayout>
|
||||
</para>
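<para>
    A persistent alternative (an assumption, not covered by this manual): the same
    VM argument can usually be appended to the <filename>eclipse.ini</filename>
    file that ships with Eclipse, after its <filename>-vmargs</filename> marker:
    <literallayout class='monospaced'>
-vmargs
-XX:PermSize=256M
    </literallayout>
</para>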
|
||||
</section>
|
||||
|
||||
<section id='configuring-the-eclipse-ide'>
|
||||
<title>Configuring the Eclipse IDE</title>
|
||||
|
||||
<para>
|
||||
Before installing and configuring the Eclipse Yocto Plug-in, you need to configure
|
||||
the Eclipse IDE.
|
||||
Follow these general steps to configure Eclipse:
|
||||
<orderedlist>
|
||||
<listitem><para>Start the Eclipse IDE.</para></listitem>
|
||||
<listitem><para>Make sure you are in your Workbench and select
|
||||
"Install New Software" from the "Help" pull-down menu.
|
||||
</para></listitem>
|
||||
<listitem><para>Select <filename>indigo - &ECLIPSE_INDIGO_URL;</filename>
|
||||
from the "Work with:" pull-down menu.</para></listitem>
|
||||
<listitem><para>Expand the box next to <filename>Programming Languages</filename>
|
||||
and select the <filename>Autotools Support for CDT (incubation)</filename>
|
||||
and <filename>C/C++ Development Tools</filename> boxes.</para></listitem>
|
||||
<listitem><para>Expand the box next to "Linux Tools" and select the
|
||||
"LTTng - Linux Tracing Toolkit(incubation)" boxes.</para></listitem>
|
||||
<listitem><para>Complete the installation and restart the Eclipse IDE.</para></listitem>
|
||||
<listitem><para>After the Eclipse IDE restarts and from the Workbench, select
|
||||
"Install New Software" from the "Help" pull-down menu.</para></listitem>
|
||||
<listitem><para>Click the
|
||||
"Available Software Sites" link.</para></listitem>
|
||||
<listitem><para>Check the box next to
|
||||
<filename>&ECLIPSE_UPDATES_URL;</filename>
|
||||
and click "OK".</para></listitem>
|
||||
<listitem><para>Select <filename>&ECLIPSE_UPDATES_URL;</filename>
|
||||
from the "Work with:" pull-down menu.</para></listitem>
|
||||
<listitem><para>Check the box next to <filename>TM and RSE Main Features</filename>.
|
||||
</para></listitem>
|
||||
<listitem><para>Expand the box next to <filename>TM and RSE Optional Add-ons</filename>
|
||||
and select every item except <filename>RSE Unit Tests</filename> and
|
||||
<filename>RSE WinCE Services (incubation)</filename>.</para></listitem>
|
||||
<listitem><para>Complete the installation and restart the Eclipse IDE.</para></listitem>
|
||||
<listitem><para>If necessary, select
|
||||
"Install New Software" from the "Help" pull-down menu so you can click the
|
||||
"Available Software Sites" link again.</para></listitem>
|
||||
<listitem><para>After clicking "Available Software Sites", check the box next to
|
||||
<filename>http://download.eclipse.org/tools/cdt/releases/indigo</filename>
|
||||
and click "OK".</para></listitem>
|
||||
<listitem><para>Select <filename>&ECLIPSE_INDIGO_CDT_URL;</filename>
|
||||
from the "Work with:" pull-down menu.</para></listitem>
|
||||
<listitem><para>Check the box next to <filename>CDT Main Features</filename>.
|
||||
</para></listitem>
|
||||
<listitem><para>Expand the box next to <filename>CDT Optional Features</filename>
|
||||
and select <filename>C/C++ Remote Launch</filename> and
|
||||
<filename>Target Communication Framework (incubation)</filename>.</para></listitem>
|
||||
<listitem><para>Complete the installation and restart the Eclipse IDE.</para></listitem>
|
||||
</orderedlist>
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='installing-the-eclipse-yocto-plug-in'>
|
||||
<title>Installing or Accessing the Eclipse Yocto Plug-in</title>
|
||||
|
||||
<para>
|
||||
You can install the Eclipse Yocto Plug-in into the Eclipse IDE
|
||||
one of two ways: use the Yocto Project update site to install the pre-built plug-in,
|
||||
or build and install the plug-in from the latest source code.
|
||||
If you don't want to permanently install the plug-in but just want to try it out
|
||||
within the Eclipse environment, you can import the plug-in project from the
|
||||
Yocto Project source repositories.
|
||||
</para>
|
||||
|
||||
<section id='new-software'>
|
||||
<title>Installing the Pre-built Plug-in from the Yocto Project Eclipse Update Site</title>
|
||||
|
||||
<para>
|
||||
To install the Eclipse Yocto Plug-in from the update site,
|
||||
follow these steps:
|
||||
<orderedlist>
|
||||
<listitem><para>Start up the Eclipse IDE.</para></listitem>
|
||||
<listitem><para>In Eclipse, select "Install New Software" from the "Help" menu.</para></listitem>
|
||||
<listitem><para>Click "Add..." in the "Work with:" area.</para></listitem>
|
||||
<listitem><para>Enter
|
||||
<filename>&ECLIPSE_DL_PLUGIN_URL;</filename>
|
||||
in the URL field and provide a meaningful name in the "Name" field.</para></listitem>
|
||||
<listitem><para>Click "OK" to have the entry added to the "Work with:"
|
||||
drop-down list.</para></listitem>
|
||||
<listitem><para>Select the entry for the plug-in from the "Work with:" drop-down
|
||||
list.</para></listitem>
|
||||
<listitem><para>Check the box next to <filename>Development tools and SDKs for Yocto Linux</filename>.
|
||||
</para></listitem>
|
||||
<listitem><para>Complete the remaining software installation steps and
|
||||
then restart the Eclipse IDE to finish the installation of the plug-in.
|
||||
</para></listitem>
|
||||
</orderedlist>
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='zip-file-method'>
|
||||
<title>Installing the Plug-in Using the Latest Source Code</title>
|
||||
<para>
|
||||
To install the Eclipse Yocto Plug-in from the latest source code, follow these steps:
|
||||
<orderedlist>
|
||||
<listitem><para>Open a shell and create a Git repository with:
|
||||
<literallayout class='monospaced'>
|
||||
$ git clone git://git.yoctoproject.org/eclipse-poky yocto-eclipse
|
||||
</literallayout>
|
||||
For this example, the repository is named
|
||||
<filename>~/yocto-eclipse</filename>.</para></listitem>
|
||||
<listitem><para>Locate the <filename>build.sh</filename> script in the
|
||||
Git repository you created in the previous step.
|
||||
The script is located in the <filename>scripts</filename> directory.</para></listitem>
|
||||
<listitem><para>Be sure to set and export the <filename>ECLIPSE_HOME</filename> environment
|
||||
variable to the top-level directory in which you installed the Indigo
|
||||
version of Eclipse.
|
||||
For example, if your Eclipse directory is <filename>$HOME/eclipse</filename>,
|
||||
use the following:
|
||||
<literallayout class='monospaced'>
|
||||
$ export ECLIPSE_HOME=$HOME/eclipse
|
||||
</literallayout></para></listitem>
|
||||
<listitem><para>Run the <filename>build.sh</filename> script and provide the
|
||||
name of the Git branch along with the Yocto Project release you are
|
||||
using.
|
||||
Here is an example that uses the <filename>master</filename> Git branch
|
||||
and the <filename>1.1M4</filename> release:
|
||||
<literallayout class='monospaced'>
|
||||
$ scripts/build.sh master 1.1M4
|
||||
</literallayout>
|
||||
After running the script, the file
|
||||
<filename>org.yocto.sdk-<release>-<date>-archive.zip</filename>
|
||||
is in the current directory.</para></listitem>
|
||||
<listitem><para>If necessary, start the Eclipse IDE and be sure you are in the
|
||||
Workbench.</para></listitem>
|
||||
<listitem><para>Select "Install New Software" from the "Help" pull-down menu.
|
||||
</para></listitem>
|
||||
<listitem><para>Click "Add".</para></listitem>
|
||||
<listitem><para>Provide anything you want in the "Name" field.</para></listitem>
|
||||
<listitem><para>Click "Archive" and browse to the ZIP file you built
|
||||
in step four.
|
||||
This ZIP file should not be "unzipped", and must be the
|
||||
<filename>*archive.zip</filename> file created by running the
|
||||
<filename>build.sh</filename> script.</para></listitem>
|
||||
<listitem><para>Check the box next to the new entry in the installation window and complete
|
||||
the installation.</para></listitem>
|
||||
<listitem><para>Restart the Eclipse IDE if necessary.</para></listitem>
|
||||
</orderedlist>
|
||||
</para>
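<para>
    The shell portion of the preceding steps, condensed into a single sketch
    (assuming Eclipse is installed in <filename>$HOME/eclipse</filename>):
    <literallayout class='monospaced'>
$ git clone git://git.yoctoproject.org/eclipse-poky yocto-eclipse
$ export ECLIPSE_HOME=$HOME/eclipse
$ cd yocto-eclipse
$ scripts/build.sh master 1.1M4
    </literallayout>
</para>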
|
||||
|
||||
<para>
|
||||
At this point you should be able to configure the Eclipse Yocto Plug-in as described in the
|
||||
"<link linkend='configuring-the-eclipse-yocto-plug-in'>Configuring the Eclipse Yocto Plug-in</link>"
|
||||
section.</para>
|
||||
</section>
|
||||
|
||||
<section id='yocto-project-source'>
|
||||
<title>Importing the Plug-in Project into the Eclipse Environment</title>
|
||||
<para>
|
||||
Importing the Eclipse Yocto Plug-in project from the Yocto Project source repositories
|
||||
is useful when you want to try out the latest plug-in from the tip of the plug-in's
|
||||
development tree.
|
||||
It is important to understand that when you import the plug-in you are not installing
|
||||
it into the Eclipse application.
|
||||
Rather, you are importing the project and just using it.
|
||||
To import the plug-in project, follow these steps:
|
||||
<orderedlist>
|
||||
<listitem><para>Open a shell and create a Git repository with:
|
||||
<literallayout class='monospaced'>
|
||||
$ git clone git://git.yoctoproject.org/eclipse-poky yocto-eclipse
|
||||
</literallayout>
|
||||
For this example, the repository is named
|
||||
<filename>~/yocto-eclipse</filename>.</para></listitem>
|
||||
<listitem><para>In Eclipse, select "Import" from the "File" menu.</para></listitem>
|
||||
<listitem><para>Expand the "General" box and select "existing projects into workspace"
|
||||
and then click "Next".</para></listitem>
|
||||
<listitem><para>Select the root directory and browse to
|
||||
<filename>~/yocto-eclipse/plugins</filename>.</para></listitem>
|
||||
<listitem><para>Three plug-ins exist: "org.yocto.bc.ui", "org.yocto.sdk.ide", and
|
||||
"org.yocto.sdk.remotetools".
|
||||
Select and import all of them.</para></listitem>
|
||||
</orderedlist>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The left navigation pane in the Eclipse application shows the default projects.
|
||||
Right-click on one of these projects and run it as an Eclipse application.
|
||||
This brings up a second instance of the Eclipse IDE that has the Yocto Plug-in.
|
||||
</para>
|
||||
</section>
|
||||
</section>
|
||||
|
||||
<section id='configuring-the-eclipse-yocto-plug-in'>
|
||||
<title>Configuring the Eclipse Yocto Plug-in</title>
|
||||
|
||||
<para>
|
||||
Configuring the Eclipse Yocto Plug-in involves setting the Cross
|
||||
Compiler options and the Target options.
|
||||
The configurations you choose become the default settings for all projects.
|
||||
You do have opportunities to change them later when
|
||||
you configure the project (see the following section).
|
||||
</para>
|
||||
|
||||
<para>
|
||||
To start, you need to do the following from within the Eclipse IDE:
|
||||
<itemizedlist>
|
||||
<listitem><para>Choose <filename>Window -> Preferences</filename> to display
|
||||
the <filename>Preferences</filename> Dialog</para></listitem>
|
||||
<listitem><para>Click <filename>Yocto ADT</filename></para></listitem>
|
||||
</itemizedlist>
|
||||
</para>
|
||||
|
||||
<section id='configuring-the-cross-compiler-options'>
|
||||
<title>Configuring the Cross-Compiler Options</title>
|
||||
|
||||
<para>
|
||||
To configure the Cross Compiler Options, you must select the type of toolchain,
|
||||
point to the toolchain, specify the sysroot location, and select the target architecture.
|
||||
<itemizedlist>
|
||||
<listitem><para><emphasis>Selecting the Toolchain Type:</emphasis>
|
||||
Choose between <filename>Standalone pre-built toolchain</filename>
|
||||
and <filename>Build system derived toolchain</filename> for Cross
|
||||
Compiler Options.
|
||||
<itemizedlist>
|
||||
<listitem><para><emphasis>
|
||||
<filename>Standalone Pre-built Toolchain:</filename></emphasis>
|
||||
Select this mode when you are using a stand-alone cross-toolchain.
|
||||
For example, suppose you are an application developer and do not
|
||||
need to build a target image.
|
||||
Instead, you just want to use an architecture-specific toolchain on an
|
||||
existing kernel and target root filesystem.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>
|
||||
<filename>Build System Derived Toolchain:</filename></emphasis>
|
||||
Select this mode if the cross-toolchain has been installed and built
|
||||
as part of the Yocto Project build tree.
|
||||
When you select <filename>Build system derived toolchain</filename>,
|
||||
you are using the toolchain bundled
|
||||
inside the Yocto Project build tree.
|
||||
</para></listitem>
|
||||
</itemizedlist>
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>Point to the Toolchain:</emphasis>
|
||||
If you are using a stand-alone pre-built toolchain, you should be pointing to the
|
||||
<filename>/opt/poky/1.1</filename> directory.
|
||||
This is the location for toolchains installed by the ADT Installer or by hand.
|
||||
Sections "<link linkend='configuring-and-running-the-adt-installer-script'>Configuring
|
||||
and Running the ADT Installer Script</link>" and
|
||||
"<link linkend='using-an-existing-toolchain-tarball'>Using a Cross-Toolchain
|
||||
Tarball</link>" describe two ways to install
|
||||
a stand-alone cross-toolchain in the
|
||||
<filename>/opt/poky</filename> directory.
|
||||
<note>It is possible to install a stand-alone cross-toolchain in a directory
|
||||
other than <filename>/opt/poky</filename>.
|
||||
However, doing so is discouraged.</note></para>
|
||||
<para>If you are using a system-derived toolchain, the path you provide
|
||||
for the <filename>Toolchain Root Location</filename>
|
||||
field is the Yocto Project's build directory.
|
||||
See section "<link linkend='using-the-toolchain-from-within-the-build-tree'>Using
|
||||
BitBake and the Yocto Project Build Tree</link>" for
|
||||
information on how to install the toolchain into the Yocto
|
||||
Project build tree.</para></listitem>
|
||||
<listitem><para><emphasis>Specify the Sysroot Location:</emphasis>
|
||||
This location is where the root filesystem for the
|
||||
target hardware is created on the development system by the ADT Installer.
|
||||
The QEMU user-space tools, the
|
||||
NFS boot process, and the cross-toolchain all use the sysroot location.
|
||||
</para></listitem>
|
||||
<listitem><para><emphasis>Select the Target Architecture:</emphasis>
|
||||
The target architecture is the type of hardware you are
|
||||
going to use or emulate.
|
||||
Use the pull-down <filename>Target Architecture</filename> menu to make
|
||||
your selection.
|
||||
The pull-down menu should have the supported architectures.
|
||||
If the architecture you need is not listed in the menu, you
|
||||
will need to build the image.
|
||||
See the "<ulink url='&YOCTO_DOCS_QS_URL;#building-image'>Building an Image</ulink>" section
|
||||
of The Yocto Project Quick Start for more information.</para></listitem>
|
||||
</itemizedlist>
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='configuring-the-target-options'>
|
||||
<title>Configuring the Target Options</title>
|
||||
|
||||
<para>
|
||||
You can choose to emulate hardware using the QEMU emulator, or you
|
||||
can choose to run your image on actual hardware.
|
||||
<itemizedlist>
|
||||
<listitem><para><emphasis><filename>QEMU:</filename></emphasis> Select this option if
|
||||
you will be using the QEMU emulator.
|
||||
If you are using the emulator, you also need to locate the kernel
|
||||
and specify any custom options.</para>
|
||||
<para>If you selected <filename>Build system derived toolchain</filename>,
|
||||
the target kernel you built will be located in the
|
||||
Yocto Project build tree in <filename>tmp/deploy/images</filename> directory.
|
||||
If you selected <filename>Standalone pre-built toolchain</filename>, the
|
||||
pre-built image you downloaded is located
|
||||
in the directory you specified when you downloaded the image.</para>
|
||||
<para>Most custom options are for advanced QEMU users to further
|
||||
customize their QEMU instance.
|
||||
These options are specified between paired angled brackets.
|
||||
Some options must be specified outside the brackets.
|
||||
In particular, the options <filename>serial</filename>,
|
||||
<filename>nographic</filename>, and <filename>kvm</filename> must all
|
||||
be outside the brackets.
|
||||
Use the <filename>man qemu</filename> command to get help on all the options
|
||||
and their use.
|
||||
The following is an example; a fuller sketch follows this list:
|
||||
<literallayout class='monospaced'>
|
||||
serial '<-m 256 -full-screen>'
|
||||
</literallayout></para>
|
||||
<para>
|
||||
Regardless of the mode, Sysroot is already defined as part of the
|
||||
Cross Compiler Options configuration in the
|
||||
<filename>Sysroot Location:</filename> field.</para></listitem>
|
||||
<listitem><para><emphasis><filename>External HW:</filename></emphasis> Select this option
|
||||
if you will be using actual hardware.</para></listitem>
|
||||
</itemizedlist>
|
||||
</para>
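<para>
    For QEMU, a slightly fuller sketch of the custom options field (values are
    illustrative only): options such as <filename>serial</filename> and
    <filename>nographic</filename> sit outside the angled brackets, while anything
    inside the brackets is handed straight through to QEMU:
    <literallayout class='monospaced'>
serial nographic '<-m 512>'
    </literallayout>
</para>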
|
||||
|
||||
<para>
|
||||
Click the <filename>OK</filename> button to save your plug-in configurations.
|
||||
</para>
|
||||
</section>
|
||||
</section>
|
||||
</section>
|
||||
|
||||
<section id='creating-the-project'>
|
||||
<title>Creating the Project</title>
|
||||
|
||||
<para>
|
||||
You can create two types of projects: Autotools-based, or Makefile-based.
|
||||
This section describes how to create Autotools-based projects from within
|
||||
the Eclipse IDE.
|
||||
For information on creating Makefile-based projects in a terminal window, see the section
|
||||
"<link linkend='using-the-command-line'>Using the Command Line</link>".
|
||||
</para>
|
||||
|
||||
<para>
|
||||
To create a project based on a Yocto template and then display the source code,
|
||||
follow these steps:
|
||||
<orderedlist>
|
||||
<listitem><para>Select <filename>File -> New -> Project</filename>.</para></listitem>
|
||||
<listitem><para>Double click <filename>C/C++</filename>.</para></listitem>
|
||||
<listitem><para>Double click <filename>C Project</filename> to create the project.</para></listitem>
|
||||
<listitem><para>Expand <filename>Yocto ADT Project</filename>.</para></listitem>
|
||||
<listitem><para>Select <filename>Hello World ANSI C Autotools Project</filename>.
|
||||
This is an Autotools-based project based on a Yocto Project template.</para></listitem>
|
||||
<listitem><para>Put a name in the <filename>Project name:</filename> field.
|
||||
Do not use hyphens as part of the name.</para></listitem>
|
||||
<listitem><para>Click <filename>Next</filename>.</para></listitem>
|
||||
<listitem><para>Add information in the <filename>Author</filename> and
|
||||
<filename>Copyright notice</filename> fields.</para></listitem>
|
||||
<listitem><para>Be sure the <filename>License</filename> field is correct.</para></listitem>
|
||||
<listitem><para>Click <filename>Finish</filename>.</para></listitem>
|
||||
<listitem><para>If the "open perspective" prompt appears, click "Yes" so that you
|
||||
in the C/C++ perspective.</para></listitem>
|
||||
<listitem><para>The left-hand navigation pane shows your project.
|
||||
You can display your source by double clicking the project's source file.
|
||||
</para></listitem>
|
||||
</orderedlist>
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='configuring-the-cross-toolchains'>
|
||||
<title>Configuring the Cross-Toolchains</title>
|
||||
|
||||
<para>
|
||||
The earlier section, "<link linkend='configuring-the-eclipse-yocto-plug-in'>Configuring
|
||||
the Eclipse Yocto Plug-in</link>", sets up the default project
|
||||
configurations.
|
||||
You can override these settings for a given project by following these steps:
|
||||
<orderedlist>
|
||||
<listitem><para>Select <filename>Project -> Change Yocto Project Settings</filename>:
|
||||
This selection brings up the <filename>Project Yocto Settings</filename> Dialog
|
||||
and allows you to make changes specific to an individual project.
|
||||
</para>
|
||||
<para>By default, the Cross Compiler Options and Target Options for a project
|
||||
are inherited from settings you provide using the <filename>Preferences</filename>
|
||||
Dialog as described earlier
|
||||
in the "<link linkend='configuring-the-eclipse-yocto-plug-in'>Configuring the Eclipse
|
||||
Yocto Plug-in</link>" section.
|
||||
The <filename>Project Yocto Settings</filename>
|
||||
Dialog allows you to override those default settings
|
||||
for a given project.</para></listitem>
|
||||
<listitem><para>Make your configurations for the project and click "OK".</para></listitem>
|
||||
<listitem><para>Select <filename>Project -> Reconfigure Project</filename>:
|
||||
This selection reconfigures the project by running
|
||||
<filename>autogen.sh</filename> in the workspace for your project.
|
||||
The script also runs <filename>libtoolize</filename>, <filename>aclocal</filename>,
|
||||
<filename>autoconf</filename>, <filename>autoheader</filename>,
|
||||
<filename>automake -a</filename>, and
|
||||
<filename>./configure</filename>.
|
||||
Click on the <filename>Console</filename> tab beneath your source code to
|
||||
see the results of reconfiguring your project.</para></listitem>
|
||||
</orderedlist>
|
||||
</para>
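<para>
    Only as a sketch of the sequence just listed, the reconfigure step is roughly
    equivalent to running the following in the project directory:
    <literallayout class='monospaced'>
$ libtoolize &amp;&amp; aclocal &amp;&amp; autoconf &amp;&amp; autoheader &amp;&amp; automake -a &amp;&amp; ./configure
    </literallayout>
</para>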
|
||||
</section>
|
||||
|
||||
<section id='building-the-project'>
|
||||
<title>Building the Project</title>
|
||||
|
||||
<para>
|
||||
To build the project, select <filename>Project -> Build Project</filename>.
|
||||
The console should update and you can note the cross-compiler you are using.
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='starting-qemu-in-user-space-nfs-mode'>
|
||||
<title>Starting QEMU in User Space NFS Mode</title>
|
||||
|
||||
<para>
|
||||
To start the QEMU emulator from within Eclipse, follow these steps:
|
||||
<orderedlist>
|
||||
<listitem><para>Expose the <filename>Run -> External Tools</filename> menu.
|
||||
Your image should appear as a selectable menu item.
|
||||
</para></listitem>
|
||||
<listitem><para>Select your image from the menu to launch the
|
||||
emulator in a new window.</para></listitem>
|
||||
<listitem><para>If needed, enter your host root password in the shell window at the prompt.
|
||||
This sets up a <filename>Tap 0</filename> connection needed for running in user-space
|
||||
NFS mode.</para></listitem>
|
||||
<listitem><para>Wait for QEMU to launch.</para></listitem>
|
||||
<listitem><para>Once QEMU launches, you can begin operating within that
|
||||
environment.
|
||||
For example, you could determine the IP Address
|
||||
for the user-space NFS by using the <filename>ifconfig</filename> command.
|
||||
</para></listitem>
|
||||
</orderedlist>
|
||||
</para>
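<para>
    For example (a sketch; the interface name depends on your image), from the
    QEMU console:
    <literallayout class='monospaced'>
# ifconfig eth0
    </literallayout>
    The <filename>inet addr</filename> value reported is the IP Address you supply
    when setting up the debug connection in the next section.
</para>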
|
||||
</section>
|
||||
|
||||
<section id='deploying-and-debugging-the-application'>
|
||||
<title>Deploying and Debugging the Application</title>
|
||||
|
||||
<para>
|
||||
Once the QEMU emulator is running the image, using the Eclipse IDE
|
||||
you can deploy your application and use the emulator to perform debugging.
|
||||
Follow these steps to deploy the application.
|
||||
<orderedlist>
|
||||
<listitem><para>Select <filename>Run -> Debug Configurations...</filename></para></listitem>
|
||||
<listitem><para>In the left area, expand <filename>C/C++ Remote Application</filename>.</para></listitem>
|
||||
<listitem><para>Locate your project and select it to bring up a new
|
||||
tabbed view in the <filename>Debug Configurations</filename> Dialog.</para></listitem>
|
||||
<listitem><para>Enter the absolute path into which you want to deploy
|
||||
the application.
|
||||
Use the <filename>Remote Absolute File Path for C/C++ Application:</filename> field.
|
||||
For example, enter <filename>/usr/bin/<programname></filename>.</para></listitem>
|
||||
<listitem><para>Click on the <filename>Debugger</filename> tab to see the cross-tool debugger
|
||||
you are using.</para></listitem>
|
||||
<listitem><para>Click on the <filename>Main</filename> tab.</para></listitem>
|
||||
<listitem><para>Create a new connection to the QEMU instance
|
||||
by clicking on <filename>new</filename>.</para></listitem>
|
||||
<listitem><para>Select <filename>TCF</filename>, which means Target Communication
|
||||
Framework.</para></listitem>
|
||||
<listitem><para>Click <filename>Next</filename>.</para></listitem>
|
||||
<listitem><para>Clear out the <filename>host name</filename> field and enter the IP Address
|
||||
determined earlier.</para></listitem>
|
||||
<listitem><para>Click <filename>Finish</filename> to close the
|
||||
<filename>New Connections</filename> Dialog.</para></listitem>
|
||||
<listitem><para>Use the drop-down menu now in the <filename>Connection</filename> field and pick
|
||||
the IP Address you entered.</para></listitem>
|
||||
<listitem><para>Click <filename>Debug</filename> to bring up a login screen
|
||||
and log in.</para></listitem>
|
||||
<listitem><para>Accept the debug perspective.</para></listitem>
|
||||
</orderedlist>
|
||||
</para>
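<para>
    As a quick sanity check (a sketch; the path matches the example above), you can
    confirm the deployed binary from a shell on the target:
    <literallayout class='monospaced'>
# ls -l /usr/bin/<programname>
    </literallayout>
</para>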
|
||||
</section>
|
||||
|
||||
<section id='running-user-space-tools'>
|
||||
<title>Running User-Space Tools</title>
|
||||
|
||||
<para>
|
||||
As mentioned earlier in the manual, several tools exist that enhance
|
||||
your development experience.
|
||||
These tools are aids in developing and debugging applications and images.
|
||||
You can run these user-space tools from within the Eclipse IDE through the
|
||||
<filename>YoctoTools</filename> menu.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Once you pick a tool, you need to configure it for the remote target.
|
||||
Every tool needs to have the connection configured.
|
||||
You must select an existing TCF-based RSE connection to the remote target.
|
||||
If one does not exist, click <filename>New</filename> to create one.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Here are some specifics about the remote tools:
|
||||
<itemizedlist>
|
||||
<listitem><para><emphasis><filename>OProfile</filename>:</emphasis> Selecting this tool launches
<filename>oprofile-viewer</filename> on the local host machine, which connects to the
<filename>oprofile-server</filename> running on the remote target.
To use the tool, the <filename>oprofile-viewer</filename> must be installed on the
local host machine and the <filename>oprofile-server</filename> must be installed
on the remote target.
|
||||
You must compile and install the <filename>oprofile-viewer</filename> from the source code
|
||||
on your local host machine.
|
||||
Furthermore, in order to convert the target's sample format data into a form that the
|
||||
host can use, you must have <filename>oprofile</filename> version 0.9.4 or
|
||||
greater installed on the host.</para>
|
||||
<para>You can locate both the viewer and server from
|
||||
<ulink url='&YOCTO_GIT_URL;/cgit/cgit.cgi/oprofileui/'></ulink>.
|
||||
<note>The <filename>oprofile-server</filename> is installed by default on
|
||||
the <filename>core-image-sato-sdk</filename> image.</note></para></listitem>
|
||||
<listitem><para><emphasis><filename>Lttng-ust</filename>:</emphasis> Selecting this tool runs
|
||||
<filename>usttrace</filename> on the remote target, transfers the output data back
|
||||
to the local host machine, and uses the <filename>lttng</filename> Eclipse plug-in to
|
||||
graphically display the output.
|
||||
For information on how to use <filename>lttng</filename> to trace an application, see
|
||||
<ulink url='http://lttng.org/files/ust/manual/ust.html'></ulink>.</para>
|
||||
<para>For <filename>Application</filename>, you must supply the absolute path name of the
|
||||
application to be traced by user mode <filename>lttng</filename>.
|
||||
For example, typing <filename>/path/to/foo</filename> triggers
|
||||
<filename>usttrace /path/to/foo</filename> on the remote target to trace the
|
||||
program <filename>/path/to/foo</filename>.</para>
|
||||
<para><filename>Argument</filename> is passed to <filename>usttrace</filename>
|
||||
running on the remote target.</para>
|
||||
<para>Before you use the <filename>lttng-ust</filename> tool, you need to set up
|
||||
the <filename>lttng</filename> Eclipse plug-in and create a <filename>lttng</filename>
|
||||
project.
|
||||
Do the following:
|
||||
<orderedlist>
|
||||
<listitem><para>Follow these
|
||||
<ulink url='http://wiki.eclipse.org/Linux_Tools_Project/LTTng#Downloading_and_installing_the_LTTng_parser_library'>instructions</ulink>
|
||||
to download and install the <filename>lttng</filename> parser library.
|
||||
</para></listitem>
|
||||
<listitem><para>Select <filename>Window -> Open Perspective -> Other</filename>
|
||||
and then select <filename>LTTng</filename>.</para></listitem>
|
||||
<listitem><para>Click <filename>OK</filename> to change the Eclipse perspective
|
||||
into the <filename>LTTng</filename> perspective.</para></listitem>
|
||||
<listitem><para>Create a new <filename>LTTng</filename> project by selecting
|
||||
<filename>File -> New -> Project</filename>.</para></listitem>
|
||||
<listitem><para>Choose <filename>LTTng -> LTTng Project</filename>.</para></listitem>
|
||||
<listitem><para>Click <filename>YoctoTools -> lttng-ust</filename> to start user mode
|
||||
<filename>lttng</filename> on the remote target.</para></listitem>
|
||||
</orderedlist></para>
|
||||
<para>After the output data has been transferred from the remote target back to the local
|
||||
host machine, new traces will be imported into the selected <filename>LTTng</filename> project.
|
||||
Then you can go to the <filename>LTTng</filename> project, right click the imported
|
||||
trace, and set the trace type as the <filename>LTTng</filename> kernel trace.
|
||||
Finally, right click the imported trace and select <filename>Open</filename>
|
||||
to display the data graphically.</para></listitem>
|
||||
<listitem><para><emphasis><filename>PowerTOP</filename>:</emphasis> Selecting this tool runs
|
||||
<filename>powertop</filename> on the remote target machine and displays the results in a
|
||||
new view called <filename>powertop</filename>.</para>
|
||||
<para><filename>Time to gather data(sec):</filename> is the time, in seconds, that
passes before data is gathered from the remote target for analysis.</para>
|
||||
<para><filename>show pids in wakeups list:</filename> corresponds to the
|
||||
<filename>-p</filename> argument
|
||||
passed to <filename>powertop</filename>.</para></listitem>
|
||||
<listitem><para><emphasis><filename>LatencyTOP and Perf</filename>:</emphasis>
|
||||
<filename>latencytop</filename> identifies system latency, while
|
||||
<filename>perf</filename> monitors the system's
|
||||
performance counter registers.
|
||||
Selecting either of these tools causes an RSE terminal view to appear
|
||||
from which you can run the tools.
|
||||
Both tools refresh the entire screen to display results while they run.</para></listitem>
|
||||
</itemizedlist>
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='customizing-an-image-using-a-bitbake-commander-project-and-hob'>
|
||||
<title>Customizing an Image Using a BitBake Commander Project and Hob</title>
|
||||
|
||||
<para>
|
||||
Within Eclipse, you can create a Yocto BitBake Commander project,
|
||||
edit the metadata, and then use
|
||||
<ulink url='&YOCTO_HOME_URL;/projects/hob'>Hob</ulink> to build a customized
|
||||
image all within one IDE.
|
||||
</para>
|
||||
|
||||
<section id='creating-the-yocto-bitbake-commander-project'>
|
||||
<title>Creating the Yocto BitBake Commander Project</title>
|
||||
|
||||
<para>
|
||||
To create a Yocto BitBake Commander project, follow these steps:
|
||||
<orderedlist>
|
||||
<listitem><para>Select <filename>Window -> Open Perspective -> Other</filename>
|
||||
and then choose <filename>Bitbake Commander</filename>.</para></listitem>
|
||||
<listitem><para>Click <filename>OK</filename> to change the Eclipse perspective into the
|
||||
Bitbake Commander perspective.</para></listitem>
|
||||
<listitem><para>Select <filename>File -> New -> Project</filename> to create a new Yocto
|
||||
Bitbake Commander project.</para></listitem>
|
||||
<listitem><para>Choose <filename>Yocto Project Bitbake Commander -> New Yocto Project</filename>
|
||||
and click <filename>Next</filename>.</para></listitem>
|
||||
<listitem><para>Enter the Project Name and choose the Project Location.
|
||||
The Yocto project's metadata files will be put under the directory
|
||||
<filename><project_location>/<project_name></filename>.
|
||||
If that directory does not exist, you need to check
|
||||
the "Clone from Yocto Git Repository" box, which would execute a
|
||||
<filename>git clone</filename> command to get the Yocto project's metadata files.
|
||||
</para></listitem>
|
||||
<listitem><para>Select <filename>Finish</filename> to create the project.</para></listitem>
|
||||
</orderedlist>
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='editing-the-metadata-files'>
|
||||
<title>Editing the Metadata Files</title>
|
||||
|
||||
<para>
|
||||
After you create the Yocto Bitbake Commander project, you can modify the metadata files
|
||||
by opening them in the project.
|
||||
When editing recipe files (<filename>.bb</filename> files), you can view BitBake
|
||||
variable values and information by hovering the mouse pointer over the variable name and
|
||||
waiting a few seconds.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
To edit the metadata, follow these steps:
|
||||
<orderedlist>
|
||||
<listitem><para>Select your Yocto Bitbake Commander project.</para></listitem>
|
||||
<listitem><para>Select <filename>File -> New -> Yocto BitBake Commander -> BitBake Recipe</filename>
|
||||
to open a new recipe wizard.</para></listitem>
|
||||
<listitem><para>Point to your source by filling in the "SRC_URL" field.
|
||||
For example, to add a recipe for one of the
<ulink url='&YOCTO_DOCS_DEV_URL;#yocto-project-source-files'>Yocto Project Source Files</ulink>,
you would enter the "SRC_URL" as follows:
|
||||
<literallayout class='monospaced'>
|
||||
ftp://ftp.gnu.org/gnu/m4/m4-1.4.9.tar.gz
|
||||
</literallayout></para></listitem>
|
||||
<listitem><para>Click "Populate" to calculate the archive md5, sha256,
|
||||
license checksum values and to auto-generate the recipe filename.</para></listitem>
|
||||
<listitem><para>Fill in the "Description" field.</para></listitem>
|
||||
<listitem><para>Be sure values for all required fields exist.</para></listitem>
|
||||
<listitem><para>Click <filename>Finish</filename>.</para></listitem>
|
||||
</orderedlist>
|
||||
</para>
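<para>
    The wizard produces an ordinary BitBake recipe. As a sketch only (every value
    marked "computed" is filled in by the "Populate" step, not by hand), the
    generated file looks roughly like this:
    <literallayout class='monospaced'>
DESCRIPTION = "(your description)"
LICENSE = "(detected license)"
LIC_FILES_CHKSUM = "file://COPYING;md5=(computed)"
SRC_URI = "ftp://ftp.gnu.org/gnu/m4/m4-1.4.9.tar.gz"
SRC_URI[md5sum] = "(computed)"
SRC_URI[sha256sum] = "(computed)"
    </literallayout>
</para>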
|
||||
</section>
|
||||
|
||||
<section id='buiding-and-customizing-the-image'>
|
||||
<title>Building and Customizing the Image</title>
|
||||
|
||||
<para>
|
||||
To build and customize the image in Eclipse, follow these steps:
|
||||
<orderedlist>
|
||||
<listitem><para>Select your Yocto Bitbake Commander project.</para></listitem>
|
||||
<listitem><para>Select <filename>Project -> Launch HOB</filename>.</para></listitem>
|
||||
<listitem><para>Enter the build directory where you want to put your final images.</para></listitem>
|
||||
<listitem><para>Click <filename>OK</filename> to launch Hob.</para></listitem>
|
||||
<listitem><para>Use Hob to customize and build your own images.
|
||||
For information on Hob, see the
|
||||
<ulink url='&YOCTO_HOME_URL;/projects/hob'>Hob Project Page</ulink> on the
|
||||
Yocto Project website.</para></listitem>
|
||||
</orderedlist>
|
||||
</para>
|
||||
</section>
|
||||
</section>
|
||||
</chapter>
|
||||
<!--
|
||||
vim: expandtab tw=80 ts=4
|
||||
-->
|
||||
@@ -3,61 +3,54 @@
|
||||
[<!ENTITY % poky SYSTEM "../poky.ent"> %poky; ] >
|
||||
|
||||
<chapter id='adt-intro'>
|
||||
<title>Introduction</title>
|
||||
|
||||
<title>Application Development Toolkit (ADT) User's Guide</title>
|
||||
|
||||
<para>
|
||||
Welcome to the Yocto Project Application Developer's Guide.
|
||||
This manual provides information that lets you begin developing applications
|
||||
using the Yocto Project.
|
||||
Welcome to the Application Development Toolkit User’s Guide. This manual provides
|
||||
information that lets you get going with the ADT to develop projects using the Yocto
|
||||
Project.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The Yocto Project provides an application development environment based on
|
||||
an Application Development Toolkit (ADT) and the availability of stand-alone
|
||||
cross-development toolchains and other tools.
|
||||
This manual describes the ADT and how you can configure and install it,
|
||||
how to access and use the cross-development toolchains, how to
|
||||
customize the development packages installation,
|
||||
how to use command line development for both Autotools-based and Makefile-based projects,
|
||||
and an introduction to the Eclipse Yocto Plug-in.
|
||||
</para>
|
||||
|
||||
<section id='adt-intro-section'>
|
||||
<title>The Application Development Toolkit (ADT)</title>
|
||||
<section id='book-intro'>
|
||||
<title>Introducing the Application Development Toolkit (ADT)</title>
|
||||
|
||||
<para>
|
||||
Part of the Yocto Project development solution is an Application Development
|
||||
Toolkit (ADT).
|
||||
The ADT provides you with a custom-built, cross-development
|
||||
platform suited for developing a user-targeted product application.
|
||||
Fundamentally, the ADT consists of an architecture-specific cross-toolchain and
|
||||
a matching sysroot that are both built by the Yocto Project build system Poky.
|
||||
The toolchain and sysroot are based on a metadata configuration and extensions,
|
||||
which allows you to cross-develop on the host machine for the target.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
Fundamentally, the ADT consists of the following:
|
||||
<itemizedlist>
|
||||
<listitem><para>An architecture-specific cross-toolchain and matching
|
||||
sysroot both built by the OpenEmbedded build system, which uses Poky.
|
||||
The toolchain and sysroot are based on a metadata configuration and extensions,
|
||||
which allows you to cross-develop on the host machine for the target hardware.
|
||||
</para></listitem>
|
||||
<listitem><para>The Eclipse IDE Yocto Plug-in.</para></listitem>
|
||||
<listitem><para>The Quick EMUlator (QEMU), which lets you simulate target hardware.
|
||||
</para></listitem>
|
||||
<listitem><para>Various user-space tools that greatly enhance your application
|
||||
development experience.</para></listitem>
|
||||
</itemizedlist>
|
||||
Additionally, to provide an effective development platform, the Yocto Project
|
||||
makes available and suggests other tools you can use with the ADT.
|
||||
These other tools include the Eclipse IDE Yocto Plug-in, an emulator (QEMU),
|
||||
and various user-space tools that greatly enhance your development experience.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The resulting combination of the architecture-specific cross-toolchain and sysroot
|
||||
along with these additional tools yields a custom-built, cross-development platform
|
||||
for a user-targeted product.
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='adt-components'>
|
||||
<title>ADT Components</title>
|
||||
|
||||
<para>
|
||||
This section provides a brief description of what comprises the ADT.
|
||||
</para>
|
||||
|
||||
<section id='the-cross-toolchain'>
|
||||
<title>The Cross-Toolchain</title>
|
||||
|
||||
<para>
|
||||
The cross-toolchain consists of a cross-compiler, cross-linker, and cross-debugger
|
||||
that are used to develop user-space applications for targeted hardware.
|
||||
This toolchain is created either by running the ADT Installer script, a toolchain installer
|
||||
script, or through a
|
||||
<ulink url='&YOCTO_DOCS_DEV_URL;#build-directory'>Build Directory</ulink> that
|
||||
is based on your metadata
|
||||
This toolchain is created either by running the ADT Installer script or
|
||||
through a Yocto Project build tree that is based on your metadata
|
||||
configuration or extension for your targeted device.
|
||||
The cross-toolchain works with a matching target sysroot.
|
||||
</para>
|
||||
@@ -70,38 +63,11 @@
|
||||
The matching target sysroot contains needed headers and libraries for generating
|
||||
binaries that run on the target architecture.
|
||||
The sysroot is based on the target root filesystem image that is built by
|
||||
the OpenEmbedded build system Poky and uses the same metadata configuration
|
||||
the Yocto Project's build system Poky and uses the same metadata configuration
|
||||
used to build the cross-toolchain.
|
||||
</para>
|
||||
</section>
|
||||
|
||||
<section id='eclipse-overview'>
<title>Eclipse Yocto Plug-in</title>

<para>
The Eclipse IDE is a popular development environment and it fully supports
development using the Yocto Project.
When you install and configure the Eclipse Yocto Project Plug-in into
the Eclipse IDE, you maximize your Yocto Project experience.
Installing and configuring the Plug-in results in an environment that
has extensions specifically designed to let you more easily develop software.
These extensions allow for cross-compilation, deployment, and execution of
your output into a QEMU emulation session.
You can also perform cross-debugging and profiling.
The environment also supports a suite of tools that allows you to perform
remote profiling, tracing, collection of power data, collection of
latency data, and collection of performance data.
</para>

<para>
For information about the application development workflow that uses the Eclipse
IDE and for a detailed example of how to install and configure the Eclipse
Yocto Project Plug-in, see the
"<ulink url='&YOCTO_DOCS_DEV_URL;#adt-eclipse'>Working Within Eclipse</ulink>" section
of the Yocto Project Development Manual.
</para>
</section>
<section id='the-qemu-emulator'>
<title>The QEMU Emulator</title>

@@ -113,10 +79,8 @@

<listitem><para>If you use the ADT Installer script to install ADT, you can
specify whether or not to install QEMU.</para></listitem>
<listitem><para>If you have downloaded a Yocto Project release and unpacked
it to create a
<ulink url='&YOCTO_DOCS_DEV_URL;#source-directory'>Source Directory</ulink> and
you have sourced
the environment setup script, QEMU is installed and automatically
it to create a Yocto Project file structure and you have sourced
the Yocto Project environment setup script, QEMU is installed and automatically
available.</para></listitem>
<listitem><para>If you have installed the cross-toolchain
tarball and you have sourced the toolchain's setup environment script, QEMU
@@ -143,7 +107,7 @@

<listitem><para><emphasis>PowerTOP:</emphasis> Helps you determine what
software is using the most power.
You can find out more about PowerTOP at
<ulink url='https://01.org/powertop/'></ulink>.</para></listitem>
<ulink url='http://www.linuxpowertop.org/'></ulink>.</para></listitem>
<listitem><para><emphasis>OProfile:</emphasis> A system-wide profiler for Linux
systems that is capable of profiling all running code at low overhead.
You can find out more about OProfile at

@@ -156,7 +120,7 @@

<listitem><para><emphasis>SystemTap:</emphasis> A free software infrastructure
that simplifies information gathering about a running Linux system.
This information helps you diagnose performance or functional problems.
SystemTap is not available as a user-space tool through the Eclipse IDE Yocto Plug-in.
SystemTap is not available as a user-space tool through the Yocto Eclipse IDE Plug-in.
See <ulink url='http://sourceware.org/systemtap'></ulink> for more information
on SystemTap.</para></listitem>
<listitem><para><emphasis>Lttng-ust:</emphasis> A User-space Tracer designed to
@@ -2,7 +2,7 @@

"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd"
[<!ENTITY % poky SYSTEM "../poky.ent"> %poky; ] >

<book id='adt-manual' lang='en'
<book id='adt-manual' lang='en'
xmlns:xi="http://www.w3.org/2003/XInclude"
xmlns="http://docbook.org/ns/docbook"
>

@@ -10,10 +10,10 @@

<mediaobject>
<imageobject>
<imagedata fileref='figures/adt-title.png'
format='SVG'
<imagedata fileref='figures/adt-title.png'
format='SVG'
align='left' scalefit='1' width='100%'/>
</imageobject>
</imageobject>
</mediaobject>

<title></title>

@@ -46,23 +46,8 @@

</revision>
<revision>
<revnumber>1.2</revnumber>
<date>April 2012</date>
<revremark>Released with the Yocto Project 1.2 Release.</revremark>
</revision>
<revision>
<revnumber>1.3</revnumber>
<date>October 2012</date>
<revremark>Released with the Yocto Project 1.3 Release.</revremark>
</revision>
<revision>
<revnumber>1.3.1</revnumber>
<date>April 2013</date>
<revremark>Released with the Yocto Project 1.3.1 Release.</revremark>
</revision>
<revision>
<revnumber>1.3.2</revnumber>
<date>May 2013</date>
<revremark>Released with the Yocto Project 1.3.2 Release.</revremark>
<date>TBD 2012</date>
<revremark>Work in progress for the Yocto Project 1.2 Release.</revremark>
</revision>
</revhistory>
@@ -73,13 +58,14 @@

<legalnotice>
<para>
Permission is granted to copy, distribute and/or modify this document under
Permission is granted to copy, distribute and/or modify this document under
the terms of the <ulink type="http" url="http://creativecommons.org/licenses/by-sa/2.0/uk/">Creative Commons Attribution-Share Alike 2.0 UK: England & Wales</ulink> as published by Creative Commons.
</para>
<note>
Due to production processes, there could be differences between the Yocto Project
documentation bundled in the release tarball and the
<ulink url='&YOCTO_DOCS_ADT_URL;'>Yocto Project Application Developer's Guide</ulink> on
documentation bundled in the release tarball and the
<ulink url='&YOCTO_DOCS_ADT_URL;'>
Application Developer's Toolkit (ADT) User's Guide</ulink> on
the <ulink url='&YOCTO_HOME_URL;'>Yocto Project</ulink> website.
For the latest version of this manual, see the manual on the website.
</note>

@@ -94,6 +80,8 @@

<xi:include href="adt-package.xml"/>

<xi:include href="adt-eclipse.xml"/>

<xi:include href="adt-command.xml"/>

<!-- <index id='index'>

@@ -102,6 +90,6 @@

-->

</book>
<!--
vim: expandtab tw=80 ts=4
<!--
vim: expandtab tw=80 ts=4
-->
@@ -17,7 +17,7 @@

<title>Package Management Systems</title>

<para>
The OpenEmbedded build system supports the generation of sysroot files using
The Yocto Project supports the generation of sysroot files using
three different Package Management Systems (PMS):
<itemizedlist>
<listitem><para><emphasis>OPKG:</emphasis> A less well known PMS whose use

@@ -28,7 +28,7 @@

<listitem><para><emphasis>RPM:</emphasis> A more widely known PMS intended for GNU/Linux
distributions.
This PMS works with files packaged in an <filename>.rpm</filename> format.
The build system currently installs through this PMS by default.
The Yocto Project currently installs through this PMS by default.
See <ulink url='http://en.wikipedia.org/wiki/RPM_Package_Manager'></ulink>
for more information about RPM.</para></listitem>
<listitem><para><emphasis>Debian:</emphasis> The PMS for Debian-based systems

@@ -45,8 +45,7 @@

<para>
Whichever PMS you are using, you need to be sure that the
<ulink url='&YOCTO_DOCS_REF_URL;#var-PACKAGE_CLASSES'><filename>PACKAGE_CLASSES</filename></ulink>
variable in the <filename>conf/local.conf</filename>
<filename>PACKAGE_CLASSES</filename> variable in the <filename>conf/local.conf</filename>
file is set to reflect that system.
The first value you choose for the variable specifies the package file format for the root
filesystem at sysroot.
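<para>
As a minimal sketch of what that setting looks like (the variable is real;
the particular values below are just one valid choice):
<literallayout class='monospaced'>
# conf/local.conf - the first entry determines the package
# format used for the root filesystem
PACKAGE_CLASSES ?= "package_rpm package_ipk"
</literallayout>
</para>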
@@ -56,8 +55,7 @@

<note>
For build performance information related to the PMS, see
<ulink url='&YOCTO_DOCS_REF_URL;#ref-classes-package'>Packaging - <filename>package*.bbclass</filename></ulink>
in the Yocto Project Reference Manual.
<ulink url='&YOCTO_DOCS_REF_URL;#ref-classes-package'>Packaging - <filename>package*.bbclass</filename></ulink> in The Yocto Project Reference Manual.
</note>

<para>

@@ -77,8 +75,7 @@

</para>

<para>
Next, source the environment setup script found in the
<ulink url='&YOCTO_DOCS_DEV_URL;#source-directory'>Source Directory</ulink>.
Next, source the environment setup script found in the Yocto Project files.
Follow that by setting up the installation destination to point to your
sysroot as <filename><sysroot_dir></filename>.
Finally, have an OPKG configuration file <filename><conf_file></filename>
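<para>
An OPKG configuration file of the kind referred to above is typically just a
list of package feeds.
A hedged illustration follows; the feed names and URLs are placeholders, not
values taken from the manual:
<literallayout class='monospaced'>
# <conf_file> sketch - one "src/gz <name> <url>" line per feed
src/gz all http://my_repo/adt-ipk/all
src/gz i586 http://my_repo/adt-ipk/i586
</literallayout>
</para>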
@@ -4,40 +4,25 @@

<chapter id='adt-prepare'>

<title>Preparing for Application Development</title>
<title>Preparing to Use the Application Development Toolkit (ADT)</title>

<para>
In order to develop applications, you need to set up your host development system.
Several ways exist that allow you to install cross-development tools, QEMU, the
Eclipse Yocto Plug-in, and other tools.
This chapter describes how to prepare for application development.
In order to use the ADT, you must install it, <filename>source</filename> a script to set up the
environment, and be sure both the kernel and filesystem image specific to the target architecture
exist.
This chapter describes how to be sure you meet the ADT requirements.
</para>

<section id='installing-the-adt'>
<title>Installing the ADT and Toolchains</title>
<title>Installing the ADT</title>

<para>
The following list describes installation methods that set up varying degrees of tool
availability on your system.
Regardless of the installation method you choose,
you must <filename>source</filename> the cross-toolchain
environment setup script before you use a toolchain.
The following list describes how you can install the ADT, which includes the cross-toolchain.
Regardless of the installation you choose, you must <filename>source</filename> the cross-toolchain
environment setup script before you use the toolchain.
See the "<link linkend='setting-up-the-cross-development-environment'>Setting Up the
Cross-Development Environment</link>" section for more information.
</para>
<note>
<para>Avoid mixing installation methods when installing toolchains for different architectures.
For example, avoid using the ADT Installer to install some toolchains and then hand-installing
cross-development toolchains by running the toolchain installer for different architectures.
Mixing installation methods can result in situations where the ADT Installer becomes
unreliable and might not install the toolchain.</para>
<para>If you must mix installation methods, you might avoid problems by deleting
<filename>/var/lib/opkg</filename>, thus purging the <filename>opkg</filename> package
metadata.</para>
</note>

<para>
Cross-Development Environment</link>"
section for more information.
<itemizedlist>
<listitem><para><emphasis>Use the ADT Installer Script:</emphasis>
This method is the recommended way to install the ADT because it

@@ -45,15 +30,14 @@

For example, you can configure the installation to install the QEMU emulator
and the user-space NFS, specify which root filesystem profiles to download,
and define the target sysroot location.</para></listitem>
<listitem><para><emphasis>Use an Existing Toolchain:</emphasis>
<listitem><para><emphasis>Use an Existing Toolchain Tarball:</emphasis>
Using this method, you select and download an architecture-specific
toolchain installer and then run the script to hand-install the toolchain.
toolchain tarball and then hand-install the toolchain.
If you use this method, you just get the cross-toolchain and QEMU - you do not
get any of the other mentioned benefits had you run the ADT Installer script.</para></listitem>
<listitem><para><emphasis>Use the Toolchain from within the Build Directory:</emphasis>
If you already have a
<ulink url='&YOCTO_DOCS_DEV_URL;#build-directory'>Build Directory</ulink>,
you can build the cross-toolchain within the directory.
<listitem><para><emphasis>Use the Toolchain from within a Yocto Project Build Tree:</emphasis>
If you already have a Yocto Project build tree, you can build the cross-toolchain
within the tree.
However, like the previous method mentioned, you only get the cross-toolchain and QEMU - you
do not get any of the other benefits without taking separate steps.</para></listitem>
</itemizedlist>
@@ -63,16 +47,8 @@

<title>Using the ADT Installer</title>

<para>
To run the ADT Installer, you need to get the ADT Installer tarball, be sure
you have the necessary host development packages that support the ADT Installer,
and then run the ADT Installer Script.
</para>

<para>
For a list of the host packages needed to support ADT installation and use, see the
"ADT Installer Extras" lists in the
"<ulink url='&YOCTO_DOCS_REF_URL;#required-packages-for-the-host-development-system'>Required Packages for the Host Development System</ulink>" section
of the Yocto Project Reference Manual.
To run the ADT Installer, you need to first get the ADT Installer tarball and then run the ADT
Installer Script.
</para>

<section id='getting-the-adt-installer-tarball'>
@@ -84,22 +60,22 @@

<ulink url='&YOCTO_DL_URL;/releases'>Index of Releases</ulink>, specifically
at
<ulink url='&YOCTO_ADTINSTALLER_DL_URL;'></ulink>.
Or, you can use BitBake to generate the tarball inside the existing
<ulink url='&YOCTO_DOCS_DEV_URL;#build-directory'>Build Directory</ulink>.
Or, you can use BitBake to generate the tarball inside the existing Yocto Project
build tree.
</para>

<para>
If you use BitBake to generate the ADT Installer tarball, you must
<filename>source</filename> the environment setup script
(<filename>&OE_INIT_FILE;</filename>) located
in the Source Directory before running the <filename>bitbake</filename>
<filename>source</filename> the Yocto Project environment setup script
(<filename>oe-init-build-env</filename>) located
in the Yocto Project file structure before running the <filename>bitbake</filename>
command that creates the tarball.
</para>

<para>
The following example commands download the Poky tarball, set up the
<ulink url='&YOCTO_DOCS_DEV_URL;#source-directory'>Source Directory</ulink>,
set up the environment while also creating the default Build Directory,
The following example commands download the Yocto Project release tarball, set up the Yocto
Project files structure, set up the environment while also creating the
default Yocto Project build tree,
and run the <filename>bitbake</filename> command that results in the tarball
<filename>~/yocto-project/build/tmp/deploy/sdk/adt_installer.tar.bz2</filename>:
<literallayout class='monospaced'>

@@ -128,16 +104,13 @@

$ cp ~/poky/build/tmp/deploy/sdk/adt_installer.tar.bz2 $HOME
$ tar -xjf adt_installer.tar.bz2
</literallayout>
Unpacking the tarball creates the directory <filename>adt-installer</filename>,
which contains the ADT Installer script (<filename>adt_installer</filename>),
its configuration file (<filename>adt_installer.conf</filename>), a
<filename>scripts</filename> directory, and an <filename>opkg</filename>
directory.
Unpacking it creates the directory <filename>adt-installer</filename>,
which contains the ADT Installer script (<filename>adt_installer</filename>)
and its configuration file (<filename>adt_installer.conf</filename>).
</para>
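<para>
For reference, the <filename>bitbake</filename> step that actually produces the
ADT Installer tarball is a build of the installer recipe, along the lines of
the following (the recipe name is assumed from the 1.x-era tooling, so verify
it against your release):
<literallayout class='monospaced'>
# run from the build tree after sourcing the environment setup script
$ bitbake adt-installer
</literallayout>
</para>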
<para>
Before you run the ADT Installer script, however, you should examine
the ADT Installer configuration
Before you run the script, however, you should examine the ADT Installer configuration
file and be sure you are going to get what you want.
Your configurations determine which kernel and filesystem image are downloaded.
</para>

@@ -155,22 +128,7 @@

<filename>YOCTOADT_REPO</filename>, you need to be sure that the
directory structure follows the same layout as the reference directory
set up at <ulink url='http://adtrepo.yoctoproject.org'></ulink>.
Also, your repository needs to be accessible through HTTP.</para>
<para>Additionally, you will need to edit a second configuration file
located in the <filename>adt-installer/opkg</filename> directory.
The configuration file you edit depends on your host development
system.
For 64-bit systems, edit the <filename>opkg-sdk-x86_64.conf</filename>
file.
If your host development system is 32-bit, edit the
<filename>opkg-sdk-i686.conf</filename> file.
For both cases, you need to make sure you are pointing to
the IPKG-based packages specified by the
<filename>YOCTOADT_REPO</filename>.
Here is an example for a 64-bit development system:
<literallayout class='monospaced'>
src yp-x86_64-nativesdk http://my_repo/yp-1.3.1/adt-ipk/x86_64-nativesdk
</literallayout></para></listitem>
Also, your repository needs to be accessible through HTTP.</para></listitem>
<listitem><para><filename>YOCTOADT_TARGETS</filename>: The machine
target architectures for which you want to set up cross-development
environments.</para></listitem>

@@ -178,7 +136,7 @@

or not to install the emulator QEMU.</para></listitem>
<listitem><para><filename>YOCTOADT_NFS_UTIL</filename>: Indicates whether
or not to install user-mode NFS.
If you plan to use the Eclipse IDE Yocto plug-in against QEMU,
If you plan to use the Yocto Eclipse IDE plug-in against QEMU,
you should install NFS.
<note>To boot QEMU images using our userspace NFS server, you need
to be running <filename>portmap</filename> or <filename>rpcbind</filename>.
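<para>
Pulling the variables above together, a sketch of an edited
<filename>adt_installer.conf</filename> might look like the following.
The values are placeholders to adapt, and <filename>YOCTOADT_QEMU</filename>
is our assumed name for the QEMU switch, whose definition is not shown above:
<literallayout class='monospaced'>
YOCTOADT_REPO="http://adtrepo.yoctoproject.org/1.3"   # package repository
YOCTOADT_TARGETS="arm x86"                            # target architectures
YOCTOADT_QEMU="Y"                                     # install the QEMU emulator
YOCTOADT_NFS_UTIL="Y"                                 # install user-mode NFS
</literallayout>
</para>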
@@ -208,18 +166,21 @@

<para>
After you have configured the <filename>adt_installer.conf</filename> file,
run the installer using the following command.
Be sure that you are not trying to use cross-compilation tools.
When you run the installer, the environment must use a
host <filename>gcc</filename>:
run the installer using the following command:
<literallayout class='monospaced'>
$ cd ~/adt-installer
$ ./adt_installer
</literallayout>
Once the installer begins to run, you are asked to enter the location for
cross-toolchain installation.
The default location is <filename>/opt/poky/<release></filename>.
After selecting the location, you are prompted to run in
</para>
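<para>
A quick way to confirm that no cross-compilation environment is active in your
shell is to check which compiler it would use (a sketch; the exact path depends
on your distribution):
<literallayout class='monospaced'>
$ which gcc
/usr/bin/gcc
</literallayout>
If the reported compiler lives under a toolchain installation directory instead,
start a fresh shell before running the installer.
</para>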
<note>
The ADT Installer requires the <filename>libtool</filename> package to complete.
If you install the recommended packages as described in the
"<ulink url='&YOCTO_DOCS_QS_URL;#packages'>The Packages</ulink>"
section of The Yocto Project Quick Start, then you will have libtool installed.
</note>

<para>
Once the installer begins to run, you are asked whether you want to run in
interactive or silent mode.
If you want to closely monitor the installation, choose “I” for interactive
mode rather than “S” for silent mode.

@@ -242,12 +203,10 @@

<title>Using a Cross-Toolchain Tarball</title>

<para>
If you want to simply install the cross-toolchain by hand, you can do so by running the
toolchain installer.
If you want to simply install the cross-toolchain by hand, you can do so by using an existing
cross-toolchain tarball.
If you use this method to install the cross-toolchain and you still need to install the target
sysroot, you will have to extract and install sysroot separately.
For information on how to do this, see the
"<link linkend='extracting-the-root-filesystem'>Extracting the Root Filesystem</link>" section.
sysroot, you will have to install sysroot separately.
</para>

<para>
@@ -258,43 +217,30 @@

and find the folder that matches your host development system
(i.e. <filename>i686</filename> for 32-bit machines or
<filename>x86-64</filename> for 64-bit machines).</para></listitem>
<listitem><para>Go into that folder and download the toolchain installer whose name
<listitem><para>Go into that folder and download the toolchain tarball whose name
includes the appropriate target architecture.
For example, if your host development system is an Intel-based 64-bit system and
you are going to use your cross-toolchain for an Intel-based 32-bit target, go into the
<filename>x86_64</filename> folder and download the following installer:
<filename>x86_64</filename> folder and download the following tarball:
<literallayout class='monospaced'>
poky-eglibc-x86_64-i586-toolchain-gmae-&DISTRO;.sh
poky-eglibc-x86_64-i586-toolchain-gmae-&DISTRO;.tar.bz2
</literallayout>
<note><para>As an alternative to steps one and two, you can build the toolchain installer
if you have a <ulink url='&YOCTO_DOCS_DEV_URL;#build-directory'>Build Directory</ulink>.
<note><para>As an alternative to steps one and two, you can build the toolchain tarball
if you have a Yocto Project build tree.
If you need GMAE, you should use the <filename>bitbake meta-toolchain-gmae</filename>
command.
The resulting installation script when run will support such development.
The resulting tarball will support such development.
However, if you are not concerned with GMAE,
you can generate the toolchain installer using
<filename>bitbake meta-toolchain</filename>.</para>
you can generate the tarball using <filename>bitbake meta-toolchain</filename>.</para>
<para>Use the appropriate <filename>bitbake</filename> command only after you have
sourced the <filename>&OE_INIT_PATH;</filename> script located in the Source
Directory.
When the <filename>bitbake</filename> command completes, the toolchain installer will
be in <filename>tmp/deploy/sdk</filename> in the Build Directory.
</para></note>
</para></listitem>
<listitem><para>Once you have the installer, run it to install the toolchain.
You must change the permissions on the toolchain installer
script so that it is executable.</para>
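<para>For example (path assumed to match the download step above):
<literallayout class='monospaced'>
# make the installer script executable
$ chmod +x ~/Downloads/poky-eglibc-x86_64-i586-toolchain-gmae-&DISTRO;.sh
</literallayout>
</para>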
<para>The following command shows how to run the installer given a toolchain tarball
for a 64-bit development host system and a 32-bit target architecture.
The example assumes the toolchain installer is located in <filename>~/Downloads/</filename>.
<literallayout class='monospaced'>
$ ~/Downloads/poky-eglibc-x86_64-i586-toolchain-gmae-&DISTRO;.sh
</literallayout>
<note>
If you do not have write permissions for the directory into which you are installing
the toolchain, the toolchain installer notifies you and exits.
Be sure you have write permissions in the directory and run the installer again.
</note>
sourced the <filename>oe-init-build-env</filename> script located in the Yocto
Project files.
When the <filename>bitbake</filename> command completes, the tarball will
be in <filename>tmp/deploy/sdk</filename> in the Yocto Project build tree.
</para></note></para></listitem>
<listitem><para>Make sure you are in the root directory with root privileges and then expand
the tarball.
The tarball expands into <filename>&YOCTO_ADTPATH_DIR;</filename>.
Once the tarball is expanded, the cross-toolchain is installed.
You will notice environment setup files for the cross-toolchain in the directory.
</para></listitem>
@@ -303,54 +249,47 @@

</section>

<section id='using-the-toolchain-from-within-the-build-tree'>
<title>Using BitBake and the Build Directory</title>
<title>Using BitBake and the Yocto Project Build Tree</title>

<para>
A final way of making the cross-toolchain available is to use BitBake
to generate the toolchain within an existing
<ulink url='&YOCTO_DOCS_DEV_URL;#build-directory'>Build Directory</ulink>.
This method does not install the toolchain into the
<filename>/opt</filename> directory.
A final way of installing just the cross-toolchain is to use BitBake to build the
toolchain within an existing Yocto Project build tree.
This method does not install the toolchain into the <filename>/opt</filename> directory.
As with the previous method, if you need to install the target sysroot, you must
do that separately as well.
do this separately.
</para>

<para>
Follow these steps to generate the toolchain into the Build Directory:
Follow these steps to build and install the toolchain into the build tree
(a combined session sketch follows this list):
<orderedlist>
<listitem><para>Source the environment setup script
<filename>&OE_INIT_FILE;</filename> located in the
<ulink url='&YOCTO_DOCS_DEV_URL;#source-directory'>Source Directory</ulink>.
</para></listitem>
<filename>oe-init-build-env</filename> located in the Yocto Project
files.</para></listitem>
<listitem><para>At this point, you should be sure that the
<ulink url='&YOCTO_DOCS_REF_URL;#var-MACHINE'><filename>MACHINE</filename></ulink> variable
<filename>MACHINE</filename> variable
in the <filename>local.conf</filename> file found in the
<filename>conf</filename> directory of the Build Directory
<filename>conf</filename> directory of the Yocto Project build directory
is set for the target architecture.
Comments within the <filename>local.conf</filename> file list the values you
can use for the <filename>MACHINE</filename> variable.
<note>You can populate the Build Directory with the cross-toolchains for more
<note>You can populate the build tree with the cross-toolchains for more
than a single architecture.
You just need to edit the <filename>MACHINE</filename> variable in the
<filename>local.conf</filename> file and re-run the BitBake
command.</note></para></listitem>
<listitem><para>Run <filename>bitbake meta-ide-support</filename> to complete the
cross-toolchain generation.
<note>If you change out of your working directory after you
cross-toolchain installation.
<note>If you change out of your working directory after you
<filename>source</filename> the environment setup script and before you run
the <filename>bitbake</filename> command, the command might not work.
Be sure to run the <filename>bitbake</filename> command immediately
after checking or editing the <filename>local.conf</filename> but without
changing out of your working directory.</note>
Once the <filename>bitbake</filename> command finishes,
the cross-toolchain is generated and populated within the Build Directory.
the tarball for the cross-toolchain is generated within the Yocto Project build tree.
You will notice environment setup files for the cross-toolchain in the
Build Directory in the <filename>tmp</filename> directory.
Setup script filenames contain the strings <filename>environment-setup</filename>.</para>
<para>Be aware that when you use this method to install the toolchain you still need
to separately extract and install the sysroot filesystem.
For information on how to do this, see the
"<link linkend='extracting-the-root-filesystem'>Extracting the Root Filesystem</link>" section.
Yocto Project build tree in the <filename>tmp</filename> directory.
Setup script filenames contain the strings <filename>environment-setup</filename>.
</para></listitem>
</orderedlist>
</para>
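<para>
Putting those steps together, a minimal session might look like the following
(the machine name is illustrative; use one listed in your own
<filename>local.conf</filename> comments):
<literallayout class='monospaced'>
$ source oe-init-build-env
$ grep '^MACHINE' conf/local.conf
MACHINE ?= "atom-pc"
$ bitbake meta-ide-support
</literallayout>
</para>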
@@ -363,13 +302,11 @@

<para>
Before you can develop using the cross-toolchain, you need to set up the
cross-development environment by sourcing the toolchain's environment setup script.
If you used the ADT Installer or hand-installed the cross-toolchain,
If you used the ADT Installer or used an existing ADT tarball to install the ADT,
then you can find this script in the <filename>&YOCTO_ADTPATH_DIR;</filename>
directory.
If you installed the toolchain in the
<ulink url='&YOCTO_DOCS_DEV_URL;#build-directory'>Build Directory</ulink>,
you can find the environment setup
script for the toolchain in the Build Directory's <filename>tmp</filename> directory.
If you installed the toolchain in the build tree, you can find the environment setup
script for the toolchain in the Yocto Project build tree's <filename>tmp</filename> directory.
</para>
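<para>
A typical session looks something like the following; the script name encodes
the target architecture and release, so treat the exact path and the reported
<filename>CC</filename> value as illustrative:
<literallayout class='monospaced'>
$ source /opt/poky/1.3/environment-setup-i586-poky-linux
$ echo $CC
i586-poky-linux-gcc -m32 -march=i586 --sysroot=/opt/poky/1.3/sysroots/i586-poky-linux
</literallayout>
</para>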
<para>

@@ -403,33 +340,24 @@

pre-built versions.
You can find examples for both these situations in the
"<ulink url='&YOCTO_DOCS_QS_URL;#test-run'>A Quick Test Run</ulink>" section of
the Yocto Project Quick Start.
The Yocto Project Quick Start.
</para>

<para>
The Yocto Project ships basic kernel and filesystem images for several
The Yocto Project provides basic kernel and filesystem images for several
architectures (<filename>x86</filename>, <filename>x86-64</filename>,
<filename>mips</filename>, <filename>powerpc</filename>, and <filename>arm</filename>)
that you can use unaltered in the QEMU emulator.
These kernel images reside in the release
These kernel images reside in the Yocto Project release
area - <ulink url='&YOCTO_MACHINES_DL_URL;'></ulink>
and are ideal for experimentation using Yocto Project.
For information on the image types you can build using the OpenEmbedded build system,
see the
"<ulink url='&YOCTO_DOCS_REF_URL;#ref-images'>Images</ulink>" chapter in
the Yocto Project Reference Manual.
and are ideal for experimentation within Yocto Project.
For information on the image types you can build using the Yocto Project, see the
"<ulink url='&YOCTO_DOCS_REF_URL;#ref-images'>Reference: Images</ulink>" appendix in
The Yocto Project Reference Manual.
</para>
<para>
If you are planning on developing against your image and you are not
building or using one of the Yocto Project development images
(e.g. core-image-*-dev), you must be sure to include the development
packages as part of your image recipe.
</para>

<para>
Furthermore, if you plan on remotely deploying and debugging your
application from within the
If you plan on remotely deploying and debugging your application from within the
Eclipse IDE, you must have an image that contains the Yocto Target Communication
Framework (TCF) agent (<filename>tcf-agent</filename>).
By default, the Yocto Project provides only one type of pre-built image that contains the

@@ -442,10 +370,8 @@

you can do so one of two ways:
<itemizedlist>
<listitem><para>Modify the <filename>conf/local.conf</filename> configuration in
the <ulink url='&YOCTO_DOCS_DEV_URL;#build-directory'>Build Directory</ulink>
and then rebuild the image.
With this method, you need to modify the
<ulink url='&YOCTO_DOCS_REF_URL;#var-EXTRA_IMAGE_FEATURES'><filename>EXTRA_IMAGE_FEATURES</filename></ulink>
the Yocto Project build directory and then rebuild the image.
With this method, you need to modify the <filename>EXTRA_IMAGE_FEATURES</filename>
variable to have the value of "tools-debug" before rebuilding the image.
Once the image is rebuilt, the <filename>tcf-agent</filename> will be included
in the image and is launched automatically after the boot.</para></listitem>
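<para>
For example, the change to <filename>conf/local.conf</filename> is a single
line (shown as a sketch; append the feature to any values you already set):
<literallayout class='monospaced'>
EXTRA_IMAGE_FEATURES = "tools-debug"
</literallayout>
</para>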
@@ -453,7 +379,7 @@

To build the agent, follow these steps:
<orderedlist>
<listitem><para>Be sure the ADT is installed as described in the
"<link linkend='installing-the-adt'>Installing the ADT and Toolchains</link>" section.
"<link linkend='installing-the-adt'>Installing the ADT</link>" section.
</para></listitem>
<listitem><para>Set up the cross-development environment as described in the
"<link linkend='setting-up-the-cross-development-environment'>Setting

@@ -466,8 +392,7 @@

</literallayout></para></listitem>
<listitem><para>Modify the <filename>Makefile.inc</filename> file
for the cross-compilation environment by setting the
<filename>OPSYS</filename> and
<ulink url='&YOCTO_DOCS_REF_URL;#var-MACHINE'><filename>MACHINE</filename></ulink>
<filename>OPSYS</filename> and <filename>MACHINE</filename>
variables according to your target.</para></listitem>
<listitem><para>Use the cross-development tools to build the
<filename>tcf-agent</filename>.

[Binary image changed: 13 KiB before, 17 KiB after]
@@ -110,7 +110,7 @@ h5 {

h6 {
margin: 1em 0em 0em 0em;
padding: 1em 0em 0em 0em;
font-size: 110%;
font-size: 80%;
font-weight: bold;
}
@@ -2,7 +2,7 @@

"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd"
[<!ENTITY % poky SYSTEM "../poky.ent"> %poky; ] >

<book id='bsp-guide' lang='en'
<book id='bsp-guide' lang='en'
xmlns:xi="http://www.w3.org/2003/XInclude"
xmlns="http://docbook.org/ns/docbook"
>

@@ -10,13 +10,13 @@

<mediaobject>
<imageobject>
<imagedata fileref='figures/bsp-title.png'
format='SVG'
<imagedata fileref='figures/bsp-title.png'
format='SVG'
align='center' scalefit='1' width='100%'/>
</imageobject>
</imageobject>
</mediaobject>

<title></title>
<title></title>

<authorgroup>
<author>

@@ -58,23 +58,8 @@

</revision>
<revision>
<revnumber>1.2</revnumber>
<date>April 2012</date>
<revremark>Released with the Yocto Project 1.2 Release.</revremark>
</revision>
<revision>
<revnumber>1.3</revnumber>
<date>October 2012</date>
<revremark>Released with the Yocto Project 1.3 Release.</revremark>
</revision>
<revision>
<revnumber>1.3.1</revnumber>
<date>April 2013</date>
<revremark>Released with the Yocto Project 1.3.1 Release.</revremark>
</revision>
<revision>
<revnumber>1.3.2</revnumber>
<date>May 2013</date>
<revremark>Released with the Yocto Project 1.3.2 Release.</revremark>
<date>TBD 2012</date>
<revremark>Work in progress for the Yocto Project 1.2 Release.</revremark>
</revision>
</revhistory>
@@ -85,13 +70,14 @@

<legalnotice>
<para>
Permission is granted to copy, distribute and/or modify this document under
Permission is granted to copy, distribute and/or modify this document under
the terms of the <ulink type="http" url="http://creativecommons.org/licenses/by-nc-sa/2.0/uk/">Creative Commons Attribution-Non-Commercial-Share Alike 2.0 UK: England & Wales</ulink> as published by Creative Commons.
</para>
<note>
Due to production processes, there could be differences between the Yocto Project
documentation bundled in the release tarball and the
<ulink url='&YOCTO_DOCS_BSP_URL;'>Yocto Project Board Support Package (BSP) Developer's Guide</ulink> on
documentation bundled in the release tarball and the
<ulink url='&YOCTO_DOCS_BSP_URL;'>
Board Support Package (BSP) Developer's Guide</ulink> on
the <ulink url='&YOCTO_HOME_URL;'>Yocto Project</ulink> website.
For the latest version of this manual, see the manual on the website.
</note>

@@ -107,6 +93,6 @@

-->

</book>
<!--
vim: expandtab tw=80 ts=4
<!--
vim: expandtab tw=80 ts=4
-->

[Binary image replaced: 17 KiB before, 17 KiB after]
@@ -110,7 +110,7 @@ h5 {

h6 {
margin: 1em 0em 0em 0em;
padding: 1em 0em 0em 0em;
font-size: 110%;
font-size: 80%;
font-weight: bold;
}

documentation/dev-manual/dev-manual-bsp-appendix.xml | 706 lines (new file)
@@ -0,0 +1,706 @@
<!DOCTYPE appendix PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd"
[<!ENTITY % poky SYSTEM "../poky.ent"> %poky; ] >

<appendix id='dev-manual-bsp-appendix'>

<title>BSP Development Example</title>

<para>
This appendix provides a complete BSP development example.
The example assumes the following:
<itemizedlist>
<listitem><para>No previous preparation or use of the Yocto Project.</para></listitem>
<listitem><para>Use of the Crown Bay Board Support Package (BSP) as a "base" BSP from
which to work.
The example begins with the Crown Bay BSP as the starting point
but ends by building a new 'atom-pc' BSP, which is based on the Crown Bay BSP.
</para></listitem>
<listitem><para>Shell commands assume <filename>bash</filename>.</para></listitem>
<listitem><para>The example was developed on an Intel-based Core i7 platform running
Ubuntu 10.04 LTS, released in April of 2010.</para></listitem>
</itemizedlist>
</para>
<section id='getting-local-yocto-project-files-and-bsp-files'>
<title>Getting Local Yocto Project Files and BSP Files</title>

<para>
You need to have the Yocto Project files available on your host system.
You can get files through tarball extraction or by cloning the <filename>poky</filename>
Git repository.
The following paragraphs describe both methods.
For additional information, see the bulleted item
"<link linkend='local-yp-release'>Yocto Project Release</link>".
</para>

<para>
As mentioned, one way to get the Yocto Project files is to use Git to clone the
<filename>poky</filename> repository.
These commands create a local copy of the Git repository.
By default, the top-level directory of the repository is named <filename>poky</filename>:
<literallayout class='monospaced'>
$ git clone git://git.yoctoproject.org/poky
$ cd poky
</literallayout>
Alternatively, you can start with the downloaded Poky "edison" tarball.
These commands unpack the tarball into a Yocto Project File directory structure.
By default, the top-level directory of the file structure is named
<filename>poky-&YOCTO_POKY;</filename>:
<literallayout class='monospaced'>
$ tar xfj &YOCTO_POKY_TARBALL;
$ cd &YOCTO_POKY;
</literallayout>
<note><para>If you're using the tarball method, you can ignore all the following steps that
ask you to carry out Git operations.
You already have the results of those operations
in the form of the edison release tarballs.
Consequently, there is nothing left to do other than extract those tarballs into the
proper locations.</para>

<para>Once you expand the released tarball, you have a snapshot of the Git repository
that represents a specific release.
Fundamentally, this is different than having a local copy of the Yocto Project
Git repository.
Given the tarball method, changes you make are building on top of a release.
With the Git repository method, you have the ability to track development
and keep changes in revision control.
See the
"<link linkend='repositories-tags-and-branches'>Repositories, Tags, and Branches</link>" section
for more discussion around these differences.</para></note>
</para>
<para>
With the local <filename>poky</filename> Git repository set up,
you have all the development branches available to you from which you can work.
Next, you need to be sure that your local repository reflects the exact
release in which you are interested.
From inside the repository you can see the development branches that represent
areas of development that have diverged from the main (master) branch
at some point, such as a branch to track a maintenance release's development.
You can also see the tag names used to mark snapshots of stable releases or
points in the repository.
Use the following commands to list out the branches and the tags in the repository,
respectively:
<literallayout class='monospaced'>
$ git branch -a
$ git tag -l
</literallayout>
For this example, we are going to use the Yocto Project &DISTRO; Release, which is code
named "&DISTRO_NAME;".
To make sure we have a local area (branch in Git terms) on our machine that
reflects the &DISTRO; release, we can use the following commands:
<literallayout class='monospaced'>
$ cd ~/poky
$ git fetch --tags
$ git checkout &DISTRO_NAME;-&POKYVERSION; -b &DISTRO_NAME;
Switched to a new branch '&DISTRO_NAME;'
</literallayout>
The <filename>git fetch --tags</filename> command is somewhat redundant since you just set
up the repository and should have all the tags.
The <filename>fetch</filename> command makes sure all the tags are available in your
local repository.
The Git <filename>checkout</filename> command with the <filename>-b</filename> option
creates a local branch for you named <filename>&DISTRO_NAME;</filename>.
Your local branch begins in the same state as the Yocto Project &DISTRO; released tarball
marked with the <filename>&DISTRO_NAME;-&POKYVERSION;</filename> tag in the source repositories.
</para>
</section>
<section id='choosing-a-base-bsp-app'>
<title>Choosing a Base BSP</title>

<para>
For this example, the base BSP is the <trademark class='registered'>Intel</trademark>
<trademark class='trade'>Atom</trademark> Processor E660 with Intel Platform
Controller Hub EG20T Development Kit, which is otherwise referred to as "Crown Bay."
The BSP layer is <filename>meta-crownbay</filename>.
The base BSP is simply the BSP
we will be using as a starting point, so don't worry if you don't actually have Crown Bay
hardware.
The remainder of the example transforms the base BSP into a BSP that should be
able to boot on generic atom-pc (netbook) hardware.
</para>

<para>
For information on how to choose a base BSP, see
"<link linkend='developing-a-board-support-package-bsp'>Developing a Board Support Package (BSP)</link>".
</para>
</section>
<section id='getting-your-base-bsp-app'>
<title>Getting Your Base BSP</title>

<para>
You need to have the base BSP layer on your development system.
Similar to the local Yocto Project files, you can get the BSP
layer in a couple of different ways:
download the BSP tarball and extract it, or set up a local Git repository that
has the Yocto Project BSP layers.
You should use the same method that you used to get the local Yocto Project files earlier.
See "<link linkend='getting-setup'>Getting Setup</link>" for information on how to get
the BSP files.
</para>

<para>
This example assumes the BSP layer will be located within a directory named
<filename>meta-intel</filename> contained within the <filename>poky</filename>
parent directory.
The following steps will automatically create the
<filename>meta-intel</filename> directory and the contained
<filename>meta-crownbay</filename> starting point in both the Git and the tarball cases.
</para>

<para>
If you're using the Git method, you could do the following to create
the starting layout after you have made sure you are in the <filename>poky</filename>
directory created in the previous steps:
<literallayout class='monospaced'>
$ git clone git://git.yoctoproject.org/meta-intel.git
$ cd meta-intel
</literallayout>
Alternatively, you can start with the downloaded Crown Bay tarball.
You can download the edison version of the BSP tarball from the
<ulink url='&YOCTO_HOME_URL;/download'>Download</ulink> page of the
Yocto Project website.
Here is the specific link for the tarball needed for this example:
<ulink url='&YOCTO_MACHINES_DL_URL;/crownbay-noemgd/crownbay-noemgd-&DISTRO_NAME;-6.0.0.tar.bz2'></ulink>.
Again, be sure that you are already in the <filename>poky</filename> directory
as described previously before installing the tarball:
<literallayout class='monospaced'>
$ tar xfj crownbay-noemgd-&DISTRO_NAME;-6.0.0.tar.bz2
$ cd meta-intel
</literallayout>
</para>
<para>
The <filename>meta-intel</filename> directory contains all the metadata
that supports BSP creation.
If you're using the Git method, the following
step will switch to the edison metadata.
If you're using the tarball method, you already have the correct metadata and can
skip to the next step.
Because <filename>meta-intel</filename> is its own Git repository, you will want
to be sure you are in the appropriate branch for your work.
For this example we are going to use the <filename>&DISTRO_NAME;</filename> branch:
<literallayout class='monospaced'>
$ git checkout -b &DISTRO_NAME; origin/&DISTRO_NAME;
Switched to a new branch '&DISTRO_NAME;'
</literallayout>
</para>
</section>
<section id='making-a-copy-of-the-base-bsp-to-create-your-new-bsp-layer-app'>
<title>Making a Copy of the Base BSP to Create Your New BSP Layer</title>

<para>
Now that you have the local Yocto Project files and the base BSP files, you need to create a
new layer for your BSP.
To create your BSP layer, you simply copy the <filename>meta-crownbay</filename>
layer to a new layer.
</para>

<para>
For this example, the new layer will be named <filename>meta-mymachine</filename>.
The name should follow the BSP layer naming convention, which is
<filename>meta-<name></filename>.
The following assumes your working directory is <filename>meta-intel</filename>
inside the local Yocto Project files.
To start your new layer, copy the <filename>meta-crownbay</filename> layer to a
new layer alongside the existing
BSP layers in the <filename>meta-intel</filename> directory:
<literallayout class='monospaced'>
$ cp -a meta-crownbay/ meta-mymachine
</literallayout>
</para>
</section>
<section id='making-changes-to-your-bsp-app'>
<title>Making Changes to Your BSP</title>

<para>
Right now you have two identical BSP layers with different names:
<filename>meta-crownbay</filename> and <filename>meta-mymachine</filename>.
You need to change your configurations so that they work for your new BSP and
your particular hardware.
The following sections look at each of these areas of the BSP.
</para>

<section id='changing-the-bsp-configuration'>
<title>Changing the BSP Configuration</title>

<para>
We will look first at the configurations, which are all done in the layer's
<filename>conf</filename> directory.
</para>

<para>
First, since in this example the new BSP will not support EMGD, we will get rid of the
<filename>crownbay.conf</filename> file and then rename the
<filename>crownbay-noemgd.conf</filename> file to <filename>mymachine.conf</filename>.
Much of what we do in the configuration directory is designed to help the Yocto Project
build system work with the new layer and to be able to find and use the right software.
The following two commands result in a single machine configuration file named
<filename>mymachine.conf</filename>:
<literallayout class='monospaced'>
$ rm meta-mymachine/conf/machine/crownbay.conf
$ mv meta-mymachine/conf/machine/crownbay-noemgd.conf \
meta-mymachine/conf/machine/mymachine.conf
</literallayout>
</para>

<para>
Next, we need to make changes to the <filename>mymachine.conf</filename> file itself.
The only changes we want to make for this example are to the comment lines.
Changing comments, of course, is never strictly necessary, but it's always good form to make
them reflect reality as much as possible.
Here, simply substitute the Crown Bay name with an appropriate name for the BSP
(<filename>mymachine</filename> in this case) and change the description to
something that describes your hardware.
</para>
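<para>
For reference, the comment block being edited typically follows the standard
machine-configuration header convention, something like the following (the
wording is illustrative, not copied from the actual file):
<literallayout class='monospaced'>
#@TYPE: Machine
#@NAME: mymachine
#@DESCRIPTION: Machine configuration for my Atom-based hardware
</literallayout>
</para>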
<para>
Note that inside the <filename>mymachine.conf</filename> is the
<filename>PREFERRED_PROVIDER_virtual/kernel</filename> statement.
This statement identifies the kernel that the BSP is going to use.
In this case, the BSP is using <filename>linux-yocto</filename>, which is the
current Linux Yocto kernel based on the Linux 3.0 release.
</para>
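<para>
The statement in question is a single line along these lines (the variable and
recipe name come from the discussion above; the exact form in your copy may
differ slightly):
<literallayout class='monospaced'>
PREFERRED_PROVIDER_virtual/kernel ?= "linux-yocto"
</literallayout>
</para>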
<para>
The next configuration file in the new BSP layer we need to edit is
<filename>meta-mymachine/conf/layer.conf</filename>.
This file identifies build information needed for the new layer.
You can see the
"<ulink url='&YOCTO_DOCS_BSP_URL;#bsp-filelayout-layer'>Layer Configuration File</ulink>" section
in The Board Support Packages (BSP) Development Guide for more information on this configuration file.
Basically, we are changing the existing statements to work with our BSP.
</para>

<para>
The file contains these statements that reference the Crown Bay BSP:
<literallayout class='monospaced'>
BBFILE_COLLECTIONS += "crownbay"
BBFILE_PATTERN_crownbay := "^${LAYERDIR}/"
BBFILE_PRIORITY_crownbay = "6"
</literallayout>
</para>

<para>
Simply substitute the machine string name <filename>crownbay</filename>
with the new machine name <filename>mymachine</filename> to get the following:
<literallayout class='monospaced'>
BBFILE_COLLECTIONS += "mymachine"
BBFILE_PATTERN_mymachine := "^${LAYERDIR}/"
BBFILE_PRIORITY_mymachine = "6"
</literallayout>
</para>
</section>
<section id='changing-the-recipes-in-your-bsp'>
<title>Changing the Recipes in Your BSP</title>

<para>
Now we will take a look at the recipes in your new layer.
The standard BSP structure has areas for BSP, graphics, core, and kernel recipes.
When you create a BSP, you use these areas for appropriate recipes and append files.
Recipes take the form of <filename>.bb</filename> files, while append files take
the form of <filename>.bbappend</filename> files.
If you want to leverage the existing recipes the Yocto Project build system uses
but change those recipes, you can use <filename>.bbappend</filename> files.
All new recipes and append files for your layer must go in the layer's
<filename>recipes-bsp</filename>, <filename>recipes-kernel</filename>,
<filename>recipes-core</filename>, and
<filename>recipes-graphics</filename> directories.
</para>

<section id='changing-recipes-bsp'>
<title>Changing <filename>recipes-bsp</filename></title>

<para>
First, let's look at <filename>recipes-bsp</filename>.
For this example, we are not adding any new BSP recipes.
We only need to remove the formfactor we do not want and change the name of
the remaining one, which doesn't support EMGD.
These commands take care of the <filename>recipes-bsp</filename> recipes:
<literallayout class='monospaced'>
$ rm -rf meta-mymachine/recipes-bsp/formfactor/formfactor/crownbay
$ mv meta-mymachine/recipes-bsp/formfactor/formfactor/crownbay-noemgd/ \
meta-mymachine/recipes-bsp/formfactor/formfactor/mymachine
</literallayout>
</para>
</section>
<section id='changing-recipes-graphics'>
<title>Changing <filename>recipes-graphics</filename></title>

<para>
Now let's look at <filename>recipes-graphics</filename>.
For this example we want to remove anything that supports EMGD and
be sure to rename remaining directories appropriately.
The following commands clean up the <filename>recipes-graphics</filename> directory:
<literallayout class='monospaced'>
$ rm -rf meta-mymachine/recipes-graphics/xorg-xserver/xserver-xf86-config/crownbay
$ mv meta-mymachine/recipes-graphics/xorg-xserver/xserver-xf86-config/crownbay-noemgd \
meta-mymachine/recipes-graphics/xorg-xserver/xserver-xf86-config/mymachine
</literallayout>
</para>

<para>
At this point the <filename>recipes-graphics</filename> directory just has files that
support Video Electronics Standards Association (VESA) graphics modes and not EMGD.
</para>
</section>
<section id='changing-recipes-core'>
<title>Changing <filename>recipes-core</filename></title>

<para>
Now let's look at changes in <filename>recipes-core</filename>.
The file <filename>task-core-tools.bbappend</filename> in
<filename>recipes-core/tasks</filename> appends the similarly named recipe
located in the local <link linkend='yocto-project-files'>Yocto Project Files</link> at
<filename>meta/recipes-core/tasks</filename>.
The append file in our layer right now is Crown Bay-specific and supports
EMGD and non-EMGD.
Here are the contents of the file:
<literallayout class='monospaced'>
RRECOMMENDS_task-core-tools-profile_append_crownbay = " systemtap"
RRECOMMENDS_task-core-tools-profile_append_crownbay-noemgd = " systemtap"
</literallayout>
</para>

<para>
The <filename>RRECOMMENDS</filename> statements list packages that
extend usability.
The first <filename>RRECOMMENDS</filename> statement can be removed, while the
second one can be changed to reflect <filename>meta-mymachine</filename>:
<literallayout class='monospaced'>
RRECOMMENDS_task-core-tools-profile_append_mymachine = " systemtap"
</literallayout>
</para>
</section>
<section id='changing-recipes-kernel'>
<title>Changing <filename>recipes-kernel</filename></title>

<para>
Finally, let's look at <filename>recipes-kernel</filename> changes.
Recall that the BSP uses the <filename>linux-yocto</filename> kernel, as determined
earlier in the <filename>mymachine.conf</filename>.
The recipe for that kernel is not located in the
BSP layer but rather in the local Yocto Project files at
<filename>meta/recipes-kernel/linux</filename> and is
named <filename>linux-yocto_3.0.bb</filename>.
The <filename>SRCREV_machine</filename> and <filename>SRCREV_meta</filename>
statements point to the exact commits used by the Yocto Project development team
in their source repositories that identify the right kernel for our hardware.
In other words, the <filename>SRCREV</filename> values are simply Git commit
IDs that identify which commit on each
of the kernel branches (machine and meta) will be checked out and used to build
the kernel.
</para>

<para>
However, in the <filename>meta-mymachine</filename> layer in
<filename>recipes-kernel/linux</filename> resides a <filename>.bbappend</filename>
file named <filename>linux-yocto_3.0.bbappend</filename> that
appends information to the recipe of the same name in <filename>meta/recipes-kernel/linux</filename>.
Thus, the <filename>SRCREV</filename> statements in the append file override
the more general statements found in <filename>meta</filename>.
</para>

<para>
The <filename>SRCREV</filename> statements in the append file currently identify
the kernel that supports the Crown Bay BSP with and without EMGD support.
Here are the statements:
<literallayout class='monospaced'>
SRCREV_machine_pn-linux-yocto_crownbay ?= \
"2247da9131ea7e46ed4766a69bb1353dba22f873"
SRCREV_meta_pn-linux-yocto_crownbay ?= \
"d05450e4aef02c1b7137398ab3a9f8f96da74f52"

SRCREV_machine_pn-linux-yocto_crownbay-noemgd ?= \
"2247da9131ea7e46ed4766a69bb1353dba22f873"
SRCREV_meta_pn-linux-yocto_crownbay-noemgd ?= \
"d05450e4aef02c1b7137398ab3a9f8f96da74f52"
</literallayout>
</para>

<para>
You will notice that there are two pairs of <filename>SRCREV</filename> statements.
The top pair identifies the kernel that supports
EMGD, which we don't care about in this example.
The bottom pair identifies the kernel that we will use:
<filename>linux-yocto</filename>.
At this point, though, the unique commit strings are all still associated with
Crown Bay and not <filename>meta-mymachine</filename>.
</para>

<para>
To fix this situation in <filename>linux-yocto_3.0.bbappend</filename>,
we delete the two <filename>SRCREV</filename> statements that support
EMGD (the top pair).
We also change the remaining pair to specify <filename>mymachine</filename>
and insert the commit identifiers to identify the kernel in which we
are interested, which will be based on the <filename>atom-pc-standard</filename>
kernel.
In this case, because we're working with the edison branch of everything, we
need to use the <filename>SRCREV</filename> values for the atom-pc branch
that are associated with the edison release.
To find those values, we need to find the <filename>SRCREV</filename>
values that edison uses for the atom-pc branch, which we find in the

<filename>poky/meta-yocto/recipes-kernel/linux/linux-yocto_3.0.bbappend</filename>
|
||||
file.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
The machine <filename>SRCREV</filename> we want is in the
|
||||
<filename>SRCREV_machine_atom-pc</filename> variable.
|
||||
The meta <filename>SRCREV</filename> isn't specified in this file, so it must be
|
||||
specified in the base kernel recipe in the
|
||||
<filename>poky/meta/recipes-kernel/linux/linux-yocto_3.0.bb</filename>
|
||||
file, in the <filename>SRCREV_meta variable</filename> found there.
|
||||
It happens to be the same as the value we already inherited from the
|
||||
<filename>meta-crownbay</filename> BSP.
|
||||
Here are the final <filename>SRCREV</filename> statements:
|
||||
<literallayout class='monospaced'>
|
||||
SRCREV_machine_pn-linux-yocto_mymachine ?= \
|
||||
"1e18e44adbe79b846e382370eb29bc4b8cd5a1a0"
|
||||
SRCREV_meta_pn-linux-yocto_mymachine ?= \
|
||||
"d05450e4aef02c1b7137398ab3a9f8f96da74f52"
|
||||
</literallayout>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
In this example, we're using the <filename>SRCREV</filename> values we
|
||||
found already captured in the edison release because we're creating a BSP based on
|
||||
edison.
|
||||
If, instead, we had based our BSP on the master branches, we would want to use
|
||||
the most recent <filename>SRCREV</filename> values taken directly from the kernel repo.
|
||||
We will not be doing that for this example.
|
||||
However, if you do base a future BSP on master and
|
||||
if you are familiar with Git repositories, you probably won’t have trouble locating the
|
||||
exact commit strings in the Yocto Project source repositories you need to change
|
||||
the <filename>SRCREV</filename> statements.
|
||||
You can find all the <filename>machine</filename> and <filename>meta</filename>
|
||||
branch points (commits) for the <filename>linux-yocto-3.0</filename> kernel at
|
||||
<ulink url='&YOCTO_GIT_URL;/cgit/cgit.cgi/linux-yocto-3.0'></ulink>.
|
||||
</para>
|
||||
|
||||
<para>
|
||||
If you need a little more assistance after going to the link then do the following:
|
||||
<orderedlist>
|
||||
<listitem><para>Expand the list of branches by clicking <filename>[…]</filename></para></listitem>
|
||||
<listitem><para>Click on the <filename>yocto/standard/common-pc/atom-pc</filename>
|
||||
branch</para></listitem>
|
||||
<listitem><para>Click on the commit column header to view the top commit</para></listitem>
|
||||
<listitem><para>Copy the commit string for use in the
|
||||
<filename>linux-yocto_3.0.bbappend</filename> file</para></listitem>
|
||||
</orderedlist>
|
||||
</para>
|
||||
|
||||
<para>
|
||||
For the <filename>SRCREV</filename> statement that points to the <filename>meta</filename>
|
||||
branch use the same procedure except expand the <filename>meta</filename>
|
||||
branch in step 2 above.
|
||||
</para>
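
<para>
If you prefer the command line over the web interface, <filename>git ls-remote</filename>
can report both commit IDs without a local clone.
The following is a sketch only; it assumes the kernel repository is reachable at
git://git.yoctoproject.org/linux-yocto-3.0 and that the branches carry the names
shown in the web interface:
<literallayout class='monospaced'>
$ git ls-remote git://git.yoctoproject.org/linux-yocto-3.0 \
     refs/heads/yocto/standard/common-pc/atom-pc refs/heads/meta
</literallayout>
The first column of each output line is the commit string for the corresponding
<filename>SRCREV</filename> statement.
</para>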

<para>
Also in the <filename>linux-yocto_3.0.bbappend</filename> file are
<filename>COMPATIBLE_MACHINE</filename>, <filename>KMACHINE</filename>,
and <filename>KERNEL_FEATURES</filename> statements.
Two sets of these exist: one set supports EMGD and one set does not.
Because we are not interested in supporting EMGD, those three statements can be deleted.
The remaining three must be changed so that <filename>mymachine</filename> replaces
<filename>crownbay-noemgd</filename> and <filename>crownbay</filename>.
Because we are using the atom-pc branch for this new BSP, we can also take
the exact branch we need for the <filename>KMACHINE</filename> variable from the
<filename>poky/meta-yocto/recipes-kernel/linux/linux-yocto_3.0.bbappend</filename>
file we looked at in a previous step.
In this case, the value we want is in the <filename>KMACHINE_atom-pc</filename>
variable in that file.
Here is the final <filename>linux-yocto_3.0.bbappend</filename> file after all
the edits:
<literallayout class='monospaced'>
FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"

COMPATIBLE_MACHINE_mymachine = "mymachine"
KMACHINE_mymachine = "yocto/standard/common-pc/atom-pc"
KERNEL_FEATURES_append_mymachine += " cfg/smp.scc"

SRCREV_machine_pn-linux-yocto_mymachine ?= \
   "1e18e44adbe79b846e382370eb29bc4b8cd5a1a0"
SRCREV_meta_pn-linux-yocto_mymachine ?= \
   "d05450e4aef02c1b7137398ab3a9f8f96da74f52"
</literallayout>
</para>
</section>
</section>

<section id='bsp-recipe-change-summary'>
<title>BSP Recipe Change Summary</title>

<para>
In summary, the edits to the layer’s recipe files remove any files and
statements that do not support your targeted hardware and add any new recipes
you might need.
In this example, it was simply a matter of ridding the new layer
<filename>meta-mymachine</filename> of any code that supported the EMGD features
and making sure we were identifying the kernel that supports our example, which
is the <filename>atom-pc-standard</filename> kernel.
We did not introduce any new recipes to the layer.
</para>

<para>
Finally, it is also important to update the layer’s <filename>README</filename>
file so that the information in it reflects your BSP.
</para>
</section>
</section>

<section id='preparing-for-the-build-app'>
<title>Preparing for the Build</title>

<para>
To get ready to build your image that uses the new layer, you need to do the following:
<orderedlist>
<listitem><para>Get the environment ready for the build by sourcing the environment
script.
The environment script is at the top level of the local Yocto Project files
directory structure.
The script has the string
<filename>init-build-env</filename> in the file’s name.
For this example, the following command gets the build environment ready:
<literallayout class='monospaced'>
$ source oe-init-build-env yocto-build
</literallayout>
When you source the script, a build directory is created in the current
working directory.
In our example we were in the <filename>poky</filename> directory.
Thus, entering the previous command created the <filename>yocto-build</filename> directory.
If you do not provide a name for the build directory, it defaults to
<filename>build</filename>.
The <filename>yocto-build</filename> directory contains a
<filename>conf</filename> directory that has
two configuration files you will need to check: <filename>bblayers.conf</filename>
and <filename>local.conf</filename>.</para></listitem>
<listitem><para>Check and edit the resulting <filename>local.conf</filename> file.
This file minimally identifies the machine for which to build the image by
configuring the <filename>MACHINE</filename> variable.
For this example, you must set the variable to <filename>mymachine</filename> as follows:
<literallayout class='monospaced'>
MACHINE ??= "mymachine"
</literallayout>
You should also be sure any other variables in which you are interested are set.
Some variables to consider are <filename>BB_NUMBER_THREADS</filename>
and <filename>PARALLEL_MAKE</filename>, both of which can greatly reduce your build time
if your development system supports multiple cores.
For development systems that support multiple cores, a good rule of thumb is to set
both the <filename>BB_NUMBER_THREADS</filename> and <filename>PARALLEL_MAKE</filename>
variables to twice the number of cores your system supports
(see the sketch after this list).</para></listitem>
<listitem><para>Update the <filename>bblayers.conf</filename> file so that it includes
the path to your new BSP layer.
In this example, you need to include the pathname to <filename>meta-mymachine</filename>.
For this example, the
<filename>BBLAYERS</filename> variable in the file would need to include the following path:
<literallayout class='monospaced'>
$HOME/poky/meta-intel/meta-mymachine
</literallayout>
A complete example appears after this list.</para></listitem>
</orderedlist>
</para>
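
<para>
As a concrete reference, here is what the relevant parts of the two configuration
files might look like after the previous steps.
This is a sketch only: it assumes a four-core development machine and assumes
<filename>poky</filename> (with <filename>meta-intel</filename> inside it) lives
directly under your home directory, so adjust the values and paths for your setup.
In <filename>local.conf</filename>:
<literallayout class='monospaced'>
MACHINE ??= "mymachine"
BB_NUMBER_THREADS = "8"
PARALLEL_MAKE = "-j 8"
</literallayout>
In <filename>bblayers.conf</filename>:
<literallayout class='monospaced'>
BBLAYERS ?= " \
   $HOME/poky/meta \
   $HOME/poky/meta-yocto \
   $HOME/poky/meta-intel/meta-mymachine \
   "
</literallayout>
</para>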

<para>
The appendix
<ulink url='&YOCTO_DOCS_REF_URL;#ref-variables-glos'>
Reference: Variables Glossary</ulink> in the Yocto Project Reference Manual has more information
on configuration variables.
</para>
</section>

<section id='building-the-image-app'>
<title>Building and Booting the Image</title>

<para>
To build the image for our <filename>meta-mymachine</filename> BSP, enter the following command
from the same shell from which you ran the setup script.
You should run the <filename>bitbake</filename> command without any intervening shell
commands; for example, moving your working directory around could cause problems.
Here is the command for this example:
<literallayout class='monospaced'>
$ bitbake -k core-image-sato
</literallayout>
</para>

<para>
This command specifies an image that has Sato support and that can be run from a USB device or
from a CD without having to first install anything.
The build process takes significant time and includes thousands of tasks, which are reported
at the console.
If the build results in any type of error, you should check for misspellings in the
files you changed or problems with your host development environment such as missing packages.
</para>

<para>
Finally, once you have an image, you can try booting it from a device
(e.g. a USB device).
To prepare a bootable USB device, insert a USB flash drive into your build system and
copy the <filename>.hddimg</filename> file, located in the
<filename>poky/yocto-build/tmp/deploy/images</filename>
directory after a successful build, to the flash drive.
Assuming the USB flash drive takes device <filename>/dev/sdf</filename>,
use <filename>dd</filename> to copy the live image to it.
For example:
<literallayout class='monospaced'>
# dd if=core-image-sato-mymachine-20111101223904.hddimg of=/dev/sdf
# sync
# eject /dev/sdf
</literallayout>
You should now have a bootable USB flash device.
</para>
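
<para>
Because <filename>dd</filename> writes to whatever device you name without asking
for confirmation, it is worth verifying the device node first.
One way to do that (a sketch; device names and sizes will differ on your system)
is to plug in the drive and then check the kernel log and the partition listing:
<literallayout class='monospaced'>
# dmesg | tail
# fdisk -l /dev/sdf
</literallayout>
Make sure the reported size and vendor match your flash drive, and not one of
your hard disks, before running the <filename>dd</filename> command above.
</para>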

<para>
Insert the device
into a bootable USB socket on the target, and power it on.
The system should boot to the Sato graphical desktop.
<footnote><para>Because
this new image is not in any way tailored to the system you're
booting it on, which is assumed to be some sort of atom-pc (netbook) system for this
example, it might not be completely functional, though it should at least boot to a text
prompt.
Specifically, it might fail to boot into graphics without some tweaking.
If this ends up being the case, a possible next step would be to replace the
<filename>mymachine.conf</filename>
contents with the contents of <filename>atom-pc.conf</filename>, replace
<filename>xorg.conf</filename> with the atom-pc <filename>xorg.conf</filename>
in <filename>meta-yocto</filename>, and see if it fares any better.
In any case, following the previous steps will give you a buildable image that
will probably boot on most systems.
Getting things working like you want
them to for your hardware will normally require some amount of experimentation with
configuration settings.</para></footnote>
</para>

<para>
For reference, the sato image produced by the previous steps for edison
should look like the following in terms of size.
If your sato image is much different from this,
you probably made a mistake in one of the above steps:
<literallayout class='monospaced'>
358715392 2011-11-01 19:11 core-image-sato-mymachine-20111101223904.hddimg
</literallayout>
<note>The previous instructions are also present in the README that was copied
from <filename>meta-crownbay</filename>, which should also be updated to reflect the
specifics of your new BSP.
That file and the <filename>README.hardware</filename> file in the top-level
<filename>poky</filename> directory
also provide some suggestions for things to try if booting fails and produces
strange error messages.</note>
</para>
</section>
</appendix>


<!--
vim: expandtab tw=80 ts=4
-->
@@ -18,14 +18,13 @@
sources where you can find more detail.
For example, detailed information on Git, repositories and open source in general
can be found in many places.
Another example is how to get set up to use the Yocto Project, which our Yocto Project Quick Start covers.
</para>

<para>
The Yocto Project Development Manual, however, does provide detailed examples on how to create a
Board Support Package (BSP), change the kernel source code, and reconfigure the kernel.
You can find this information in the appendices of the manual.
</para>
</section>

@@ -44,8 +43,14 @@
<listitem><para>Development case overviews for both system development and user-space
applications.</para></listitem>
<listitem><para>An overview and understanding of the emulation environment used with
the Yocto Project - the Quick EMUlator (QEMU).</para></listitem>
<!-- <listitem><para>A discussion of target-level analysis techniques, tools, tips,
and tricks.</para></listitem>
<listitem><para>Considerations for deploying your final product.</para></listitem> -->
<listitem><para>An understanding of basic kernel architecture and
concepts.</para></listitem>
<!-- <listitem><para>Information that will help you migrate an existing project to the
Yocto Project development environment.</para></listitem> -->
<listitem><para>Many references to other sources of related information.</para></listitem>
</itemizedlist>
</para>
@@ -59,14 +64,14 @@
<itemizedlist>
<listitem><para>Step-by-step instructions if those instructions exist in other Yocto
Project documentation.
For example, the Application Development Toolkit (ADT) User’s Guide contains detailed
instructions on how to obtain and configure the
<trademark class='trade'>Eclipse</trademark> Yocto Plug-in.</para></listitem>
<listitem><para>Reference material.
This type of material resides in an appropriate reference manual.
For example, system variables are documented in the
<ulink url='&YOCTO_DOCS_REF_URL;'>
Yocto Project Reference Manual</ulink>.</para></listitem>
<listitem><para>Detailed public information that is not specific to the Yocto Project.
For example, exhaustive information on how to use Git is covered better through the
Internet than in this manual.</para></listitem>
@@ -86,49 +91,42 @@
</emphasis> The home page for the Yocto Project provides lots of information on the project
as well as links to software and documentation.</para></listitem>
<listitem><para><emphasis>
<ulink url='&YOCTO_DOCS_QS_URL;'>
The Yocto Project Quick Start</ulink>:</emphasis> This short document lets you get started
with the Yocto Project quickly and start building an image.</para></listitem>
<listitem><para><emphasis>
<ulink url='&YOCTO_DOCS_REF_URL;'>
The Yocto Project Reference Manual</ulink>:</emphasis> This manual is a reference
guide to the Yocto Project build component known as "Poky."
The manual also contains a reference chapter on Board Support Package (BSP)
layout.</para></listitem>
<listitem><para><emphasis>
<ulink url='&YOCTO_DOCS_ADT_URL;'>
The Yocto Project Application Development Toolkit (ADT) User's Guide</ulink>:</emphasis>
This guide provides information that lets you get going with the ADT to
develop projects using the Yocto Project.</para></listitem>
<listitem><para><emphasis>
<ulink url='&YOCTO_DOCS_BSP_URL;'>
The Yocto Project Board Support Package (BSP) Developer's Guide</ulink>:</emphasis>
This guide defines the structure for BSP components.
Having a commonly understood structure encourages standardization.</para></listitem>
<listitem><para><emphasis>
<ulink url='&YOCTO_DOCS_KERNEL_URL;'>
The Yocto Project Kernel Architecture and Use Manual</ulink>:</emphasis>
This manual describes the architecture of the Yocto Project kernel and provides
some work flow examples.</para></listitem>
<listitem><para><emphasis>
<ulink url='http://www.youtube.com/watch?v=3ZlOu-gLsh0'>
Yocto Eclipse Plug-in</ulink>:</emphasis> A step-by-step instructional video that
demonstrates how an application developer uses Yocto Plug-in features within
the Eclipse IDE.</para></listitem>
<listitem><para><emphasis>
<ulink url='&YOCTO_WIKI_URL;/wiki/FAQ'>FAQ</ulink>:</emphasis>
A list of commonly asked questions and their answers.</para></listitem>
<listitem><para><emphasis>
<ulink url='&YOCTO_HOME_URL;/download/yocto/yocto-project-1.1-release-notes-poky-&POKYVERSION;'>
Release Notes</ulink>:</emphasis> Features, updates and known issues for the current
release of the Yocto Project.</para></listitem>
<listitem><para><emphasis>
<ulink url='&YOCTO_HOME_URL;/projects/hob'>
Hob</ulink>:</emphasis> A graphical user interface for BitBake.
Hob's primary goal is to enable a user to perform common tasks more easily.</para></listitem>
<listitem><para><emphasis>
<ulink url='&YOCTO_HOME_URL;/documentation/build-appliance'>
Build Appliance</ulink>:</emphasis> A bootable custom embedded Linux image you can
either build using a non-Linux development system (VMware applications) or download
from the Yocto Project website.
See the <ulink url='&YOCTO_HOME_URL;/documentation/build-appliance'>Build Appliance</ulink>
page for more information.</para></listitem>
<listitem><para><emphasis>
<ulink url='&YOCTO_BUGZILLA_URL;'>Bugzilla</ulink>:</emphasis>
The bug tracking application the Yocto Project uses.
@@ -143,40 +141,38 @@
<listitem><para><ulink url='&YOCTO_LISTS_URL;/listinfo/poky'></ulink> for a
Yocto Project Discussions mailing list about the Poky build system.</para></listitem>
<listitem><para><ulink url='&YOCTO_LISTS_URL;/listinfo/yocto-announce'></ulink>
for a mailing list to receive official Yocto Project announcements for developments
as well as Yocto Project milestones.</para></listitem>
</itemizedlist></para></listitem>
<listitem><para><emphasis>Internet Relay Chat (IRC):</emphasis>
Two IRC channels on freenode are available
for Yocto Project and Poky discussions: <filename>#yocto</filename> and
<filename>#poky</filename>.</para></listitem>
<listitem><para><emphasis>
<ulink url='&OH_HOME_URL;'>OpenedHand</ulink>:</emphasis>
The company where the Yocto Project build system Poky was first developed.
OpenedHand has since been acquired by Intel Corporation.</para></listitem>
<listitem><para><emphasis>
<ulink url='http://www.intel.com/'>Intel Corporation</ulink>:</emphasis>
The company that acquired OpenedHand in 2008 and continues development on the
Yocto Project.</para></listitem>
<listitem><para><emphasis>
<ulink url='&OE_HOME_URL;'>OpenEmbedded</ulink>:</emphasis>
The upstream, generic, embedded distribution the Yocto Project build system (Poky) derives
from and to which it contributes.</para></listitem>
<listitem><para><emphasis>
<ulink url='http://developer.berlios.de/projects/bitbake/'>
BitBake</ulink>:</emphasis> The tool used to process Yocto Project metadata.</para></listitem>
<listitem><para><emphasis>
<ulink url='http://bitbake.berlios.de/manual/'>
BitBake User Manual</ulink>:</emphasis> A comprehensive guide to the BitBake tool.
</para></listitem>
<listitem><para><emphasis>
<ulink url='http://pimlico-project.org/'>Pimlico</ulink>:</emphasis>
A suite of lightweight Personal Information Management (PIM) applications designed
primarily for handheld and mobile devices.</para></listitem>
<listitem><para><emphasis>
<ulink url='http://wiki.qemu.org/Index.html'>QEMU</ulink>:
</emphasis> An open-source machine emulator and virtualizer.</para></listitem>
</itemizedlist>
</para>

@@ -1,553 +0,0 @@
<!DOCTYPE appendix PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd"
[<!ENTITY % poky SYSTEM "../poky.ent"> %poky; ] >

<appendix id='dev-manual-kernel-appendix'>

<title>Kernel Modification Example</title>

<para>
Kernel modification involves changing or adding configurations to an existing kernel,
changing or adding recipes to the kernel that are needed to support specific hardware features,
or even altering the source code itself.
This appendix presents simple examples that modify the kernel source code,
change the kernel configuration, and add a kernel source recipe.
<note>
You can use the <filename>yocto-kernel</filename> script
found in the <link linkend='source-directory'>Source Directory</link>
under <filename>scripts</filename> to manage kernel patches and configuration.
See the "<ulink url='&YOCTO_DOCS_BSP_URL;#managing-kernel-patches-and-config-items-with-yocto-kernel'>Managing kernel Patches and Config Items with yocto-kernel</ulink>"
section in the Yocto Project Board Support Packages (BSP) Developer's Guide for
more information.</note>
</para>

<section id='modifying-the-kernel-source-code'>
<title>Modifying the Kernel Source Code</title>

<para>
This example adds some simple QEMU emulator console output at boot time by
adding <filename>printk</filename> statements to the kernel's
<filename>calibrate.c</filename> source code file.
Booting the modified image causes the added messages to appear on the emulator's
console.
</para>

<section id='understanding-the-files-you-need'>
<title>Understanding the Files You Need</title>

<para>
Before you modify the kernel, you need to know what Git repositories and file
structures you need.
Briefly, you need the following:
<itemizedlist>
<listitem><para>A local
<link linkend='source-directory'>Source Directory</link> for the
poky Git repository</para></listitem>
<listitem><para>Local copies of the
<link linkend='poky-extras-repo'><filename>poky-extras</filename></link>
Git repository placed within the Source Directory</para></listitem>
<listitem><para>A bare clone of the
<link linkend='local-kernel-files'>Yocto Project Kernel</link> upstream Git
repository to which you want to push your modifications</para></listitem>
<listitem><para>A copy of that bare clone in which you make your source
modifications</para></listitem>
</itemizedlist>
</para>

<para>
The following figure summarizes these four areas.
Within each rectangle that represents a data structure, a
host development directory pathname appears at the
lower left-hand corner of the box.
These pathnames are the locations used in this example.
The figure also provides key statements and commands used during the kernel
modification process:
</para>

<para>
<imagedata fileref="figures/kernel-example-repos-generic.png" width="7in" depth="5in"
align="center" scale="100" />
</para>

<para>
Here is a brief description of the four areas:
<itemizedlist>
<listitem><para><emphasis>Local Source Directory:</emphasis>
This area contains all the metadata that supports building images
using the OpenEmbedded build system.
In this example, the
<link linkend='source-directory'>Source Directory</link> also
contains the
<link linkend='build-directory'>Build Directory</link>,
which contains the configuration directory
that lets you control the build.
Also in this example, the Source Directory contains local copies of the
<filename>poky-extras</filename> Git repository.</para>
<para>See the bulleted item
"<link linkend='local-yp-release'>Yocto Project Release</link>"
for information on how to get these files on your local system.</para></listitem>
<listitem><para><emphasis>Local copies of the <filename>poky-extras</filename> Git Repository:</emphasis>
This area contains the <filename>meta-kernel-dev</filename> layer,
which is where you make changes that append the kernel build recipes.
You edit <filename>.bbappend</filename> files to locate your
local kernel source files and to identify the kernel being built.
This Git repository is a gathering place for extensions to the Yocto Project
(or really any) kernel recipes that facilitate the creation and development
of kernel features, BSPs or configurations.</para>
<para>See the bulleted item
"<link linkend='poky-extras-repo'>The
<filename>poky-extras</filename> Git Repository</link>"
for information on how to get these files.</para></listitem>
<listitem><para><emphasis>Bare Clone of the Yocto Project kernel:</emphasis>
This bare Git repository tracks the upstream Git repository of the Linux
Yocto kernel source code you are changing.
When you modify the kernel, you must work through a bare clone.
All source code changes you make to the kernel must be committed and
pushed to the bare clone using Git commands.
As mentioned, the <filename>.bbappend</filename> file in the
<filename>poky-extras</filename> repository points to the bare clone
so that the build process can locate the locally changed source files.</para>
<para>See the bulleted item
"<link linkend='local-kernel-files'>Yocto Project Kernel</link>"
for information on how to set up the bare clone.
</para></listitem>
<listitem><para><emphasis>Copy of the Yocto Project Kernel Bare Clone:</emphasis>
This Git repository contains the actual source files that you modify.
Any changes you make to files in this location need to ultimately be pushed
to the bare clone using the <filename>git push</filename> command.</para>
<para>See the bulleted item
"<link linkend='local-kernel-files'>Yocto Project Kernel</link>"
for information on how to set up the bare clone.
<note>Typically, Git workflows follow a scheme where changes made to a local area
are pulled into a Git repository.
However, because the <filename>git pull</filename> command does not work
with bare clones, this workflow pushes changes to the
repository even though you could use other more complicated methods to
get changes into the bare clone.</note>
</para></listitem>
</itemizedlist>
</para>
</section>

<section id='setting-up-the-local-yocto-project-files-git-repository'>
<title>Setting Up the Local Source Directory</title>

<para>
You can set up the
<link linkend='source-directory'>Source Directory</link>
through tarball extraction or by
cloning the <filename>poky</filename> Git repository.
This example uses <filename>poky</filename> as the root directory of the
local Source Directory.
See the bulleted item
"<link linkend='local-yp-release'>Yocto Project Release</link>"
for information on how to get these files.
</para>

<para>
Once you have the Source Directory set up,
you have many development branches from which you can work.
From inside the local repository you can see the branch names and the tag names used
in the upstream Git repository by using either of the following commands:
<literallayout class='monospaced'>
$ cd poky
$ git branch -a
$ git tag -l
</literallayout>
This example uses the Yocto Project &DISTRO; Release code named "&DISTRO_NAME;",
which maps to the <filename>&DISTRO_NAME;</filename> branch in the repository.
The following commands create and check out the local <filename>&DISTRO_NAME;</filename>
branch:
<literallayout class='monospaced'>
$ git checkout -b &DISTRO_NAME; origin/&DISTRO_NAME;
Branch &DISTRO_NAME; set up to track remote branch &DISTRO_NAME; from origin.
Switched to a new branch '&DISTRO_NAME;'
</literallayout>
</para>
</section>

<section id='setting-up-the-poky-extras-git-repository'>
<title>Setting Up the Local poky-extras Git Repository</title>

<para>
This example creates a local copy of the <filename>poky-extras</filename> Git
repository inside the <filename>poky</filename> Source Directory.
See the bulleted item "<link linkend='poky-extras-repo'>The
<filename>poky-extras</filename> Git Repository</link>"
for information on how to set up a local copy of the
<filename>poky-extras</filename> repository.
</para>

<para>
Because this example uses the Yocto Project &DISTRO; Release code
named "&DISTRO_NAME;", which maps to the <filename>&DISTRO_NAME;</filename>
branch in the repository, you need to be sure you are using that
branch for <filename>poky-extras</filename>.
The following commands create and check out the local
<filename>&DISTRO_NAME;</filename> branch:
<literallayout class='monospaced'>
$ cd ~/poky/poky-extras
$ git checkout -b &DISTRO_NAME; origin/&DISTRO_NAME;
Branch &DISTRO_NAME; set up to track remote branch &DISTRO_NAME; from origin.
Switched to a new branch '&DISTRO_NAME;'
</literallayout>
</para>
</section>

<section id='setting-up-the-bare-clone-and-its-copy'>
<title>Setting Up the Bare Clone and its Copy</title>

<para>
This example modifies the <filename>linux-yocto-3.4</filename> kernel.
Thus, you need to create a bare clone of that kernel and then make a copy of the
bare clone.
See the bulleted item
"<link linkend='local-kernel-files'>Yocto Project Kernel</link>"
for information on how to do that.
</para>
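
<para>
For orientation, both repositories can be created with ordinary Git commands
along the following lines.
This is a sketch only; it assumes the upstream repository is reachable at
git://git.yoctoproject.org/linux-yocto-3.4 and uses the directory names this
example uses, so see the bulleted item above for the authoritative steps:
<literallayout class='monospaced'>
$ cd ~
$ git clone --bare git://git.yoctoproject.org/linux-yocto-3.4 linux-yocto-3.4.git
$ git clone linux-yocto-3.4.git my-linux-yocto-3.4-work
</literallayout>
</para>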

<para>
The bare clone exists for the kernel build tools and simply as the receiving end
of <filename>git push</filename>
commands after you make edits and commits inside the copy of the clone.
The copy (<filename>my-linux-yocto-3.4-work</filename> in this example) has to have
a local branch created and checked out for your work.
This example uses <filename>standard-common-pc-base</filename> as the local branch.
The following commands create and check out the branch:
<literallayout class='monospaced'>
$ cd ~/my-linux-yocto-3.4-work
$ git checkout -b standard-common-pc-base origin/standard/common-pc/base
Branch standard-common-pc-base set up to track remote branch
standard/common-pc/base from origin.
Switched to a new branch 'standard-common-pc-base'
</literallayout>
</para>
</section>

<section id='building-and-booting-the-default-qemu-kernel-image'>
<title>Building and Booting the Default QEMU Kernel Image</title>

<para>
Before we make changes to the kernel source files, this example first builds the
default image and then boots it inside the QEMU emulator.
<note>
Because a full build can take hours, you should check two variables in the
<filename>build</filename> directory that is created after you source the
<filename>&OE_INIT_FILE;</filename> script.
You can find these variables,
<filename>BB_NUMBER_THREADS</filename> and <filename>PARALLEL_MAKE</filename>,
in the <filename>local.conf</filename> configuration file in the
<filename>build/conf</filename> directory.
By default, these variables are commented out.
If your host development system supports multi-core and multi-thread capabilities,
you can uncomment these statements and set the variables to significantly shorten
the full build time.
As a guideline, set both <filename>BB_NUMBER_THREADS</filename> and
<filename>PARALLEL_MAKE</filename> to twice the number
of cores your machine supports.
</note>
The following two commands <filename>source</filename> the build environment setup script
and build the default <filename>qemux86</filename> image.
If necessary, the script creates the build directory:
<literallayout class='monospaced'>
$ cd ~/poky
$ source &OE_INIT_FILE;
You had no conf/local.conf file. This configuration file has therefore been
created for you with some default values. You may wish to edit it to use a
different MACHINE (target hardware) or enable parallel build options to take
advantage of multiple cores for example. See the file for more information as
common configuration options are commented.

The Yocto Project has extensive documentation about OE including a reference manual
which can be found at:
http://yoctoproject.org/documentation

For more information about OpenEmbedded see their website:
http://www.openembedded.org/

You had no conf/bblayers.conf file. The configuration file has been created for
you with some default values. To add additional metadata layers into your
configuration please add entries to this file.

The Yocto Project has extensive documentation about OE including a reference manual
which can be found at:
http://yoctoproject.org/documentation

For more information about OpenEmbedded see their website:
http://www.openembedded.org/



### Shell environment set up for builds. ###

You can now run 'bitbake &lt;target&gt;'

Common targets are:
    core-image-minimal
    core-image-sato
    meta-toolchain
    meta-toolchain-sdk
    adt-installer
    meta-ide-support

You can also run generated qemu images with a command like 'runqemu qemux86'
</literallayout>
</para>

<para>
The following <filename>bitbake</filename> command starts the build:
<literallayout class='monospaced'>
$ bitbake -k core-image-minimal
</literallayout>
<note>Be sure to check the settings in the <filename>local.conf</filename> file
before starting the build.</note>
</para>

<para>
After the build completes, you can start the QEMU emulator using the resulting
<filename>qemux86</filename> image as follows:
<literallayout class='monospaced'>
$ runqemu qemux86
</literallayout>
</para>

<para>
As the image boots in the emulator, console messages and status output appear
across the terminal window.
Because the output scrolls by quickly, it is difficult to read.
To examine the output, you log into the system using the
login <filename>root</filename> with no password.
Once you are logged in, issue the following command to scroll through the
console output:
<literallayout class='monospaced'>
# dmesg | less
</literallayout>
</para>

<para>
Take note of the output, as you will want to look for your inserted print command output
later in the example.
</para>
</section>

<section id='changing-the-source-code-and-pushing-it-to-the-bare-clone'>
<title>Changing the Source Code and Pushing it to the Bare Clone</title>

<para>
The file you change in this example is named <filename>calibrate.c</filename>
and is located in the <filename>my-linux-yocto-3.4-work</filename> Git repository
(the copy of the bare clone) in the <filename>init</filename> directory.
This example simply inserts several <filename>printk</filename> statements
at the beginning of the <filename>calibrate_delay</filename> function.
</para>

<para>
Here is the unaltered code at the start of this function:
<literallayout class='monospaced'>
void __cpuinit calibrate_delay(void)
{
        unsigned long lpj;
        static bool printed;
        int this_cpu = smp_processor_id();

        if (per_cpu(cpu_loops_per_jiffy, this_cpu)) {
        .
        .
        .
</literallayout>
</para>

<para>
Here is the altered code showing five new <filename>printk</filename> statements
near the top of the function:
<literallayout class='monospaced'>
void __cpuinit calibrate_delay(void)
{
        unsigned long lpj;
        static bool printed;
        int this_cpu = smp_processor_id();

        printk("*************************************\n");
        printk("*                                   *\n");
        printk("*        HELLO YOCTO KERNEL         *\n");
        printk("*                                   *\n");
        printk("*************************************\n");

        if (per_cpu(cpu_loops_per_jiffy, this_cpu)) {
        .
        .
        .
</literallayout>
</para>

<para>
After making and saving your changes, you need to stage them for the push.
The following Git commands are one method of staging and committing your changes:
<literallayout class='monospaced'>
$ git add calibrate.c
$ git commit --signoff
</literallayout>
</para>

<para>
Once the source code has been modified, you need to use Git to push the changes to
the bare clone.
If you do not push the changes, the OpenEmbedded build system will not pick
up the changed source files.
</para>

<para>
The following command pushes the changes to the bare clone:
<literallayout class='monospaced'>
$ git push origin standard-common-pc-base:standard/default/common-pc/base
</literallayout>
</para>
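
<para>
If you want to confirm that the push landed where the build will look for it,
you can ask the bare clone for its newest commit on that branch.
This is a sketch; it assumes the bare clone lives at
<filename>~/linux-yocto-3.4.git</filename> as set up earlier:
<literallayout class='monospaced'>
$ git --git-dir=$HOME/linux-yocto-3.4.git log --oneline -1 \
     standard/default/common-pc/base
</literallayout>
The commit shown should be the one you just created.
</para>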
</section>

<section id='changing-build-parameters-for-your-build'>
<title>Changing Build Parameters for Your Build</title>

<para>
At this point, the source has been changed and pushed.
The example now defines some variables used by the OpenEmbedded build system
to locate your kernel source.
You essentially need to identify where to find the kernel recipe and the changed source code.
You also need to be sure some basic configurations are in place that identify the
type of machine you are building and that help speed up the build should your host support
multiple-core and thread capabilities.
</para>

<para>
Do the following to make sure the build parameters are set up for the example.
Once you set up these build parameters, they do not have to change unless you
change the target architecture of the machine you are building or you move
the bare clone, the copy of the clone, or the <filename>poky-extras</filename> repository:
<itemizedlist>
<listitem><para><emphasis>Build for the Correct Target Architecture:</emphasis> The
<filename>local.conf</filename> file in the build directory defines the build's
target architecture.
By default, <filename>MACHINE</filename> is set to
<filename>qemux86</filename>, which specifies a 32-bit
<trademark class='registered'>Intel</trademark> Architecture
target machine suitable for the QEMU emulator.
In this example, <filename>MACHINE</filename> is correctly configured.
</para></listitem>
<listitem><para><emphasis>Optimize Build Time:</emphasis> Also in the
<filename>local.conf</filename> file are two variables that can speed your
build time if your host supports multi-core and multi-thread capabilities:
<filename>BB_NUMBER_THREADS</filename> and <filename>PARALLEL_MAKE</filename>.
If the host system has multiple cores, then you can optimize build time
by setting both these variables to twice the number of
cores.</para></listitem>
<listitem><para><emphasis>Identify Your <filename>meta-kernel-dev</filename>
Layer:</emphasis> The <filename>BBLAYERS</filename> variable in the
<filename>bblayers.conf</filename> file found in the
<filename>poky/build/conf</filename> directory needs to have the path to your local
<filename>meta-kernel-dev</filename> layer.
By default, the <filename>BBLAYERS</filename> variable contains paths to
<filename>meta</filename> and <filename>meta-yocto</filename> in the
<filename>poky</filename> Git repository.
Add the path to your <filename>meta-kernel-dev</filename> location.
Be sure to substitute your user information in the statement.
Here is an example:
<literallayout class='monospaced'>
BBLAYERS = " \
   /home/scottrif/poky/meta \
   /home/scottrif/poky/meta-yocto \
   /home/scottrif/poky/meta-yocto-bsp \
   /home/scottrif/poky/poky-extras/meta-kernel-dev \
   "
</literallayout></para></listitem>
<listitem><para><emphasis>Identify Your Source Files:</emphasis> In the
<filename>linux-yocto_3.4.bbappend</filename> file located in the
<filename>poky-extras/meta-kernel-dev/recipes-kernel/linux</filename>
directory, you need to identify the location of the
local source code, which in this example is the bare clone named
<filename>linux-yocto-3.4.git</filename>.
To do this, set the <filename>KSRC_linux_yocto_3_4</filename> variable to point to your
local <filename>linux-yocto-3.4.git</filename> Git repository by adding the
following statement.
Also, be sure the <filename>SRC_URI</filename> variable is pointing to
your kernel source files by removing the comment.
Finally, be sure to substitute your user information in the statement:
<literallayout class='monospaced'>
KSRC_linux_yocto_3_4 ?= "/home/scottrif/linux-yocto-3.4.git"
SRC_URI = "git://${KSRC_linux_yocto_3_4};protocol=file;nocheckout=1;branch=${KBRANCH},meta;name=machine,meta"
</literallayout></para></listitem>
</itemizedlist>
</para>

<note>
<para>Before attempting to build the modified kernel, there is one more set of changes you
need to make in the <filename>meta-kernel-dev</filename> layer.
Because all the kernel <filename>.bbappend</filename> files are parsed during the
build process regardless of whether you are using them or not, you should either
comment out the <filename>COMPATIBLE_MACHINE</filename> statements in all
unused <filename>.bbappend</filename> files, or simply remove (or rename) all the files
except the one you are using for the build
(i.e. <filename>linux-yocto_3.4.bbappend</filename> in this example).
A shell sketch for the first option follows this note.</para>
<para>If you do not make one of these two adjustments, your machine will be compatible
with all the kernel recipes in the <filename>meta-kernel-dev</filename> layer.
When your machine is compatible with all the kernel recipes, the build attempts
to build all kernels in the layer.
You could end up with build errors blocking your work.</para>
</note>
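
<para>
One way to comment out the <filename>COMPATIBLE_MACHINE</filename> statements in
every append file except the one this example uses is a short shell loop.
This is a sketch only; it assumes the layer layout described above and that the
statements begin at the start of a line, so review the files afterward:
<literallayout class='monospaced'>
$ cd ~/poky/poky-extras/meta-kernel-dev/recipes-kernel/linux
$ for f in *.bbappend; do
>     [ "$f" = "linux-yocto_3.4.bbappend" ] && continue
>     sed -i 's/^COMPATIBLE_MACHINE/#COMPATIBLE_MACHINE/' "$f"
> done
</literallayout>
</para>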
</section>

<section id='building-and-booting-the-modified-qemu-kernel-image'>
<title>Building and Booting the Modified QEMU Kernel Image</title>

<para>
Next, you need to build the modified image.
Do the following:
<orderedlist>
<listitem><para>Your environment should be set up since you previously sourced
the <filename>&OE_INIT_FILE;</filename> script.
If it isn't, source the script again from <filename>poky</filename>:
<literallayout class='monospaced'>
$ cd ~/poky
$ source &OE_INIT_FILE;
</literallayout>
</para></listitem>
<listitem><para>Be sure old images are cleaned out by running the
<filename>cleanall</filename> BitBake task as follows from your build directory:
<literallayout class='monospaced'>
$ bitbake -c cleanall linux-yocto
</literallayout></para>
<para><note>Never remove any files by hand from the <filename>tmp/deploy</filename>
directory inside the build directory.
Always use the BitBake <filename>cleanall</filename> task to clear
out previous builds.</note></para></listitem>
<listitem><para>Next, build the kernel image using this command:
<literallayout class='monospaced'>
$ bitbake -k core-image-minimal
</literallayout></para></listitem>
<listitem><para>Finally, boot the modified image in the QEMU emulator
using this command:
<literallayout class='monospaced'>
$ runqemu qemux86
</literallayout></para></listitem>
</orderedlist>
</para>

<para>
Log into the machine using <filename>root</filename> with no password and then
use the following shell command to scroll through the console's boot output:
<literallayout class='monospaced'>
# dmesg | less
</literallayout>
</para>

<para>
You should see the results of your <filename>printk</filename> statements
as part of the output.
</para>
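
<para>
Rather than paging through the whole log, you can also search for the banner
directly (a minimal sketch):
<literallayout class='monospaced'>
# dmesg | grep "HELLO YOCTO KERNEL"
</literallayout>
</para>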
</section>
</section>
</appendix>

<!--
vim: expandtab tw=80 ts=4
-->